From bd2523521522a125ffbb2a9aa31baea74eea7230 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Wed, 27 Dec 2023 14:03:31 +0100 Subject: [PATCH 001/146] enabled all rest controllers --- .../api/v1/V1IndividualController.java | 18 +++++++++--------- .../api/v1/V1OntologyController.java | 2 +- .../api/v1/V1OntologyIndividualController.java | 2 +- .../api/v1/V1OntologyPropertyController.java | 2 +- .../api/v1/V1PropertyController.java | 8 ++++---- .../controller/api/v1/V1SearchController.java | 2 +- .../controller/api/v1/V1SelectController.java | 2 +- .../controller/api/v1/V1SuggestController.java | 2 +- .../controller/api/v2/V2ClassController.java | 2 +- .../controller/api/v2/V2EntityController.java | 2 +- .../api/v2/V2IndividualController.java | 2 +- .../api/v2/V2OntologyController.java | 2 +- .../api/v2/V2PropertyController.java | 2 +- .../api/v2/V2StatisticsController.java | 2 +- 14 files changed, 25 insertions(+), 25 deletions(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1IndividualController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1IndividualController.java index c8b900379..7c967b65b 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1IndividualController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1IndividualController.java @@ -28,7 +28,7 @@ * @date 18/08/2015 * Samples, Phenotypes and Ontologies Team, EMBL-EBI */ -@Controller +@RestController @RequestMapping("/api/individuals") @ExposesResourceFor(V1Individual.class) public class V1IndividualController implements @@ -81,7 +81,7 @@ HttpEntity<PagedModel<V1Individual>> getAllIndividuals( return new ResponseEntity<>(assembler.toModel(terms, individualAssembler), HttpStatus.OK); } - + @RequestMapping(path = "/findByIdAndIsDefiningOntology/{id}", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) HttpEntity<PagedModel<V1Individual>> getAllIndividualsByIdAndIsDefiningOntology( @PathVariable("id") String termId, @@ -92,11 +92,11 @@ HttpEntity<PagedModel<V1Individual>> getAllIndividualsByIdAndIsDefiningOntology( decoded = UriUtils.decode(termId, "UTF-8"); return getAllIndividualsByIdAndIsDefiningOntology(decoded, null, null, lang, pageable, assembler); - } - - - @RequestMapping(path = "/findByIdAndIsDefiningOntology", - produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, + } + + + @RequestMapping(path = "/findByIdAndIsDefiningOntology", + produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) HttpEntity<PagedModel<V1Individual>> getAllIndividualsByIdAndIsDefiningOntology( @RequestParam(value = "iri", required = false) String iri, @@ -120,11 +120,11 @@ HttpEntity<PagedModel<V1Individual>> getAllIndividualsByIdAndIsDefiningOntology( return new ResponseEntity<>(assembler.toModel(terms, individualAssembler), HttpStatus.OK); } - + @ResponseStatus(value = HttpStatus.NOT_FOUND, reason = "EntityModel not found") @ExceptionHandler(ResourceNotFoundException.class) public void handleError(HttpServletRequest req, Exception exception) { } -} \ No newline at end of file +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java index a7bb975fc..89b708a69 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java @@ -31,7 +31,7 @@ * @date 19/08/2015 * 
Samples, Phenotypes and Ontologies Team, EMBL-EBI */ -@Controller +@RestController @RequestMapping("/api/ontologies") @ExposesResourceFor(V1Ontology.class) public class V1OntologyController implements diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyIndividualController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyIndividualController.java index 5ea636170..75b5ca482 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyIndividualController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyIndividualController.java @@ -34,7 +34,7 @@ * @date 02/11/15 * Samples, Phenotypes and Ontologies Team, EMBL-EBI */ -@Controller +@RestController @RequestMapping("/api/ontologies") public class V1OntologyIndividualController { diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyPropertyController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyPropertyController.java index 9e7441d7e..e01786710 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyPropertyController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyPropertyController.java @@ -27,7 +27,7 @@ import javax.servlet.http.HttpServletRequest; import java.util.Arrays; -@Controller +@RestController @RequestMapping("/api/ontologies") public class V1OntologyPropertyController { diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1PropertyController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1PropertyController.java index b6dce598a..3bc5a7fdc 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1PropertyController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1PropertyController.java @@ -23,7 +23,7 @@ import javax.servlet.http.HttpServletRequest; -@Controller +@RestController @RequestMapping("/api/properties") @ExposesResourceFor(V1Property.class) public class V1PropertyController implements @@ -93,8 +93,8 @@ HttpEntity<PagedModel<V1Property>> getPropertiesByIriAndIsDefiningOntology(@Path String decoded = null; decoded = UriUtils.decode(termId, "UTF-8"); return getPropertiesByIdAndIsDefiningOntology(decoded, null, null, lang, pageable, assembler); - } - + } + @RequestMapping(path = "/findByIdAndIsDefiningOntology", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) HttpEntity<PagedModel<V1Property>> getPropertiesByIdAndIsDefiningOntology( @RequestParam(value = "iri", required = false) String iri, @@ -121,7 +121,7 @@ else if (oboId != null) { return new ResponseEntity<>( assembler.toModel(terms, termAssembler), HttpStatus.OK); } - + @ResponseStatus(value = HttpStatus.NOT_FOUND, reason = "EntityModel not found") @ExceptionHandler(ResourceNotFoundException.class) public void handleError(HttpServletRequest req, Exception exception) { diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java index a1f4ac641..bef135771 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java @@ -35,7 +35,7 @@ * @date 02/07/2015 * Samples, Phenotypes and Ontologies Team, EMBL-EBI */ -@Controller +@RestController public class V1SearchController { Gson gson = new Gson(); diff --git 
a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java index eeb0008ed..a1e344d8c 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java @@ -28,7 +28,7 @@ import java.util.function.Function; import java.util.stream.Collectors; -@Controller +@RestController public class V1SelectController { Gson gson = new Gson(); diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SuggestController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SuggestController.java index 2cc170795..018ed1bc2 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SuggestController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SuggestController.java @@ -22,7 +22,7 @@ import java.nio.charset.StandardCharsets; import java.util.*; -@Controller +@RestController public class V1SuggestController { Gson gson = new Gson(); diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2ClassController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2ClassController.java index d8b8b4fde..ec0e4fd6e 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2ClassController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2ClassController.java @@ -35,7 +35,7 @@ import java.util.List; import java.util.Map; -@Controller +@RestController @RequestMapping("/api/v2") public class V2ClassController { diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2EntityController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2EntityController.java index 6c760ae8e..11ad5e444 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2EntityController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2EntityController.java @@ -28,7 +28,7 @@ import java.util.List; import java.util.Map; -@Controller +@RestController @RequestMapping("/api/v2") public class V2EntityController { diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2IndividualController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2IndividualController.java index bff23a360..e8342f2cd 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2IndividualController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2IndividualController.java @@ -29,7 +29,7 @@ import java.util.List; import java.util.Map; -@Controller +@RestController @RequestMapping("/api/v2") public class V2IndividualController { diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologyController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologyController.java index 6a9c8501e..ce996db00 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologyController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologyController.java @@ -29,7 +29,7 @@ import java.util.List; import java.util.Map; -@Controller +@RestController @RequestMapping("/api/v2/ontologies") public class V2OntologyController { diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2PropertyController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2PropertyController.java index 
ee847f3cd..9d690a53c 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2PropertyController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2PropertyController.java @@ -25,7 +25,7 @@ import java.util.List; import java.util.Map; -@Controller +@RestController @RequestMapping("/api/v2") public class V2PropertyController { diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java index 0af7b2460..31b51eea0 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java @@ -20,7 +20,7 @@ import java.util.HashMap; import java.util.Map; -@Controller +@RestController @RequestMapping("/api/v2/stats") public class V2StatisticsController { From 8811ab9ae4b9ee5e1091611ae1b4ede2df08c229 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Wed, 27 Dec 2023 14:30:21 +0100 Subject: [PATCH 002/146] added imports --- .../ac/ebi/spot/ols/controller/api/v1/V1ApiUnavailable.java | 5 ++--- .../ebi/spot/ols/controller/api/v1/V1SearchController.java | 4 +--- .../ebi/spot/ols/controller/api/v1/V1SelectController.java | 4 +--- .../ebi/spot/ols/controller/api/v1/V1SuggestController.java | 4 +--- .../ac/ebi/spot/ols/controller/api/v2/V2ClassController.java | 5 +---- .../ebi/spot/ols/controller/api/v2/V2EntityController.java | 5 +---- .../spot/ols/controller/api/v2/V2IndividualController.java | 5 +---- .../ebi/spot/ols/controller/api/v2/V2OntologyController.java | 5 +---- .../spot/ols/controller/api/v2/V2StatisticsController.java | 3 +-- 9 files changed, 10 insertions(+), 30 deletions(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1ApiUnavailable.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1ApiUnavailable.java index 4452d07bf..dabe0309e 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1ApiUnavailable.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1ApiUnavailable.java @@ -6,8 +6,7 @@ import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Controller; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.*; import javax.servlet.http.HttpServletResponse; @@ -16,7 +15,7 @@ * @date 27/09/2016 * Samples, Phenotypes and Ontologies Team, EMBL-EBI */ -@Controller +@RestController public class V1ApiUnavailable { @RequestMapping(path = "/api/unavailable", produces = {MediaType.APPLICATION_JSON_VALUE}, method = RequestMethod.GET) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java index bef135771..e4c0273a0 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java @@ -11,9 +11,7 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.MediaType; import org.springframework.stereotype.Controller; -import org.springframework.web.bind.annotation.RequestMapping; -import 
org.springframework.web.bind.annotation.RequestMethod; -import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.*; import uk.ac.ebi.spot.ols.repository.Validation; import uk.ac.ebi.spot.ols.repository.solr.OlsSolrClient; import uk.ac.ebi.spot.ols.repository.transforms.LocalizationTransform; diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java index a1e344d8c..300640643 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java @@ -11,9 +11,7 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.MediaType; import org.springframework.stereotype.Controller; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; -import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.*; import uk.ac.ebi.spot.ols.repository.Validation; import uk.ac.ebi.spot.ols.repository.solr.OlsSolrClient; import uk.ac.ebi.spot.ols.repository.transforms.LocalizationTransform; diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SuggestController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SuggestController.java index 018ed1bc2..74db8821a 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SuggestController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SuggestController.java @@ -10,9 +10,7 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.MediaType; import org.springframework.stereotype.Controller; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; -import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.*; import uk.ac.ebi.spot.ols.repository.Validation; import uk.ac.ebi.spot.ols.repository.solr.OlsSolrClient; import uk.ac.ebi.spot.ols.repository.v1.V1OntologyRepository; diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2ClassController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2ClassController.java index ec0e4fd6e..3c1fd9e9a 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2ClassController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2ClassController.java @@ -15,10 +15,7 @@ import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Controller; import org.springframework.util.MultiValueMap; -import org.springframework.web.bind.annotation.PathVariable; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; -import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.*; import org.springframework.web.util.UriUtils; import uk.ac.ebi.spot.ols.controller.api.v2.helpers.DynamicQueryHelper; import uk.ac.ebi.spot.ols.controller.api.v2.responses.V2PagedAndFacetedResponse; diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2EntityController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2EntityController.java index 
11ad5e444..8d8c254bd 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2EntityController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2EntityController.java @@ -10,10 +10,7 @@ import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Controller; import org.springframework.util.MultiValueMap; -import org.springframework.web.bind.annotation.PathVariable; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; -import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.*; import org.springframework.web.util.UriUtils; import uk.ac.ebi.spot.ols.controller.api.v2.helpers.DynamicQueryHelper; import uk.ac.ebi.spot.ols.controller.api.v2.responses.V2PagedAndFacetedResponse; diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2IndividualController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2IndividualController.java index e8342f2cd..c381840b5 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2IndividualController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2IndividualController.java @@ -11,10 +11,7 @@ import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Controller; import org.springframework.util.MultiValueMap; -import org.springframework.web.bind.annotation.PathVariable; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; -import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.*; import org.springframework.web.util.UriUtils; import uk.ac.ebi.spot.ols.controller.api.v2.helpers.DynamicQueryHelper; import uk.ac.ebi.spot.ols.controller.api.v2.responses.V2PagedAndFacetedResponse; diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologyController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologyController.java index ce996db00..1d322cd1d 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologyController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologyController.java @@ -12,10 +12,7 @@ import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Controller; -import org.springframework.web.bind.annotation.PathVariable; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; -import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.*; import uk.ac.ebi.spot.ols.controller.api.v2.helpers.DynamicQueryHelper; import uk.ac.ebi.spot.ols.controller.api.v2.responses.V2PagedAndFacetedResponse; import uk.ac.ebi.spot.ols.model.v2.V2Entity; diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java index 31b51eea0..f676af1dc 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java @@ -11,8 +11,7 @@ import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; import 
org.springframework.stereotype.Controller; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.*; import uk.ac.ebi.spot.ols.model.v2.V2Statistics; import uk.ac.ebi.spot.ols.repository.solr.OlsSolrClient; From 865418e08b53be6ae63c13903130da95e5da5940 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Wed, 3 Jan 2024 14:49:54 +0100 Subject: [PATCH 003/146] added skos tree generation controllers --- .../ols/controller/api/v1/TopConceptEnum.java | 7 + .../v1/V1OntologySKOSConceptController.java | 296 ++++++++++++++++++ .../ebi/spot/ols/repository/v1/TreeNode.java | 108 +++++++ .../ols/repository/v1/V1TermRepository.java | 233 ++++++++++++++ dataload/configs/skos_ontologies.json | 113 +++++++ 5 files changed, 757 insertions(+) create mode 100644 backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/TopConceptEnum.java create mode 100644 backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologySKOSConceptController.java create mode 100644 backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/TreeNode.java create mode 100644 dataload/configs/skos_ontologies.json diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/TopConceptEnum.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/TopConceptEnum.java new file mode 100644 index 000000000..95aceccbc --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/TopConceptEnum.java @@ -0,0 +1,7 @@ +package uk.ac.ebi.spot.ols.controller.api.v1; + +public enum TopConceptEnum { + SCHEMA, + TOPCONCEPTOF_PROPERTY, + RELATIONSHIPS, +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologySKOSConceptController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologySKOSConceptController.java new file mode 100644 index 000000000..6f2dd346a --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologySKOSConceptController.java @@ -0,0 +1,296 @@ +package uk.ac.ebi.spot.ols.controller.api.v1; + +import io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Schema; +import io.swagger.v3.oas.annotations.tags.Tag; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.PageImpl; +import org.springframework.data.domain.Pageable; +import org.springframework.data.rest.webmvc.ResourceNotFoundException; +import org.springframework.data.web.PagedResourcesAssembler; +import org.springframework.hateoas.MediaTypes; +import org.springframework.hateoas.PagedModel; +import org.springframework.http.HttpEntity; +import org.springframework.http.HttpStatus; +import org.springframework.http.MediaType; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.*; +import org.springframework.web.util.UriUtils; + +import uk.ac.ebi.spot.ols.model.v1.V1Term; +import uk.ac.ebi.spot.ols.repository.v1.TreeNode; +import uk.ac.ebi.spot.ols.repository.v1.V1TermRepository; + +import javax.servlet.http.HttpServletRequest; +import java.util.ArrayList; +import java.util.List; + +/** + * @author Simon Jupp + * @date 02/11/15 + * Samples, Phenotypes and Ontologies Team, EMBL-EBI + */ +@RestController 
+@RequestMapping("/api/ontologies") +@Tag(name = "v1-ontology-skos-controller", description = "SKOS concept hierarchies and relations extracted from individuals (instances) from a particular ontology in this service") +public class V1OntologySKOSConceptController { + + private Logger log = LoggerFactory.getLogger(getClass()); + + @Autowired + private V1TermRepository termRepository; + + @Autowired + V1TermAssembler termAssembler; + + @Operation(description = "Get complete SKOS concept hierarchy or only top concepts based on alternative top concept identification methods and concept relations. If only top concepts are identified, they can be used to extract the following levels of the concept tree one by one using the /{onto}/conceptrelations/{iri} method with broader or narrower concept relations.") + @RequestMapping(path = "/{onto}/concepthierarchy", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + HttpEntity>> getSKOSConceptHierarchyByOntology( + @Parameter(description = "ontology ID", required = true) + @PathVariable("onto") String ontologyId, + @Parameter(description = "infer top concepts by schema (hasTopConcept) or TopConceptOf property or broader/narrower relationships", required = true) + @RequestParam(value = "find_roots", required = true, defaultValue = "SCHEMA") TopConceptEnum topConceptIdentification, + @Parameter(description = "infer from narrower or broader relationships", required = true) + @RequestParam(value = "narrower", required = true, defaultValue = "false") boolean narrower, + @Parameter(description = "Extract the whole tree with children or only the top concepts", required = true) + @RequestParam(value = "with_children", required = true, defaultValue = "false") boolean withChildren, + @RequestParam(value = "obsoletes", required = false) Boolean obsoletes, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + Pageable pageable) { + ontologyId = ontologyId.toLowerCase(); + if (TopConceptEnum.RELATIONSHIPS == topConceptIdentification) + return new ResponseEntity<>(termRepository.conceptTreeWithoutTop(ontologyId,narrower,withChildren,obsoletes,lang,pageable), HttpStatus.OK); + else + return new ResponseEntity<>(termRepository.conceptTree(ontologyId,TopConceptEnum.SCHEMA == topConceptIdentification,narrower, withChildren,obsoletes,lang,pageable), HttpStatus.OK); + } + + @Operation(description = "Display complete SKOS concept hierarchy or only top concepts based on alternative top concept identification methods and concept relations. 
If only top concepts are identified, they can be used to extract the following levels of the concept tree one by one using the /{onto}/displayconceptrelations/{iri} method with broader or narrower concept relations.") + @RequestMapping(path = "/{onto}/displayconcepthierarchy", produces = {MediaType.TEXT_PLAIN_VALUE}, method = RequestMethod.GET) + @ResponseBody + HttpEntity<String> displaySKOSConceptHierarchyByOntology( + @Parameter(description = "ontology ID", required = true) + @PathVariable("onto") String ontologyId, + @Parameter(description = "infer top concepts by schema (hasTopConcept) or TopConceptOf property or broader/narrower relationships", required = true) + @RequestParam(value = "find_roots", required = true, defaultValue = "SCHEMA") TopConceptEnum topConceptIdentification, + @Parameter(description = "infer from narrower or broader relationships", required = true) + @RequestParam(value = "narrower", required = true, defaultValue = "false") boolean narrower, + @Parameter(description = "Extract the whole tree with children or only the top concepts", required = true) + @RequestParam(value = "with_children", required = true, defaultValue = "false") boolean withChildren, + @Parameter(description = "display related concepts", required = true) + @RequestParam(value = "display_related", required = true, defaultValue = "false") boolean displayRelated, + @RequestParam(value = "obsoletes", required = false) Boolean obsoletes, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + Pageable pageable) { + ontologyId = ontologyId.toLowerCase(); + List<TreeNode<V1Term>> rootIndividuals = null; + if(TopConceptEnum.RELATIONSHIPS == topConceptIdentification) + rootIndividuals = termRepository.conceptTreeWithoutTop(ontologyId,narrower,withChildren,obsoletes,lang,pageable); + else + rootIndividuals = termRepository.conceptTree(ontologyId,TopConceptEnum.SCHEMA == topConceptIdentification,narrower, withChildren,obsoletes,lang,pageable); + StringBuilder sb = new StringBuilder(); + for (TreeNode<V1Term> root : rootIndividuals) { + sb.append(root.getIndex() + " , "+ root.getData().label + " , " + root.getData().iri).append("\n"); + sb.append(generateConceptHierarchyTextByOntology(root, displayRelated)); + } + + return new HttpEntity<String>(sb.toString()); + } + + @Operation(description = "Get partial SKOS concept hierarchy based on the encoded iri of the designated top concept") + @RequestMapping(path = "/{onto}/concepthierarchy/{iri}", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + HttpEntity<TreeNode<V1Term>> getSKOSConceptHierarchyByOntologyAndIri( + @Parameter(description = "ontology ID", required = true) + @PathVariable("onto") String ontologyId, + @Parameter(description = "encoded concept IRI", required = true) + @PathVariable("iri") String iri, + @Parameter(description = "infer from narrower or broader relationships", required = true) + @RequestParam(value = "narrower", required = true, defaultValue = "false") boolean narrower, + @Parameter(description = "index value for the root term", required = true) + @RequestParam(value = "index", required = true, defaultValue = "1") String index, + @RequestParam(value = "obsoletes", required = false) Boolean obsoletes, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + Pageable pageable) { + ontologyId = ontologyId.toLowerCase(); + TreeNode<V1Term> topConcept = new TreeNode<V1Term>(new V1Term()); + String decodedIri; + decodedIri = UriUtils.decode(iri, "UTF-8"); + topConcept = 
termRepository.conceptSubTree(ontologyId, decodedIri, narrower, index, obsoletes, lang, pageable); + + if (topConcept.getData().iri == null) + throw new ResourceNotFoundException("No roots could be found for " + ontologyId ); + return new ResponseEntity<>(topConcept, HttpStatus.OK); + } + + @Operation(description = "Display partial SKOS concept hierarchy based on the encoded iri of the designated top concept") + @RequestMapping(path = "/{onto}/displayconcepthierarchy/{iri}", produces = {MediaType.TEXT_PLAIN_VALUE}, method = RequestMethod.GET) + @ResponseBody + HttpEntity<String> displaySKOSConceptHierarchyByOntologyAndIri( + @Parameter(description = "ontology ID", required = true) + @PathVariable("onto") String ontologyId, + @Parameter(description = "encoded concept IRI", required = true) + @PathVariable("iri") String iri, + @Parameter(description = "infer from narrower or broader relationships", required = true) + @RequestParam(value = "narrower", required = true, defaultValue = "false") boolean narrower, + @Parameter(description = "display related concepts", required = true) + @RequestParam(value = "display_related", required = true, defaultValue = "false") boolean displayRelated, + @Parameter(description = "index value for the root term", required = true) + @RequestParam(value = "index", required = true, defaultValue = "1") String index, + @RequestParam(value = "obsoletes", required = false) Boolean obsoletes, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + Pageable pageable) { + ontologyId = ontologyId.toLowerCase(); + TreeNode<V1Term> topConcept = new TreeNode<V1Term>(new V1Term()); + String decodedIri; + StringBuilder sb = new StringBuilder(); + decodedIri = UriUtils.decode(iri, "UTF-8"); + topConcept = termRepository.conceptSubTree(ontologyId, decodedIri, narrower, index, obsoletes, lang, pageable); + + sb.append(topConcept.getIndex() + " , "+ topConcept.getData().label + " , " + topConcept.getData().iri).append("\n"); + sb.append(generateConceptHierarchyTextByOntology(topConcept, displayRelated)); + + return new HttpEntity<String>(sb.toString()); + } + + @Operation(description = "Broader, Narrower and Related concept relations of a concept are listed in JSON if the concept iri is provided in encoded format.") + @RequestMapping(path = "/{onto}/conceptrelations/{iri}", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + public HttpEntity<PagedModel<V1Term>> findRelatedConcepts( + @Parameter(description = "ontology ID", required = true) + @PathVariable("onto") String ontologyId, + @Parameter(description = "encoded concept IRI", required = true) + @PathVariable("iri") String iri, + @Parameter(description = "SKOS-based concept relation type", required = true) + @RequestParam(value = "relation_type", required = true, defaultValue = "broader") + @Schema(type = "string", allowableValues = { "broader", "narrower", "related" }) String relationType, + @RequestParam(value = "obsoletes", required = false) Boolean obsoletes, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + Pageable pageable, + PagedResourcesAssembler<V1Term> assembler) { + + ontologyId = ontologyId.toLowerCase(); + List<V1Term> related = new ArrayList<V1Term>(); + String decodedIri = UriUtils.decode(iri, "UTF-8"); + related = termRepository.findRelated(ontologyId, decodedIri, relationType,lang); + + + final int start = (int)pageable.getOffset(); + final int end = Math.min((start + pageable.getPageSize()), related.size()); + Page<V1Term> conceptPage = new 
PageImpl<>(related.subList(start, end), pageable, related.size()); + + return new ResponseEntity<>( assembler.toModel(conceptPage), HttpStatus.OK); + + } + @Operation(description = "Broader, Narrower and Related concept relations of a concept are displayed as text if the concept iri is provided in encoded format.") + @RequestMapping(path = "/{onto}/displayconceptrelations/{iri}", produces = {MediaType.TEXT_PLAIN_VALUE}, method = RequestMethod.GET) + @ResponseBody + public HttpEntity<String> displayRelatedConcepts( + @Parameter(description = "ontology ID", required = true) + @PathVariable("onto") String ontologyId, + @Parameter(description = "encoded concept IRI", required = true) + @PathVariable("iri") String iri, + @Parameter(description = "SKOS-based concept relation type", required = true) + @RequestParam(value = "relation_type", required = true, defaultValue = "broader") + @Schema(type = "string", allowableValues = { "broader", "narrower", "related" }) String relationType, + @RequestParam(value = "obsoletes", required = false) Boolean obsoletes, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + Pageable pageable, + PagedResourcesAssembler<V1Term> assembler) { + StringBuilder sb = new StringBuilder(); + ontologyId = ontologyId.toLowerCase(); + List<V1Term> related = new ArrayList<V1Term>(); + String decodedIri = UriUtils.decode(iri, "UTF-8"); + related = termRepository.findRelated(ontologyId, decodedIri, relationType,lang); + + final int start = (int)pageable.getOffset(); + final int end = Math.min((start + pageable.getPageSize()), related.size()); + Page<V1Term> conceptPage = new PageImpl<>(related.subList(start, end), pageable, related.size()); + int count = 0; + for (V1Term individual : conceptPage.getContent()) + sb.append(++count).append(" , ").append(individual.label).append(" , ").append(individual.iri).append("\n"); + + return new HttpEntity<>( sb.toString()); + + } + + @Operation(description = "Broader, Narrower and Related concept relations of a concept are listed in JSON if the concept iri is provided in encoded format. The relationship is identified indirectly based on the related concept's relation to the concept in question. This requires traversing all the available concepts and checking if they are related to the concept in question. For this reason, this method is relatively slower than the conceptrelations method. 
Nevertheless, it makes it possible to identify unforeseen relations of the concept in question.") + @RequestMapping(path = "/{onto}/conceptrelationsindirectly/{iri}", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + public HttpEntity<List<V1Term>> findRelatedConceptsIndirectly( + @Parameter(description = "ontology ID", required = true) + @PathVariable("onto") String ontologyId, + @Parameter(description = "encoded concept IRI", required = true) + @PathVariable("iri") String iri, + @Parameter(description = "SKOS-based concept relation type", required = true) + @RequestParam(value = "relation_type", required = true, defaultValue = "broader") + @Schema(type = "string", allowableValues = { "broader", "narrower", "related" }) String relationType, + @RequestParam(value = "obsoletes", required = false) Boolean obsoletes, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + Pageable pageable) { + + ontologyId = ontologyId.toLowerCase(); + List<V1Term> related = new ArrayList<V1Term>(); + String decodedIri = UriUtils.decode(iri, "UTF-8"); + related = termRepository.findRelatedIndirectly(ontologyId, decodedIri, relationType, obsoletes,lang,pageable); + + return new ResponseEntity<>( related, HttpStatus.OK); + + } + + @Operation(description = "Broader, Narrower and Related concept relations of a concept are displayed as text if the concept iri is provided in encoded format. The relationship is identified indirectly based on the related concept's relation to the concept in question. This requires traversing all the available concepts and checking if they are related to the concept in question. For this reason, this method is relatively slower than the displayconceptrelations method. Nevertheless, it makes it possible to identify unforeseen relations of the concept in question.") + @RequestMapping(path = "/{onto}/displayconceptrelationsindirectly/{iri}", produces = {MediaType.TEXT_PLAIN_VALUE}, method = RequestMethod.GET) + @ResponseBody + public HttpEntity<String> displayRelatedConceptsIndirectly( + @Parameter(description = "ontology ID", required = true) + @PathVariable("onto") String ontologyId, + @Parameter(description = "encoded concept IRI", required = true) + @PathVariable("iri") String iri, + @Parameter(description = "SKOS-based concept relation type", required = true) + @RequestParam(value = "relation_type", required = true, defaultValue = "broader") + @Schema(type = "string", allowableValues = { "broader", "narrower", "related" }) String relationType, + @Parameter(description = "include obsolete terms", required = false) + @RequestParam(value = "obsoletes", required = false) Boolean obsoletes, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + Pageable pageable) { + StringBuilder sb = new StringBuilder(); + ontologyId = ontologyId.toLowerCase(); + List<V1Term> related = new ArrayList<V1Term>(); + String decodedIri = UriUtils.decode(iri, "UTF-8"); + related = termRepository.findRelatedIndirectly(ontologyId, decodedIri, relationType, obsoletes,lang,pageable); + + int count = 0; + for (V1Term individual : related) + sb.append(++count).append(" , ").append(individual.label).append(" , ").append(individual.iri).append("\n"); + + + return new ResponseEntity<>( sb.toString(), HttpStatus.OK); + + } + + public StringBuilder generateConceptHierarchyTextByOntology(TreeNode<V1Term> rootConcept, boolean displayRelated) { + StringBuilder sb = new StringBuilder(); + for (TreeNode<V1Term> childConcept : rootConcept.getChildren()) { + sb.append(childConcept.getIndex() + " 
, "+ childConcept.getData().label + " , " + childConcept.getData().iri).append("\n"); + sb.append(generateConceptHierarchyTextByOntology(childConcept,displayRelated)); + } + if(displayRelated) + for (TreeNode relatedConcept : rootConcept.getRelated()) { + sb.append(relatedConcept.getIndex() + " , "+ relatedConcept.getData().label + " , " + relatedConcept.getData().iri).append("\n"); + sb.append(generateConceptHierarchyTextByOntology(relatedConcept,displayRelated)); + } + return sb; + } + + @RequestMapping(method = RequestMethod.GET, produces = {MediaType.TEXT_PLAIN_VALUE}, value = "/removeConceptTreeCache") + public HttpEntity removeConceptTreeCache() { + return new HttpEntity(termRepository.removeConceptTreeCache()); + } + + @ResponseStatus(value = HttpStatus.NOT_FOUND, reason = "Resource not found") + @ExceptionHandler(ResourceNotFoundException.class) + public void handleError(HttpServletRequest req, Exception exception) { + } + +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/TreeNode.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/TreeNode.java new file mode 100644 index 000000000..1ca07e9b3 --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/TreeNode.java @@ -0,0 +1,108 @@ +package uk.ac.ebi.spot.ols.repository.v1; + +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Collection; + +public class TreeNode implements Serializable { + + /** + * + */ + private static final long serialVersionUID = -343190255910189166L; + private Collection> children = new ArrayList>(); + private Collection> related = new ArrayList>(); + private Collection> parent = new ArrayList>(); + private String index; + private T data = null; + + public TreeNode(T data) { + this.data = data; + } + + public TreeNode(T data, Collection> parent) { + this.data = data; + this.parent = parent; + } + + public Collection> getChildren() { + return children; + } + public void setChildren(Collection> children) { + this.children = children; + } + + public void addChild(T data) { + TreeNode child = new TreeNode(data); + this.children.add(child); + } + + public void addChild(TreeNode child) { + this.children.add(child); + } + + public void addRelated(T data) { + TreeNode related = new TreeNode(data); + this.related.add(related); + } + + public void addRelated(TreeNode related) { + this.related.add(related); + } + + public void addParent(T data) { + TreeNode parent = new TreeNode(data); + this.parent.add(parent); + } + + public void addParent(TreeNode parent) { + this.parent.add(parent); + } + + public Collection> getRelated() { + return related; + } + public void setRelated(Collection> related) { + this.related = related; + } + public Collection> getParent() { + return parent; + } + public void setParent(Collection> parent) { + this.parent = parent; + } + public String getIndex() { + return index; + } + public void setIndex(String index) { + this.index = index; + } + + public T getData() { + return this.data; + } + + public void setData(T data) { + this.data = data; + } + + public boolean isRoot() { + return this.parent.size() == 0; + } + + public boolean isLeaf() { + return this.children.size() == 0; + } + + public void resetParent() { + this.parent = new ArrayList>(); + } + + public void resetChildren() { + this.children = new ArrayList>(); + } + + public void resetRelated() { + this.related = new ArrayList>(); + } +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1TermRepository.java 
b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1TermRepository.java index f4965be37..ec078c81d 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1TermRepository.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1TermRepository.java @@ -4,7 +4,10 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.cache.annotation.CacheEvict; +import org.springframework.cache.annotation.Cacheable; import org.springframework.data.domain.Page; +import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.Pageable; import org.springframework.stereotype.Component; import uk.ac.ebi.spot.ols.model.v1.V1Individual; @@ -16,9 +19,13 @@ import uk.ac.ebi.spot.ols.repository.solr.SearchType; import uk.ac.ebi.spot.ols.repository.v1.mappers.V1TermMapper; +import java.util.ArrayList; import java.util.Arrays; +import java.util.HashSet; +import java.util.LinkedHashSet; import java.util.List; import java.util.Map; +import java.util.Set; @Component public class V1TermRepository { @@ -290,5 +297,231 @@ public Page<V1Term> findAllByOboIdAndIsDefiningOntology(String oboId, String lan public Page<V1Individual> getInstances(String ontologyId, String iri, Pageable pageable) { throw new RuntimeException(); } + + + + @Cacheable(value = "concepttree", key="#ontologyId.concat('-').concat(#schema).concat('-').concat(#narrower).concat('-').concat(#withChildren)") + public List<TreeNode<V1Term>> conceptTree (String ontologyId, boolean schema, boolean narrower, boolean withChildren, Boolean obsoletes, String lang, Pageable pageable){ + Page<V1Term> terms = this.findAllByOntology(ontologyId, obsoletes, lang, pageable); + List<V1Term> listOfTerms = new ArrayList<V1Term>(); + listOfTerms.addAll(terms.getContent()); + + while(terms.hasNext()) { + terms = this.findAllByOntology(ontologyId, obsoletes, lang, terms.nextPageable()); + listOfTerms.addAll(terms.getContent()); + } + + List<TreeNode<V1Term>> rootIndividuals = new ArrayList<TreeNode<V1Term>>(); + int count = 0; + + if(schema) { + for (V1Term indiv : listOfTerms) + if (indiv.annotation.get("hasTopConcept") != null) { + for (String iriTopConcept : (LinkedHashSet<String>) indiv.annotation.get("hasTopConcept")) { + V1Term topConceptIndividual = findIndividual(listOfTerms,iriTopConcept); + TreeNode<V1Term> topConcept = new TreeNode<V1Term>(topConceptIndividual); + topConcept.setIndex(String.valueOf(++count)); + if(withChildren) { + if(narrower) + populateChildrenandRelatedByNarrower(topConceptIndividual,topConcept,listOfTerms); + else + populateChildrenandRelatedByBroader(topConceptIndividual,topConcept,listOfTerms); + } + rootIndividuals.add(topConcept); + } + } + } else for (V1Term individual : listOfTerms) { + TreeNode<V1Term> tree = new TreeNode<V1Term>(individual); + + if (tree.isRoot() && individual.annotation.get("topConceptOf") != null) { + tree.setIndex(String.valueOf(++count)); + if(withChildren) { + if(narrower) + populateChildrenandRelatedByNarrower(individual,tree,listOfTerms); + else + populateChildrenandRelatedByBroader(individual,tree,listOfTerms); + } + rootIndividuals.add(tree); + } + } + + return rootIndividuals; + } + + @Cacheable(value = "concepttree", key="#ontologyId.concat('-').concat(#narrower).concat('-').concat(#withChildren)") + public List<TreeNode<V1Term>> conceptTreeWithoutTop (String ontologyId, boolean narrower, boolean withChildren, Boolean obsoletes, String lang, Pageable pageable){ + Page<V1Term> terms = this.findAllByOntology(ontologyId, obsoletes, lang, pageable); + List<V1Term> listOfTerms = new ArrayList<V1Term>(); + 
listOfTerms.addAll(terms.getContent()); + + while(terms.hasNext()) { + terms = this.findAllByOntology(ontologyId, obsoletes, lang, terms.nextPageable()); + listOfTerms.addAll(terms.getContent()); + } + + Set<String> rootIRIs = new HashSet<String>(); + List<TreeNode<V1Term>> rootIndividuals = new ArrayList<TreeNode<V1Term>>(); + int count = 0; + if(!narrower) { + for (V1Term individual : listOfTerms) { + if (individual.annotation.get("broader") != null) { + for (String iriBroader : (LinkedHashSet<String>) individual.annotation.get("broader")) { + V1Term broaderIndividual = findIndividual(listOfTerms,iriBroader); + if (broaderIndividual.annotation.get("broader") == null) { + rootIRIs.add(iriBroader); + } + } + } + } + + for (String iri : rootIRIs) { + V1Term topConceptIndividual = findIndividual(listOfTerms, iri); + TreeNode<V1Term> topConcept = new TreeNode<V1Term>(topConceptIndividual); + topConcept.setIndex(String.valueOf(++count)); + if(withChildren) + populateChildrenandRelatedByBroader(topConceptIndividual,topConcept,listOfTerms); + rootIndividuals.add(topConcept); + } + + } else { + for (V1Term individual : listOfTerms) { + if (individual.annotation.get("narrower") != null) { + boolean root = true; + for (V1Term indiv : listOfTerms) { + if (indiv.annotation.get("narrower") != null) { + for (String iriNarrower : (LinkedHashSet<String>) indiv.annotation.get("narrower")) { + if (individual.iri.equals(iriNarrower)) + root = false; + } + } + } + + if(root) { + TreeNode<V1Term> topConcept = new TreeNode<V1Term>(individual); + topConcept.setIndex(String.valueOf(++count)); + if(withChildren) + populateChildrenandRelatedByNarrower(individual,topConcept,listOfTerms); + rootIndividuals.add(topConcept); + } + } + } + } + + return rootIndividuals; + } + + @Cacheable(value = "concepttree", key="#ontologyId.concat('-').concat('s').concat('-').concat(#iri).concat('-').concat(#narrower).concat('-').concat(#index)") + public TreeNode<V1Term> conceptSubTree(String ontologyId, String iri, boolean narrower, String index, Boolean obsoletes, String lang, Pageable pageable){ + Page<V1Term> terms = this.findAllByOntology(ontologyId, obsoletes, lang, pageable); + List<V1Term> listOfTerms = new ArrayList<V1Term>(); + listOfTerms.addAll(terms.getContent()); + + while(terms.hasNext()) { + terms = this.findAllByOntology(ontologyId, obsoletes, lang, terms.nextPageable()); + listOfTerms.addAll(terms.getContent()); + } + + V1Term topConceptIndividual = findIndividual(listOfTerms,iri); + TreeNode<V1Term> topConcept = new TreeNode<V1Term>(topConceptIndividual); + topConcept.setIndex(index); + if(narrower) + populateChildrenandRelatedByNarrower(topConceptIndividual,topConcept,listOfTerms); + else + populateChildrenandRelatedByBroader(topConceptIndividual,topConcept,listOfTerms); + + return topConcept; + } + + public V1Term findIndividual(List<V1Term> wholeList, String iri) { + for (V1Term individual : wholeList) + if(individual.iri.equals(iri)) + return individual; + return new V1Term(); + } + + public List<V1Term> findRelated(String ontologyId, String iri, String relationType, String lang) { + List<V1Term> related = new ArrayList<V1Term>(); + V1Term individual = this.findByOntologyAndIri(ontologyId, iri, lang); + if (individual != null) + if (individual.annotation.get(relationType) != null) + for (String iriBroader : (LinkedHashSet<String>) individual.annotation.get(relationType)) + related.add(this.findByOntologyAndIri(ontologyId, iriBroader, lang)); + + return related; + } + + public List<V1Term> findRelatedIndirectly(String ontologyId, String iri, String relationType, Boolean obsoletes, String lang, Pageable pageable){ + List<V1Term> related = new ArrayList<V1Term>(); + + V1Term individual = 
this.findByOntologyAndIri(ontologyId, iri, lang); + if(individual == null) + return related; + if(individual.iri == null) + return related; + + Page<V1Term> terms = this.findAllByOntology(ontologyId, obsoletes, lang, pageable); + List<V1Term> listOfTerms = new ArrayList<V1Term>(); + listOfTerms.addAll(terms.getContent()); + + while(terms.hasNext()) { + terms = this.findAllByOntology(ontologyId, obsoletes, lang, terms.nextPageable()); + listOfTerms.addAll(terms.getContent()); + } + + for (V1Term term : listOfTerms) { + if (term != null) + if (term.annotation.get(relationType) != null) + for (String iriRelated : (LinkedHashSet<String>) term.annotation.get(relationType)) + if(iriRelated.equals(iri)) + related.add(term); + } + + return related; + } + + public void populateChildrenandRelatedByNarrower(V1Term individual, TreeNode<V1Term> tree, List<V1Term> listOfTerms ) { + + if (individual.annotation.get("related") != null) + for (String iriRelated : (LinkedHashSet<String>) individual.annotation.get("related")) { + TreeNode<V1Term> related = new TreeNode<V1Term>(findIndividual(listOfTerms,iriRelated)); + related.setIndex(tree.getIndex()+ ".related"); + tree.addRelated(related); + } + int count = 0; + if (individual.annotation.get("narrower") != null) + for (String iriChild : (LinkedHashSet<String>) individual.annotation.get("narrower")) { + V1Term childIndividual = findIndividual(listOfTerms,iriChild); + TreeNode<V1Term> child = new TreeNode<V1Term>(childIndividual); + child.setIndex(tree.getIndex()+"."+ ++count); + populateChildrenandRelatedByNarrower(childIndividual,child,listOfTerms); + tree.addChild(child); + } + } + + public void populateChildrenandRelatedByBroader(V1Term individual, TreeNode<V1Term> tree, List<V1Term> listOfTerms) { + if (individual.annotation.get("related") != null) + for (String iriRelated : (LinkedHashSet<String>) individual.annotation.get("related")) { + TreeNode<V1Term> related = new TreeNode<V1Term>(findIndividual(listOfTerms,iriRelated)); + related.setIndex(tree.getIndex()+ ".related"); + tree.addRelated(related); + } + int count = 0; + for ( V1Term indiv : listOfTerms) { + if (indiv.annotation.get("broader") != null) + for (String iriBroader : (LinkedHashSet<String>) indiv.annotation.get("broader")) + if(individual.iri != null) + if (individual.iri.equals(iriBroader)) { + TreeNode<V1Term> child = new TreeNode<V1Term>(indiv); + child.setIndex(tree.getIndex()+"."+ ++count); + populateChildrenandRelatedByBroader(indiv,child,listOfTerms); + tree.addChild(child); + } + } + } + + @CacheEvict(value="concepttree", allEntries=true) + public String removeConceptTreeCache() { + return "All concept tree cache removed!"; + } } diff --git a/dataload/configs/skos_ontologies.json b/dataload/configs/skos_ontologies.json new file mode 100644 index 000000000..403f54325 --- /dev/null +++ b/dataload/configs/skos_ontologies.json @@ -0,0 +1,113 @@ +{ + "ontologies": [ + { + "title": "PhySH - Physics Subject Headings", + "preferredPrefix": "physh", + "description": "PhySH (Physics Subject Headings) is a physics classification scheme developed by APS to organize journal, meeting, and other content by topic.", + "homepage": "https://physh.org/", + "tracker": null, + "logo": null, + "annotations": null, + "oboSlims": false, + "preferredRootTerms": [], + "allowDownload": false, + "classifications": null, + "license": null, + "repoUrl": null, + "uri": "https://raw.githubusercontent.com/physh-org/PhySH/master/physh.ttl", + "id": "physh", + "mailing_list": null, + "ontology_purl": "https://raw.githubusercontent.com/physh-org/PhySH/master/physh.ttl", + "reasoner": "NONE", + "label_property": "https://physh.org/rdf/2018/01/01/core#prefLabel", + 
"definition_property": [ + "http://www.w3.org/2004/02/skos/core#definition", + "http://purl.org/dc/terms/description" + ], + "synonym_property": [ + "http://www.w3.org/2004/02/skos/core#altLabel" + ], + "hierarchical_property": [ + "http://www.w3.org/2004/02/skos/core#broader", + "https://physh.org/rdf/2018/01/01/core#inDiscipline", + "https://physh.org/rdf/2018/01/01/core#inFacet" + ], + "base_uri": [ + "https://doi.org/10.29172" + ] + }, + { + "title": "Unified Astronomy Thesaurus (UAT)", + "preferredPrefix": "uat", + "description": "The Unified Astronomy Thesaurus (UAT) is an open, interoperable and community-supported thesaurus which unifies existing, divergent, and isolated controlled vocabularies in astronomy and astrophysics into a single high-quality, freely-available open thesaurus formalizing astronomical concepts and their inter-relationships. The UAT builds upon the IAU Thesaurus with major contributions from the Astronomy portions of the thesauri developed by the Institute of Physics Publishing and the American Institute of Physics. The Unified Astronomy Thesaurus will be further enhanced and updated through a collaborative effort involving broad community participation.", + "homepage": "http://astrothesaurus.org", + "tracker": null, + "logo": null, + "annotations": null, + "oboSlims": false, + "preferredRootTerms": [], + "allowDownload": false, + "classifications": null, + "license": null, + "repoUrl": null, + "uri": "https://raw.githubusercontent.com/astrothesaurus/UAT/master/UAT.rdf", + "id": "uat", + "mailing_list": "sio-ontology@googlegroups.com", + "ontology_purl": "https://raw.githubusercontent.com/astrothesaurus/UAT/master/UAT.rdf", + "reasoner": "NONE", + "label_property": "http://www.w3.org/2004/02/skos/core#prefLabel", + "definition_property": [ + "http://www.w3.org/2004/02/skos/core#definition" + ], + "synonym_property": [ + "http://www.w3.org/2004/02/skos/core#altLabel" + ], + "hierarchical_property": [ + "http://purl.obolibrary.org/obo/BFO_0000050" + ], + "base_uri": [ + "http://astrothesaurus.org/uat" + ] + }, + + + + + { + "title": "Simple Knowledge Organization System (SKOS) version of Materials Data Vocabulary", + "preferredPrefix": "mdv", + "description": "A version of the Materials Data Vocabulary structured as Simple Knowledge Organization System (SKOS). The XML was originally created by the TemaTres software. This vocabulary describes the applicability to material science of records in the NIST Materials Resource Registry (NMRR - https://materials.registry.nist.gov/). The NMRR allows for the registration of materials resources, bridging the gap between existing resources and the end users. The NMRR functions as a node in a federated system, making the registered information available for research to the materials community. This is being developed at the National Institute of Standards and Technology and is made available to solicit comments from the Material Science community. (An Excel version of the file is also included in the distributions for ease of use.) 
Please cite this resource as: Medina-Smith, Andrea; Becker, Chandler (2017), Simple Knowledge Organization System (SKOS) version of Materials Data Vocabulary , National Institute of Standards and Technology, https://doi.org/10.18434/T4/1435037", + "homepage": "https://data.nist.gov/od/id/67C783D4BA814C8EE05324570681708A1899", + "tracker": null, + "logo": null, + "annotations": null, + "oboSlims": false, + "preferredRootTerms": [], + "allowDownload": true, + "classifications": null, + "license": null, + "repoUrl": null, + "id": "mdv", + "ontology_purl": "https://data.nist.gov/od/dm/nmrr/vocab/", + "reasoner": "NONE", + "label_property": "http://www.w3.org/2004/02/skos/core#prefLabel", + "creator": [ + "Andrea Medina-Smith (https://orcid.org/0000-0002-1217-701X)", + "Chandler Becker (https://orcid.org/0000-0002-3653-0199)" + ], + "definition_property": [ + "http://www.w3.org/2004/02/skos/core#definition" + ], + "synonym_property": [ + "http://www.w3.org/2004/02/skos/core#altLabel" + ], + "hierarchical_property": [ + "http://purl.obolibrary.org/obo/BFO_0000050" + ], + "base_uri": [ + "https://data.nist.gov/od/dm/nmrr/vocab" + ] + } + ] +} + From 5db94efdd907fe240aa8d1ef005b26d7aed8a39c Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Wed, 3 Jan 2024 19:26:52 +0100 Subject: [PATCH 004/146] enabled hastopconcept option by making conceptSchema a term --- .../rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java | 1 + 1 file changed, 1 insertion(+) diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java index 275e6a511..c0b69a493 100644 --- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java +++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java @@ -641,6 +641,7 @@ public void handleType(OntologyNode subjNode, Node type) { case "http://www.w3.org/2002/07/owl#Class": case "http://www.w3.org/2000/01/rdf-schema#Class": case "http://www.w3.org/2004/02/skos/core#Concept": + case "http://www.w3.org/2004/02/skos/core#ConceptScheme": subjNode.types.add(OntologyNode.NodeType.CLASS); if(subjNode.uri != null) { ++ numberOfClasses; From fb211c887c9d8bfa01ff3f371ba65fd891511257 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Thu, 15 Feb 2024 15:45:26 +0100 Subject: [PATCH 005/146] implemented graph definition call for visualization --- .../v1/V1OntologySKOSConceptController.java | 179 ++++++++++++++---- .../controller/api/v1/V1SearchController.java | 2 - 2 files changed, 142 insertions(+), 39 deletions(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologySKOSConceptController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologySKOSConceptController.java index 6f2dd346a..f4dbb77c3 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologySKOSConceptController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologySKOSConceptController.java @@ -1,5 +1,8 @@ package uk.ac.ebi.spot.ols.controller.api.v1; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.ObjectWriter; import io.swagger.v3.oas.annotations.Operation; import io.swagger.v3.oas.annotations.Parameter; import io.swagger.v3.oas.annotations.media.Schema; @@ -27,8 +30,7 @@ import 
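The one-line change in PATCH 004 above widens the class bucket in rdf2json: skos:ConceptScheme now falls through to the same CLASS node type as owl:Class, rdfs:Class and skos:Concept, which is what lets a scheme act as the root "term" carrying skos:hasTopConcept links. The dispatch in miniature (the enum and counter only mirror the shape of OntologyGraph.handleType, not its full logic):

    class TypeDispatchSketch {

        enum NodeType { CLASS, INDIVIDUAL }

        static int numberOfClasses = 0;

        static NodeType classify(String typeUri) {
            switch (typeUri) {
                case "http://www.w3.org/2002/07/owl#Class":
                case "http://www.w3.org/2000/01/rdf-schema#Class":
                case "http://www.w3.org/2004/02/skos/core#Concept":
                case "http://www.w3.org/2004/02/skos/core#ConceptScheme": // added by this patch
                    ++numberOfClasses;
                    return NodeType.CLASS;
                default:
                    return NodeType.INDIVIDUAL; // fallback chosen for the sketch only
            }
        }

        public static void main(String[] args) {
            System.out.println(classify("http://www.w3.org/2004/02/skos/core#ConceptScheme")); // CLASS
        }
    }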
uk.ac.ebi.spot.ols.repository.v1.V1TermRepository; import javax.servlet.http.HttpServletRequest; -import java.util.ArrayList; -import java.util.List; +import java.util.*; /** * @author Simon Jupp @@ -47,7 +49,7 @@ public class V1OntologySKOSConceptController { @Autowired V1TermAssembler termAssembler; - + @Operation(description = "Get complete SKOS concept hierarchy or only top concepts based on alternative top concept identification methods and concept relations. If only top concepts are identified, they can be used to extract the following levels of the concept tree one by one using the /{onto}/conceptrelations/{iri} method with broader or narrower concept relations.") @RequestMapping(path = "/{onto}/concepthierarchy", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) HttpEntity>> getSKOSConceptHierarchyByOntology( @@ -67,8 +69,8 @@ HttpEntity>> getSKOSConceptHierarchyByOntology( return new ResponseEntity<>(termRepository.conceptTreeWithoutTop(ontologyId,narrower,withChildren,obsoletes,lang,pageable), HttpStatus.OK); else return new ResponseEntity<>(termRepository.conceptTree(ontologyId,TopConceptEnum.SCHEMA == topConceptIdentification,narrower, withChildren,obsoletes,lang,pageable), HttpStatus.OK); - } - + } + @Operation(description = "Display complete SKOS concept hierarchy or only top concepts based on alternative top concept identification methods and concept relations. If only top concepts are identified, they can be used to extract the following levels of the concept tree one by one using the /{onto}/displayconceptrelations/{iri} method with broader or narrower concept relations.") @RequestMapping(path = "/{onto}/displayconcepthierarchy", produces = {MediaType.TEXT_PLAIN_VALUE}, method = RequestMethod.GET) @ResponseBody @@ -95,12 +97,12 @@ HttpEntity displaySKOSConceptHierarchyByOntology( StringBuilder sb = new StringBuilder(); for (TreeNode root : rootIndividuals) { sb.append(root.getIndex() + " , "+ root.getData().label + " , " + root.getData().iri).append("\n"); - sb.append(generateConceptHierarchyTextByOntology(root, displayRelated)); + sb.append(generateConceptHierarchyTextByOntology(root, displayRelated)); } - + return new HttpEntity(sb.toString()); - } - + } + @Operation(description = "Get partial SKOS concept hierarchy based on the encoded iri of the designated top concept") @RequestMapping(path = "/{onto}/concepthierarchy/{iri}", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) HttpEntity> getSKOSConceptHierarchyByOntologyAndIri( @@ -121,11 +123,11 @@ HttpEntity> getSKOSConceptHierarchyByOntologyAndIri( decodedIri = UriUtils.decode(iri, "UTF-8"); topConcept = termRepository.conceptSubTree(ontologyId, decodedIri, narrower, index, obsoletes, lang, pageable); - if (topConcept.getData().iri == null) + if (topConcept.getData().iri == null) throw new ResourceNotFoundException("No roots could be found for " + ontologyId ); return new ResponseEntity<>(topConcept, HttpStatus.OK); - } - + } + @Operation(description = "Display partial SKOS concept hierarchy based on the encoded iri of the designated top concept") @RequestMapping(path = "/{onto}/displayconcepthierarchy/{iri}", produces = {MediaType.TEXT_PLAIN_VALUE}, method = RequestMethod.GET) @ResponseBody @@ -149,13 +151,13 @@ HttpEntity displaySKOSConceptHierarchyByOntologyAndIri( StringBuilder sb = new StringBuilder(); decodedIri = UriUtils.decode(iri, "UTF-8"); topConcept = termRepository.conceptSubTree(ontologyId, 
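The /concepthierarchy endpoint above folds three top-concept strategies into two repository calls: RELATIONSHIPS derives roots from broader/narrower links alone, while SCHEMA and TOPCONCEPTOF_PROPERTY both go through conceptTree with a boolean choosing skos:hasTopConcept on the scheme versus skos:topConceptOf on the concepts. The dispatch condensed (repository calls replaced by placeholder strings):

    class HierarchyDispatchSketch {

        enum TopConceptEnum { SCHEMA, TOPCONCEPTOF_PROPERTY, RELATIONSHIPS }

        static String conceptTreeWithoutTop() { return "roots from broader/narrower links only"; }

        static String conceptTree(boolean useHasTopConcept) {
            return useHasTopConcept ? "roots from skos:hasTopConcept" : "roots from skos:topConceptOf";
        }

        static String resolve(TopConceptEnum strategy) {
            if (strategy == TopConceptEnum.RELATIONSHIPS)
                return conceptTreeWithoutTop();
            return conceptTree(strategy == TopConceptEnum.SCHEMA);
        }

        public static void main(String[] args) {
            for (TopConceptEnum s : TopConceptEnum.values())
                System.out.println(s + " -> " + resolve(s));
        }
    }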
decodedIri, narrower, index, obsoletes, lang, pageable); - + sb.append(topConcept.getIndex() + " , "+ topConcept.getData().label + " , " + topConcept.getData().iri).append("\n"); - sb.append(generateConceptHierarchyTextByOntology(topConcept, displayRelated)); - + sb.append(generateConceptHierarchyTextByOntology(topConcept, displayRelated)); + return new HttpEntity(sb.toString()); - } - + } + @Operation(description = "Broader, Narrower and Related concept relations of a concept are listed in JSON if the concept iri is provided in encoded format.") @RequestMapping(path = "/{onto}/conceptrelations/{iri}", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) public HttpEntity> findRelatedConcepts( @@ -164,13 +166,13 @@ public HttpEntity> findRelatedConcepts( @Parameter(description = "encoded concept IRI", required = true) @PathVariable("iri") String iri, @Parameter(description = "skos based concept relation type", required = true) - @RequestParam(value = "relation_type", required = true, defaultValue = "broader") + @RequestParam(value = "relation_type", required = true, defaultValue = "broader") @Schema(type = "string", allowableValues = { "broader", "narrower", "related" }) String relationType, @RequestParam(value = "obsoletes", required = false) Boolean obsoletes, @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, Pageable pageable, PagedResourcesAssembler assembler) { - + ontologyId = ontologyId.toLowerCase(); List related = new ArrayList(); String decodedIri = UriUtils.decode(iri, "UTF-8"); @@ -180,10 +182,63 @@ public HttpEntity> findRelatedConcepts( final int start = (int)pageable.getOffset(); final int end = Math.min((start + pageable.getPageSize()), related.size()); Page conceptPage = new PageImpl<>(related.subList(start, end), pageable, related.size()); - - return new ResponseEntity<>( assembler.toModel(conceptPage), HttpStatus.OK); + + return new ResponseEntity<>( assembler.toModel(conceptPage), HttpStatus.OK); } + + @Operation(description = "Node and Edge definitions needed to visualize the nodes that are directly related with the subject term. Ontology ID and encoded iri are required. 
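findRelatedConcepts above materialises the full relation list and then pages it by hand with subList and PageImpl. The same offset arithmetic in isolation, using the Spring Data types the controller already imports; the empty-slice guard is an addition worth considering, since subList throws once the requested offset passes the end of the list:

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.List;
    import org.springframework.data.domain.Page;
    import org.springframework.data.domain.PageImpl;
    import org.springframework.data.domain.PageRequest;
    import org.springframework.data.domain.Pageable;

    class ManualPagingSketch {

        static Page<String> slice(List<String> all, Pageable pageable) {
            int start = (int) pageable.getOffset();
            int end = Math.min(start + pageable.getPageSize(), all.size());
            // Past-the-end page: return an empty slice instead of letting
            // subList throw IndexOutOfBoundsException.
            List<String> content = start >= all.size() ? Collections.emptyList() : all.subList(start, end);
            return new PageImpl<>(content, pageable, all.size());
        }

        public static void main(String[] args) {
            List<String> related = Arrays.asList("a", "b", "c", "d", "e");
            System.out.println(slice(related, PageRequest.of(1, 2)).getContent()); // [c, d]
        }
    }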
") + @RequestMapping(path = "/{onto}/graph/{iri}", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + public HttpEntity retrieveImmediateGraph( + @Parameter(description = "ontology ID", required = true) + @PathVariable("onto") String ontologyId, + @Parameter(description = "encoded concept IRI", required = true) + @PathVariable("iri") String iri, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang){ + + List related = new ArrayList(); + String decodedIri = UriUtils.decode(iri, "UTF-8"); + + V1Term subjectTerm = termRepository.findByOntologyAndIri(ontologyId, decodedIri, lang); + + related = termRepository.findRelated(ontologyId, decodedIri, "related",lang); + + List narrower = new ArrayList(); + narrower = termRepository.findRelated(ontologyId, decodedIri, "narrower",lang); + + List broader = new ArrayList(); + broader = termRepository.findRelated(ontologyId, decodedIri, "broader",lang); + + Set relatedNodes = new HashSet(); + related.forEach(term -> relatedNodes.add(new Node(term.iri, term.label))); + Set narrowerNodes = new HashSet(); + narrower.forEach(term -> narrowerNodes.add(new Node(term.iri, term.label))); + Set broaderNodes = new HashSet(); + broader.forEach(term -> broaderNodes.add(new Node(term.iri, term.label))); + + Set edges = new HashSet(); + relatedNodes.forEach(node -> edges.add(new Edge(decodedIri, node.iri, "related","http://www.w3.org/2004/02/skos/core#related"))); + narrowerNodes.forEach(node -> edges.add(new Edge(decodedIri, node.iri, "narrower","http://www.w3.org/2004/02/skos/core#narrower"))); + broaderNodes.forEach(node -> edges.add(new Edge(decodedIri, node.iri, "broader","http://www.w3.org/2004/02/skos/core#broader"))); + + Set nodes = new HashSet(); + nodes.add(new Node(decodedIri,subjectTerm.label)); + nodes.addAll(relatedNodes); + nodes.addAll(broaderNodes); + nodes.addAll(narrowerNodes); + + + Map graph = new HashMap(); + graph.put("nodes", nodes); + graph.put("edges", edges); + ObjectWriter ow = new ObjectMapper().writer().withDefaultPrettyPrinter(); + try { + return new ResponseEntity<>(ow.writeValueAsString(graph),HttpStatus.OK); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + } + @Operation(description = "Broader, Narrower and Related concept relations of a concept are displayed as text if the concept iri is provided in encoded format.") @RequestMapping(path = "/{onto}/displayconceptrelations/{iri}", produces = {MediaType.TEXT_PLAIN_VALUE}, method = RequestMethod.GET) @ResponseBody @@ -193,7 +248,7 @@ public HttpEntity displayRelatedConcepts( @Parameter(description = "encoded concept IRI", required = true) @PathVariable("iri") String iri, @Parameter(description = "skos based concept relation type", required = true) - @RequestParam(value = "relation_type", required = true, defaultValue = "broader") + @RequestParam(value = "relation_type", required = true, defaultValue = "broader") @Schema(type = "string", allowableValues = { "broader", "narrower", "related" }) String relationType, @RequestParam(value = "obsoletes", required = false) Boolean obsoletes, @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, @@ -211,11 +266,11 @@ public HttpEntity displayRelatedConcepts( int count = 0; for (V1Term individual : conceptPage.getContent()) sb.append(++count).append(" , ").append(individual.label).append(" , ").append(individual.iri).append("\n"); - - return new HttpEntity<>( sb.toString()); + + return new HttpEntity<>( 
sb.toString()); } - + @Operation(description = "Broader, Narrower and Related concept relations of a concept are listed in JSON if the concept iri is provided in encoded format. The relationship is identified indirectly based on the related concept's relation to the concept in question. This requires traversing all the available concepts and checking if they are related to the concept in question. For this reason, this method is relatively slower than the displayconceptrelations method. Nevertheless, it enables to identify unforeseen relations of the concept in question") @RequestMapping(path = "/{onto}/conceptrelationsindirectly/{iri}", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) public HttpEntity> findRelatedConceptsIndirectly( @@ -224,21 +279,21 @@ public HttpEntity> findRelatedConceptsIndirectly( @Parameter(description = "encoded concept IRI", required = true) @PathVariable("iri") String iri, @Parameter(description = "skos based concept relation type", required = true) - @RequestParam(value = "relation_type", required = true, defaultValue = "broader") + @RequestParam(value = "relation_type", required = true, defaultValue = "broader") @Schema(type = "string", allowableValues = { "broader", "narrower", "related" }) String relationType, @RequestParam(value = "obsoletes", required = false) Boolean obsoletes, @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, Pageable pageable) { - + ontologyId = ontologyId.toLowerCase(); List related = new ArrayList(); String decodedIri = UriUtils.decode(iri, "UTF-8"); related = termRepository.findRelatedIndirectly(ontologyId, decodedIri, relationType, obsoletes,lang,pageable); - - return new ResponseEntity<>( related, HttpStatus.OK); + + return new ResponseEntity<>( related, HttpStatus.OK); } - + @Operation(description = "Broader, Narrower and Related concept relations of a concept are listed in JSON if the concept iri is provided in encoded format. The relationship is identified indirectly based on the related concept's relation to the concept in question. This requires traversing all the available concepts and checking if they are related to the concept in question. For this reason, this method is relatively slower than the displayconceptrelations method. 
Nevertheless, it enables to identify unforeseen relations of the concept in question") @RequestMapping(path = "/{onto}/displayconceptrelationsindirectly/{iri}", produces = {MediaType.TEXT_PLAIN_VALUE}, method = RequestMethod.GET) @ResponseBody @@ -248,7 +303,7 @@ public HttpEntity displayRelatedConceptsIndirectly( @Parameter(description = "encoded concept IRI", required = true) @PathVariable("iri") String iri, @Parameter(description = "skos based concept relation type", required = true) - @RequestParam(value = "relation_type", required = true, defaultValue = "broader") + @RequestParam(value = "relation_type", required = true, defaultValue = "broader") @Schema(type = "string", allowableValues = { "broader", "narrower", "related" }) String relationType, @Parameter(description = "Page size to retrieve individuals", required = true) @RequestParam(value = "obsoletes", required = false) Boolean obsoletes, @@ -259,16 +314,16 @@ public HttpEntity displayRelatedConceptsIndirectly( List related = new ArrayList(); String decodedIri = UriUtils.decode(iri, "UTF-8"); related = termRepository.findRelatedIndirectly(ontologyId, decodedIri, relationType, obsoletes,lang,pageable); - + int count = 0; for (V1Term individual : related) sb.append(++count).append(" , ").append(individual.label).append(" , ").append(individual.iri).append("\n"); - - return new ResponseEntity<>( sb.toString(), HttpStatus.OK); + + return new ResponseEntity<>( sb.toString(), HttpStatus.OK); } - + public StringBuilder generateConceptHierarchyTextByOntology(TreeNode rootConcept, boolean displayRelated) { StringBuilder sb = new StringBuilder(); for (TreeNode childConcept : rootConcept.getChildren()) { @@ -282,7 +337,7 @@ public StringBuilder generateConceptHierarchyTextByOntology(TreeNode roo } return sb; } - + @RequestMapping(method = RequestMethod.GET, produces = {MediaType.TEXT_PLAIN_VALUE}, value = "/removeConceptTreeCache") public HttpEntity removeConceptTreeCache() { return new HttpEntity(termRepository.removeConceptTreeCache()); @@ -291,6 +346,56 @@ public HttpEntity removeConceptTreeCache() { @ResponseStatus(value = HttpStatus.NOT_FOUND, reason = "Resource not found") @ExceptionHandler(ResourceNotFoundException.class) public void handleError(HttpServletRequest req, Exception exception) { - } + } + + public class Node { + String iri; + String label; + + public Node(String iri, String label) { + this.iri = iri; + this.label = label; + } + + public String getIri() { + return iri; + } + + public String getLabel() { + return label; + } + + } + + public class Edge { + String source; + String target; + String label; + String uri; + + public Edge(String source, String target, String label, String uri) { + this.source = source; + this.target = target; + this.label = label; + this.uri = uri; + } + + public String getSource() { + return source; + } + + public String getTarget() { + return target; + } + + public String getLabel() { + return label; + } + + public String getUri() { + return uri; + } + + } } diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java index e4c0273a0..a55598567 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java @@ -5,12 +5,10 @@ import com.google.gson.JsonParser; import org.apache.solr.client.solrj.SolrQuery; import 
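The plain-text renderer in this controller walks the concept tree depth first and emits one "index , label , iri" line per node, with related concepts interleaved when displayRelated is set. The recursion reduced to the three fields it touches (a sketch; the real TreeNode is generic over the term type):

    import java.util.ArrayList;
    import java.util.List;

    class TreeTextSketch {

        static class TreeNode {
            final String index, label, iri;
            final List<TreeNode> children = new ArrayList<>();
            TreeNode(String index, String label, String iri) {
                this.index = index; this.label = label; this.iri = iri;
            }
        }

        // Depth-first: each child prints its own line, then its whole subtree.
        static StringBuilder render(TreeNode node) {
            StringBuilder sb = new StringBuilder();
            for (TreeNode child : node.children) {
                sb.append(child.index).append(" , ").append(child.label)
                  .append(" , ").append(child.iri).append('\n');
                sb.append(render(child));
            }
            return sb;
        }

        public static void main(String[] args) {
            TreeNode root = new TreeNode("1", "root", "http://example.org/root"); // sample data
            root.children.add(new TreeNode("1.1", "child", "http://example.org/child"));
            System.out.print(root.index + " , " + root.label + " , " + root.iri + "\n" + render(root));
        }
    }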
org.apache.solr.client.solrj.SolrServerException; -import org.apache.solr.client.solrj.impl.HttpSolrClient; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.common.SolrDocument; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.MediaType; -import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.*; import uk.ac.ebi.spot.ols.repository.Validation; import uk.ac.ebi.spot.ols.repository.solr.OlsSolrClient; From d2df44f8ee3c7426c743dde213e8242e558308aa Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Thu, 15 Feb 2024 16:35:39 +0100 Subject: [PATCH 006/146] removed yet to be introduced caching for now and moved graph method below --- .../v1/V1OntologySKOSConceptController.java | 109 +++++++++--------- .../ols/repository/v1/V1TermRepository.java | 106 ++++++++--------- 2 files changed, 102 insertions(+), 113 deletions(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologySKOSConceptController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologySKOSConceptController.java index f4dbb77c3..c30ea350f 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologySKOSConceptController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologySKOSConceptController.java @@ -187,58 +187,6 @@ public HttpEntity> findRelatedConcepts( } - @Operation(description = "Node and Edge definitions needed to visualize the nodes that are directly related with the subject term. Ontology ID and encoded iri are required. ") - @RequestMapping(path = "/{onto}/graph/{iri}", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) - public HttpEntity retrieveImmediateGraph( - @Parameter(description = "ontology ID", required = true) - @PathVariable("onto") String ontologyId, - @Parameter(description = "encoded concept IRI", required = true) - @PathVariable("iri") String iri, - @RequestParam(value = "lang", required = false, defaultValue = "en") String lang){ - - List related = new ArrayList(); - String decodedIri = UriUtils.decode(iri, "UTF-8"); - - V1Term subjectTerm = termRepository.findByOntologyAndIri(ontologyId, decodedIri, lang); - - related = termRepository.findRelated(ontologyId, decodedIri, "related",lang); - - List narrower = new ArrayList(); - narrower = termRepository.findRelated(ontologyId, decodedIri, "narrower",lang); - - List broader = new ArrayList(); - broader = termRepository.findRelated(ontologyId, decodedIri, "broader",lang); - - Set relatedNodes = new HashSet(); - related.forEach(term -> relatedNodes.add(new Node(term.iri, term.label))); - Set narrowerNodes = new HashSet(); - narrower.forEach(term -> narrowerNodes.add(new Node(term.iri, term.label))); - Set broaderNodes = new HashSet(); - broader.forEach(term -> broaderNodes.add(new Node(term.iri, term.label))); - - Set edges = new HashSet(); - relatedNodes.forEach(node -> edges.add(new Edge(decodedIri, node.iri, "related","http://www.w3.org/2004/02/skos/core#related"))); - narrowerNodes.forEach(node -> edges.add(new Edge(decodedIri, node.iri, "narrower","http://www.w3.org/2004/02/skos/core#narrower"))); - broaderNodes.forEach(node -> edges.add(new Edge(decodedIri, node.iri, "broader","http://www.w3.org/2004/02/skos/core#broader"))); - - Set nodes = new HashSet(); - nodes.add(new Node(decodedIri,subjectTerm.label)); - nodes.addAll(relatedNodes); - 
nodes.addAll(broaderNodes); - nodes.addAll(narrowerNodes); - - - Map graph = new HashMap(); - graph.put("nodes", nodes); - graph.put("edges", edges); - ObjectWriter ow = new ObjectMapper().writer().withDefaultPrettyPrinter(); - try { - return new ResponseEntity<>(ow.writeValueAsString(graph),HttpStatus.OK); - } catch (JsonProcessingException e) { - throw new RuntimeException(e); - } - } - @Operation(description = "Broader, Narrower and Related concept relations of a concept are displayed as text if the concept iri is provided in encoded format.") @RequestMapping(path = "/{onto}/displayconceptrelations/{iri}", produces = {MediaType.TEXT_PLAIN_VALUE}, method = RequestMethod.GET) @ResponseBody @@ -324,6 +272,58 @@ public HttpEntity displayRelatedConceptsIndirectly( } + @Operation(description = "Node and Edge definitions needed to visualize the nodes that are directly related with the subject term. Ontology ID and encoded iri are required. ") + @RequestMapping(path = "/{onto}/graph/{iri}", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + public HttpEntity retrieveImmediateGraph( + @Parameter(description = "ontology ID", required = true) + @PathVariable("onto") String ontologyId, + @Parameter(description = "encoded concept IRI", required = true) + @PathVariable("iri") String iri, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang){ + + List related = new ArrayList(); + String decodedIri = UriUtils.decode(iri, "UTF-8"); + + V1Term subjectTerm = termRepository.findByOntologyAndIri(ontologyId, decodedIri, lang); + + related = termRepository.findRelated(ontologyId, decodedIri, "related",lang); + + List narrower = new ArrayList(); + narrower = termRepository.findRelated(ontologyId, decodedIri, "narrower",lang); + + List broader = new ArrayList(); + broader = termRepository.findRelated(ontologyId, decodedIri, "broader",lang); + + Set relatedNodes = new HashSet(); + related.forEach(term -> relatedNodes.add(new Node(term.iri, term.label))); + Set narrowerNodes = new HashSet(); + narrower.forEach(term -> narrowerNodes.add(new Node(term.iri, term.label))); + Set broaderNodes = new HashSet(); + broader.forEach(term -> broaderNodes.add(new Node(term.iri, term.label))); + + Set edges = new HashSet(); + relatedNodes.forEach(node -> edges.add(new Edge(decodedIri, node.iri, "related","http://www.w3.org/2004/02/skos/core#related"))); + narrowerNodes.forEach(node -> edges.add(new Edge(decodedIri, node.iri, "narrower","http://www.w3.org/2004/02/skos/core#narrower"))); + broaderNodes.forEach(node -> edges.add(new Edge(decodedIri, node.iri, "broader","http://www.w3.org/2004/02/skos/core#broader"))); + + Set nodes = new HashSet(); + nodes.add(new Node(decodedIri,subjectTerm.label)); + nodes.addAll(relatedNodes); + nodes.addAll(broaderNodes); + nodes.addAll(narrowerNodes); + + + Map graph = new HashMap(); + graph.put("nodes", nodes); + graph.put("edges", edges); + ObjectWriter ow = new ObjectMapper().writer().withDefaultPrettyPrinter(); + try { + return new ResponseEntity<>(ow.writeValueAsString(graph),HttpStatus.OK); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + } + public StringBuilder generateConceptHierarchyTextByOntology(TreeNode rootConcept, boolean displayRelated) { StringBuilder sb = new StringBuilder(); for (TreeNode childConcept : rootConcept.getChildren()) { @@ -338,11 +338,6 @@ public StringBuilder generateConceptHierarchyTextByOntology(TreeNode roo return sb; } - @RequestMapping(method 
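PATCH 006 drops the eviction endpoint because, as its subject line says, the "concepttree" cache it clears has not actually been introduced yet: without cache infrastructure, the @Cacheable and @CacheEvict annotations are inert. If that caching were wired up later, a minimal Spring setup could look like this (a sketch under that assumption, not code from this series):

    import org.springframework.cache.CacheManager;
    import org.springframework.cache.annotation.EnableCaching;
    import org.springframework.cache.concurrent.ConcurrentMapCacheManager;
    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;

    @Configuration
    @EnableCaching // without this, @Cacheable/@CacheEvict are silently ignored
    class ConceptTreeCacheConfigSketch {

        @Bean
        CacheManager cacheManager() {
            // One in-memory cache, named to match @Cacheable(value = "concepttree").
            return new ConcurrentMapCacheManager("concepttree");
        }
    }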
= RequestMethod.GET, produces = {MediaType.TEXT_PLAIN_VALUE}, value = "/removeConceptTreeCache") - public HttpEntity removeConceptTreeCache() { - return new HttpEntity(termRepository.removeConceptTreeCache()); - } - @ResponseStatus(value = HttpStatus.NOT_FOUND, reason = "Resource not found") @ExceptionHandler(ResourceNotFoundException.class) public void handleError(HttpServletRequest req, Exception exception) { diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1TermRepository.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1TermRepository.java index ec078c81d..3a34480e2 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1TermRepository.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1TermRepository.java @@ -297,23 +297,23 @@ public Page findAllByOboIdAndIsDefiningOntology(String oboId, String lan public Page getInstances(String ontologyId, String iri, Pageable pageable) { throw new RuntimeException(); } - - - - @Cacheable(value = "concepttree", key="#ontologyId.concat('-').concat(#schema).concat('-').concat(#narrower).concat('-').concat(#withChildren)") + + + + @Cacheable(value = "concepttree", key="#ontologyId.concat('-').concat(#schema).concat('-').concat(#narrower).concat('-').concat(#withChildren)") public List> conceptTree (String ontologyId, boolean schema, boolean narrower, boolean withChildren, Boolean obsoletes, String lang, Pageable pageable){ Page terms = this.findAllByOntology(ontologyId, obsoletes, lang, pageable); List listOfTerms = new ArrayList(); - listOfTerms.addAll(terms.getContent()); - + listOfTerms.addAll(terms.getContent()); + while(terms.hasNext()) { terms = this.findAllByOntology(ontologyId, obsoletes, lang, terms.nextPageable()); listOfTerms.addAll(terms.getContent()); - } - - List> rootIndividuals = new ArrayList>(); + } + + List> rootIndividuals = new ArrayList>(); int count = 0; - + if(schema) { for (V1Term indiv : listOfTerms) if (indiv.annotation.get("hasTopConcept") != null) { @@ -329,10 +329,10 @@ public List> conceptTree (String ontologyId, boolean schema, bo } rootIndividuals.add(topConcept); } - } + } } else for (V1Term individual : listOfTerms) { TreeNode tree = new TreeNode(individual); - + if (tree.isRoot() && individual.annotation.get("topConceptOf") != null) { tree.setIndex(String.valueOf(++count)); if(withChildren) { @@ -343,22 +343,22 @@ public List> conceptTree (String ontologyId, boolean schema, bo } rootIndividuals.add(tree); } - } - + } + return rootIndividuals; } - + @Cacheable(value = "concepttree", key="#ontologyId.concat('-').concat(#narrower).concat('-').concat(#withChildren)") public List> conceptTreeWithoutTop (String ontologyId, boolean narrower, boolean withChildren, Boolean obsoletes, String lang, Pageable pageable){ Page terms = this.findAllByOntology(ontologyId, obsoletes, lang, pageable); List listOfTerms = new ArrayList(); - listOfTerms.addAll(terms.getContent()); - + listOfTerms.addAll(terms.getContent()); + while(terms.hasNext()) { terms = this.findAllByOntology(ontologyId, obsoletes, lang, terms.nextPageable()); listOfTerms.addAll(terms.getContent()); - } - + } + Set rootIRIs = new HashSet(); List> rootIndividuals = new ArrayList>(); int count = 0; @@ -369,11 +369,11 @@ public List> conceptTreeWithoutTop (String ontologyId, boolean V1Term broaderIndividual = findIndividual(listOfTerms,iriBroader); if (broaderIndividual.annotation.get("broader") == null) { rootIRIs.add(iriBroader); - } + } } } } - + for (String iri : rootIRIs) { V1Term topConceptIndividual = 
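conceptTreeWithoutTop, kept as context in the diff above, has no declared top concepts to start from, so it derives roots from the relation graph itself: in one mode it collects every IRI referenced as skos:broader whose own record has no broader, in the other it keeps concepts that carry skos:narrower children but never appear as another concept's narrower. The second test reduces to set arithmetic (illustrative types):

    import java.util.*;

    class RootDetectionSketch {

        // iri -> the IRIs that concept declares as skos:narrower
        static Set<String> rootsByNarrower(Map<String, Set<String>> narrower) {
            Set<String> referenced = new HashSet<>();
            for (Set<String> children : narrower.values())
                referenced.addAll(children);
            Set<String> roots = new HashSet<>(narrower.keySet());
            roots.removeAll(referenced); // a root is never somebody else's narrower
            return roots;
        }

        public static void main(String[] args) {
            Map<String, Set<String>> narrower = new HashMap<>();
            narrower.put("A", new HashSet<>(Arrays.asList("B"))); // hypothetical concepts
            narrower.put("B", new HashSet<>(Arrays.asList("C")));
            narrower.put("C", Collections.emptySet());
            System.out.println(rootsByNarrower(narrower)); // [A]
        }
    }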
findIndividual(listOfTerms, iri); TreeNode topConcept = new TreeNode(topConceptIndividual); @@ -382,7 +382,7 @@ public List> conceptTreeWithoutTop (String ontologyId, boolean populateChildrenandRelatedByBroader(topConceptIndividual,topConcept,listOfTerms); rootIndividuals.add(topConcept); } - + } else { for (V1Term individual : listOfTerms) { if (individual.annotation.get("narrower") != null) { @@ -393,9 +393,9 @@ public List> conceptTreeWithoutTop (String ontologyId, boolean if (individual.iri.equals(iriNarrower)) root = false; } - } + } } - + if(root) { TreeNode topConcept = new TreeNode(individual); topConcept.setIndex(String.valueOf(++count)); @@ -406,22 +406,22 @@ public List> conceptTreeWithoutTop (String ontologyId, boolean } } } - + return rootIndividuals; } - + @Cacheable(value = "concepttree", key="#ontologyId.concat('-').concat('s').concat('-').concat(#iri).concat('-').concat(#narrower).concat('-').concat(#index)") public TreeNode conceptSubTree(String ontologyId, String iri, boolean narrower, String index, Boolean obsoletes, String lang, Pageable pageable){ Page terms = this.findAllByOntology(ontologyId, obsoletes, lang, pageable); List listOfTerms = new ArrayList(); - listOfTerms.addAll(terms.getContent()); - + listOfTerms.addAll(terms.getContent()); + while(terms.hasNext()) { terms = this.findAllByOntology(ontologyId, obsoletes, lang, terms.nextPageable()); listOfTerms.addAll(terms.getContent()); } - V1Term topConceptIndividual = findIndividual(listOfTerms,iri); + V1Term topConceptIndividual = findIndividual(listOfTerms,iri); TreeNode topConcept = new TreeNode(topConceptIndividual); topConcept.setIndex(index); if(narrower) @@ -431,56 +431,56 @@ public TreeNode conceptSubTree(String ontologyId, String iri, boolean na return topConcept; } - + public V1Term findIndividual(List wholeList, String iri) { for (V1Term individual : wholeList) if(individual.iri.equals(iri)) return individual; return new V1Term(); } - + public List findRelated(String ontologyId, String iri, String relationType, String lang) { - List related = new ArrayList(); + List related = new ArrayList(); V1Term individual = this.findByOntologyAndIri(ontologyId, iri, lang); if (individual != null) if (individual.annotation.get(relationType) != null) - for (String iriBroader : (LinkedHashSet) individual.annotation.get(relationType)) + for (String iriBroader : (LinkedHashSet) individual.annotation.get(relationType)) related.add(this.findByOntologyAndIri(ontologyId, iriBroader, lang)); - + return related; } - + public ListfindRelatedIndirectly(String ontologyId, String iri, String relationType, Boolean obsoletes, String lang, Pageable pageable){ - List related = new ArrayList(); - + List related = new ArrayList(); + V1Term individual = this.findByOntologyAndIri(ontologyId, iri, lang); if(individual == null) return related; if(individual.iri == null) return related; - + Page terms = this.findAllByOntology(ontologyId, obsoletes, lang, pageable); List listOfTerms = new ArrayList(); - listOfTerms.addAll(terms.getContent()); - + listOfTerms.addAll(terms.getContent()); + while(terms.hasNext()) { terms = this.findAllByOntology(ontologyId, obsoletes, lang, terms.nextPageable()); listOfTerms.addAll(terms.getContent()); - } - + } + for (V1Term term : listOfTerms) { if (term != null) if (term.annotation.get(relationType) != null) - for (String iriRelated : (LinkedHashSet) term.annotation.get(relationType)) + for (String iriRelated : (LinkedHashSet) term.annotation.get(relationType)) if(iriRelated.equals(iri)) related.add(term); } 
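findIndividual above is a linear scan, and the tree builders call it once per edge, so assembling a hierarchy is quadratic in the number of concepts. Indexing the page-collected terms by IRI once per request would make each lookup constant time (an optimisation sketch, not a change made in this series):

    import java.util.*;
    import java.util.function.Function;

    class IriIndexSketch {

        static class Term {
            final String iri;
            Term(String iri) { this.iri = iri; }
        }

        public static void main(String[] args) {
            List<Term> listOfTerms = Arrays.asList(
                    new Term("http://example.org/A"), new Term("http://example.org/B"));

            // Build the index once, instead of rescanning the list per lookup.
            Map<String, Term> byIri = new HashMap<>();
            for (Term t : listOfTerms)
                byIri.put(t.iri, t);

            // Equivalent of findIndividual(listOfTerms, iri), including its
            // fallback to an empty placeholder for unknown IRIs.
            Function<String, Term> findIndividual = iri -> byIri.getOrDefault(iri, new Term(null));

            System.out.println(findIndividual.apply("http://example.org/A").iri);
        }
    }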
- + return related; } - + public void populateChildrenandRelatedByNarrower(V1Term individual, TreeNode tree, List listOfTerms ) { - + if (individual.annotation.get("related") != null) for (String iriRelated : (LinkedHashSet) individual.annotation.get("related")) { TreeNode related = new TreeNode(findIndividual(listOfTerms,iriRelated)); @@ -492,12 +492,12 @@ public void populateChildrenandRelatedByNarrower(V1Term individual, TreeNode) individual.annotation.get("narrower")) { V1Term childIndividual = findIndividual(listOfTerms,iriChild); TreeNode child = new TreeNode(childIndividual); - child.setIndex(tree.getIndex()+"."+ ++count); + child.setIndex(tree.getIndex()+"."+ ++count); populateChildrenandRelatedByNarrower(childIndividual,child,listOfTerms); tree.addChild(child); } } - + public void populateChildrenandRelatedByBroader(V1Term individual, TreeNode tree, List listOfTerms) { if (individual.annotation.get("related") != null) for (String iriRelated : (LinkedHashSet) individual.annotation.get("related")) { @@ -512,16 +512,10 @@ public void populateChildrenandRelatedByBroader(V1Term individual, TreeNode child = new TreeNode(indiv); - child.setIndex(tree.getIndex()+"."+ ++count); + child.setIndex(tree.getIndex()+"."+ ++count); populateChildrenandRelatedByBroader(indiv,child,listOfTerms); tree.addChild(child); - } + } } } - - @CacheEvict(value="concepttree", allEntries=true) - public String removeConceptTreeCache() { - return "All concept tree cache removed!"; - } - } From c7f8de2bd6c973658c06e21d429bd536c78ade3a Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Fri, 16 Feb 2024 10:20:48 +0100 Subject: [PATCH 007/146] renamed call signatures with respect to EBI conventions in #625 --- .../v1/V1OntologySKOSConceptController.java | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologySKOSConceptController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologySKOSConceptController.java index c30ea350f..44415e81c 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologySKOSConceptController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologySKOSConceptController.java @@ -51,7 +51,7 @@ public class V1OntologySKOSConceptController { V1TermAssembler termAssembler; @Operation(description = "Get complete SKOS concept hierarchy or only top concepts based on alternative top concept identification methods and concept relations. If only top concepts are identified, they can be used to extract the following levels of the concept tree one by one using the /{onto}/conceptrelations/{iri} method with broader or narrower concept relations.") - @RequestMapping(path = "/{onto}/concepthierarchy", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + @RequestMapping(path = "/{onto}/skos/tree", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) HttpEntity>> getSKOSConceptHierarchyByOntology( @Parameter(description = "ontology ID", required = true) @PathVariable("onto") String ontologyId, @@ -72,7 +72,7 @@ HttpEntity>> getSKOSConceptHierarchyByOntology( } @Operation(description = "Display complete SKOS concept hierarchy or only top concepts based on alternative top concept identification methods and concept relations. 
If only top concepts are identified, they can be used to extract the following levels of the concept tree one by one using the /{onto}/displayconceptrelations/{iri} method with broader or narrower concept relations.") - @RequestMapping(path = "/{onto}/displayconcepthierarchy", produces = {MediaType.TEXT_PLAIN_VALUE}, method = RequestMethod.GET) + @RequestMapping(path = "/{onto}/skos/displaytree", produces = {MediaType.TEXT_PLAIN_VALUE}, method = RequestMethod.GET) @ResponseBody HttpEntity displaySKOSConceptHierarchyByOntology( @Parameter(description = "ontology ID", required = true) @@ -104,7 +104,7 @@ HttpEntity displaySKOSConceptHierarchyByOntology( } @Operation(description = "Get partial SKOS concept hierarchy based on the encoded iri of the designated top concept") - @RequestMapping(path = "/{onto}/concepthierarchy/{iri}", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + @RequestMapping(path = "/{onto}/skos/{iri}/tree", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) HttpEntity> getSKOSConceptHierarchyByOntologyAndIri( @Parameter(description = "ontology ID", required = true) @PathVariable("onto") String ontologyId, @@ -129,7 +129,7 @@ HttpEntity> getSKOSConceptHierarchyByOntologyAndIri( } @Operation(description = "Display partial SKOS concept hierarchy based on the encoded iri of the designated top concept") - @RequestMapping(path = "/{onto}/displayconcepthierarchy/{iri}", produces = {MediaType.TEXT_PLAIN_VALUE}, method = RequestMethod.GET) + @RequestMapping(path = "/{onto}/skos/{iri}/displaytree", produces = {MediaType.TEXT_PLAIN_VALUE}, method = RequestMethod.GET) @ResponseBody HttpEntity displaySKOSConceptHierarchyByOntologyAndIri( @Parameter(description = "ontology ID", required = true) @@ -159,7 +159,7 @@ HttpEntity displaySKOSConceptHierarchyByOntologyAndIri( } @Operation(description = "Broader, Narrower and Related concept relations of a concept are listed in JSON if the concept iri is provided in encoded format.") - @RequestMapping(path = "/{onto}/conceptrelations/{iri}", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + @RequestMapping(path = "/{onto}/skos/{iri}/relations", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) public HttpEntity> findRelatedConcepts( @Parameter(description = "ontology ID", required = true) @PathVariable("onto") String ontologyId, @@ -188,7 +188,7 @@ public HttpEntity> findRelatedConcepts( } @Operation(description = "Broader, Narrower and Related concept relations of a concept are displayed as text if the concept iri is provided in encoded format.") - @RequestMapping(path = "/{onto}/displayconceptrelations/{iri}", produces = {MediaType.TEXT_PLAIN_VALUE}, method = RequestMethod.GET) + @RequestMapping(path = "/{onto}/skos/{iri}/displayrelations", produces = {MediaType.TEXT_PLAIN_VALUE}, method = RequestMethod.GET) @ResponseBody public HttpEntity displayRelatedConcepts( @Parameter(description = "ontology ID", required = true) @@ -220,7 +220,7 @@ public HttpEntity displayRelatedConcepts( } @Operation(description = "Broader, Narrower and Related concept relations of a concept are listed in JSON if the concept iri is provided in encoded format. The relationship is identified indirectly based on the related concept's relation to the concept in question. 
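With the renames in place, the SKOS calls live under {onto}/skos/... and keep the IRI as a path variable, which therefore has to arrive URL-encoded; since the servlet container decodes the path once and the controller calls UriUtils.decode again, these endpoints appear to follow the usual OLS convention of double-encoded IRIs. A client sketch with the JDK 11 HttpClient, where the host, the /api/ontologies prefix and the concept IRI are assumptions:

    import java.net.URI;
    import java.net.URLEncoder;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;
    import java.nio.charset.StandardCharsets;

    class SkosClientSketch {
        public static void main(String[] args) throws Exception {
            String iri = "http://astrothesaurus.org/uat/1"; // hypothetical concept
            String once = URLEncoder.encode(iri, StandardCharsets.UTF_8);
            String twice = URLEncoder.encode(once, StandardCharsets.UTF_8);
            // Double encoding: one layer for the container, one for UriUtils.decode.
            URI uri = URI.create("http://localhost:8080/api/ontologies/uat/skos/"
                    + twice + "/relations?relation_type=broader");
            HttpResponse<String> response = HttpClient.newHttpClient()
                    .send(HttpRequest.newBuilder(uri).GET().build(),
                          HttpResponse.BodyHandlers.ofString());
            System.out.println(response.statusCode());
        }
    }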
This requires traversing all the available concepts and checking if they are related to the concept in question. For this reason, this method is relatively slower than the displayconceptrelations method. Nevertheless, it enables to identify unforeseen relations of the concept in question") - @RequestMapping(path = "/{onto}/conceptrelationsindirectly/{iri}", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + @RequestMapping(path = "/{onto}/skos/{iri}/indirectrelations", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) public HttpEntity> findRelatedConceptsIndirectly( @Parameter(description = "ontology ID", required = true) @PathVariable("onto") String ontologyId, @@ -243,7 +243,7 @@ public HttpEntity> findRelatedConceptsIndirectly( } @Operation(description = "Broader, Narrower and Related concept relations of a concept are listed in JSON if the concept iri is provided in encoded format. The relationship is identified indirectly based on the related concept's relation to the concept in question. This requires traversing all the available concepts and checking if they are related to the concept in question. For this reason, this method is relatively slower than the displayconceptrelations method. Nevertheless, it enables to identify unforeseen relations of the concept in question") - @RequestMapping(path = "/{onto}/displayconceptrelationsindirectly/{iri}", produces = {MediaType.TEXT_PLAIN_VALUE}, method = RequestMethod.GET) + @RequestMapping(path = "/{onto}/skos/{iri}/displayindirectrelations", produces = {MediaType.TEXT_PLAIN_VALUE}, method = RequestMethod.GET) @ResponseBody public HttpEntity displayRelatedConceptsIndirectly( @Parameter(description = "ontology ID", required = true) @@ -273,7 +273,7 @@ public HttpEntity displayRelatedConceptsIndirectly( } @Operation(description = "Node and Edge definitions needed to visualize the nodes that are directly related with the subject term. Ontology ID and encoded iri are required. 
") - @RequestMapping(path = "/{onto}/graph/{iri}", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + @RequestMapping(path = "/{onto}/skos/{iri}/graph", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) public HttpEntity retrieveImmediateGraph( @Parameter(description = "ontology ID", required = true) @PathVariable("onto") String ontologyId, From 5080ff81432e664f0eb74d097e8737e9da232279 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Mon, 19 Feb 2024 17:44:13 +0100 Subject: [PATCH 008/146] handled null relations for some ontologies in EBISPOT#625 and TIBHannover#1 --- .../ols/repository/v1/V1TermRepository.java | 44 ++-- dataload/configs/skos_ontologies.json | 237 ++++++++++-------- 2 files changed, 153 insertions(+), 128 deletions(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1TermRepository.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1TermRepository.java index 3a34480e2..3b8e72246 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1TermRepository.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1TermRepository.java @@ -481,34 +481,34 @@ public List findRelated(String ontologyId, String iri, String relationTy public void populateChildrenandRelatedByNarrower(V1Term individual, TreeNode tree, List listOfTerms ) { - if (individual.annotation.get("related") != null) - for (String iriRelated : (LinkedHashSet) individual.annotation.get("related")) { - TreeNode related = new TreeNode(findIndividual(listOfTerms,iriRelated)); - related.setIndex(tree.getIndex()+ ".related"); - tree.addRelated(related); - } + if (individual.annotation != null) + for (String iriRelated : (LinkedHashSet) individual.annotation.getOrDefault("related", new LinkedHashSet())) { + TreeNode related = new TreeNode(findIndividual(listOfTerms, iriRelated)); + related.setIndex(tree.getIndex() + ".related"); + tree.addRelated(related); + } int count = 0; - if (individual.annotation.get("narrower") != null) - for (String iriChild : (LinkedHashSet) individual.annotation.get("narrower")) { - V1Term childIndividual = findIndividual(listOfTerms,iriChild); - TreeNode child = new TreeNode(childIndividual); - child.setIndex(tree.getIndex()+"."+ ++count); - populateChildrenandRelatedByNarrower(childIndividual,child,listOfTerms); - tree.addChild(child); - } + if (individual.annotation != null) + for (String iriChild : (LinkedHashSet) individual.annotation.getOrDefault("narrower", new LinkedHashSet())) { + V1Term childIndividual = findIndividual(listOfTerms, iriChild); + TreeNode child = new TreeNode(childIndividual); + child.setIndex(tree.getIndex() + "." 
+ ++count); + populateChildrenandRelatedByNarrower(childIndividual, child, listOfTerms); + tree.addChild(child); + } } public void populateChildrenandRelatedByBroader(V1Term individual, TreeNode tree, List listOfTerms) { - if (individual.annotation.get("related") != null) - for (String iriRelated : (LinkedHashSet) individual.annotation.get("related")) { - TreeNode related = new TreeNode(findIndividual(listOfTerms,iriRelated)); - related.setIndex(tree.getIndex()+ ".related"); - tree.addRelated(related); - } + if (individual.annotation != null) + for (String iriRelated : (LinkedHashSet) individual.annotation.getOrDefault("related", new LinkedHashSet())) { + TreeNode related = new TreeNode(findIndividual(listOfTerms, iriRelated)); + related.setIndex(tree.getIndex() + ".related"); + tree.addRelated(related); + } int count = 0; for ( V1Term indiv : listOfTerms) { - if (indiv.annotation.get("broader") != null) - for (String iriBroader : (LinkedHashSet) indiv.annotation.get("broader")) + if (indiv.annotation != null) + for (String iriBroader : (LinkedHashSet) indiv.annotation.getOrDefault("broader",new LinkedHashSet())) if(individual.iri != null) if (individual.iri.equals(iriBroader)) { TreeNode child = new TreeNode(indiv); diff --git a/dataload/configs/skos_ontologies.json b/dataload/configs/skos_ontologies.json index 403f54325..37afcda80 100644 --- a/dataload/configs/skos_ontologies.json +++ b/dataload/configs/skos_ontologies.json @@ -1,113 +1,138 @@ { "ontologies": [ { - "title": "PhySH - Physics Subject Headings", - "preferredPrefix": "physh", - "description": "PhySH (Physics Subject Headings) is a physics classification scheme developed by APS to organize journal, meeting, and other content by topic.", - "homepage": "https://physh.org/", - "tracker": null, - "logo": null, - "annotations": null, - "oboSlims": false, - "preferredRootTerms": [], - "allowDownload": false, - "classifications": null, - "license": null, - "repoUrl": null, - "uri": "https://raw.githubusercontent.com/physh-org/PhySH/master/physh.ttl", - "id": "physh", - "mailing_list": null, - "ontology_purl": "https://raw.githubusercontent.com/physh-org/PhySH/master/physh.ttl", - "reasoner": "NONE", - "label_property": "https://physh.org/rdf/2018/01/01/core#prefLabel", - "definition_property": [ - "http://www.w3.org/2004/02/skos/core#definition", - "http://purl.org/dc/terms/description" - ], - "synonym_property": [ - "http://www.w3.org/2004/02/skos/core#altLabel" - ], - "hierarchical_property": [ - "http://www.w3.org/2004/02/skos/core#broader", - "https://physh.org/rdf/2018/01/01/core#inDiscipline", - "https://physh.org/rdf/2018/01/01/core#inFacet" - ], - "base_uri": [ - "https://doi.org/10.29172" - ] - }, + "ontology_purl": "https://raw.githubusercontent.com/physh-org/PhySH/master/physh.ttl", + "description": "PhySH (Physics Subject Headings) is a physics classification scheme developed by APS to organize journal, meeting, and other content by topic.", + "homepage": "https://physh.org/", + "id": "PhySH", + "license": { + "label": "CC-0 1.0", + "url": "https://creativecommons.org/publicdomain/zero/1.0/" + }, + "title": "PhySH - Physics Subject Headings", + "tracker": "https://github.com/physh-org/PhySH/issues", + "definition_property": [ + "http://www.w3.org/2004/02/skos/core#definition", + "http://purl.org/dc/terms/description" + ], + "creator": [ + "American Physical Society (https://www.aps.org/)" + ], + "preferredPrefix": "physh", + "hierarchical_property": [ + "http://www.w3.org/2004/02/skos/core#broader", + 
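The getOrDefault rewrite above is the heart of PATCH 008: one null guard on the whole annotation map, then empty sets for any missing relation, so vocabularies that omit skos:related or skos:narrower iterate zero times instead of throwing a NullPointerException. The pattern in miniature:

    import java.util.*;

    class NullSafeAnnotationSketch {
        public static void main(String[] args) {
            Map<String, LinkedHashSet<String>> annotation = new HashMap<>();
            annotation.put("broader", new LinkedHashSet<>(Arrays.asList("http://example.org/parent")));

            // Missing relation: the loop body simply never runs.
            for (String iri : annotation.getOrDefault("related", new LinkedHashSet<>()))
                System.out.println("related: " + iri);

            for (String iri : annotation.getOrDefault("broader", new LinkedHashSet<>()))
                System.out.println("broader: " + iri);
        }
    }

The outer annotation != null check is still needed, because for some terms the map itself is absent rather than merely missing a key.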
"https://physh.org/rdf/2018/01/01/core#inDiscipline", + "https://physh.org/rdf/2018/01/01/core#inFacet" + ], + "label_property": "https://physh.org/rdf/2018/01/01/core#prefLabel", + "synonym_property": [ + "http://www.w3.org/2004/02/skos/core#altLabel" + ], + "base_uri": [ + "https://doi.org/10.29172" + ], + "repo_url": "https://github.com/physh-org/PhySH", + "skos": true +}, { - "title": "Unified Astronomy Thesaurus (UAT)", - "preferredPrefix": "uat", - "description": "The Unified Astronomy Thesaurus (UAT) is an open, interoperable and community-supported thesaurus which unifies existing, divergent, and isolated controlled vocabularies in astronomy and astrophysics into a single high-quality, freely-available open thesaurus formalizing astronomical concepts and their inter-relationships. The UAT builds upon the IAU Thesaurus with major contributions from the Astronomy portions of the thesauri developed by the Institute of Physics Publishing and the American Institute of Physics. The Unified Astronomy Thesaurus will be further enhanced and updated through a collaborative effort involving broad community participation.", - "homepage": "http://astrothesaurus.org", - "tracker": null, - "logo": null, - "annotations": null, - "oboSlims": false, - "preferredRootTerms": [], - "allowDownload": false, - "classifications": null, - "license": null, - "repoUrl": null, - "uri": "https://raw.githubusercontent.com/astrothesaurus/UAT/master/UAT.rdf", - "id": "uat", - "mailing_list": "sio-ontology@googlegroups.com", - "ontology_purl": "https://raw.githubusercontent.com/astrothesaurus/UAT/master/UAT.rdf", - "reasoner": "NONE", - "label_property": "http://www.w3.org/2004/02/skos/core#prefLabel", - "definition_property": [ - "http://www.w3.org/2004/02/skos/core#definition" - ], - "synonym_property": [ - "http://www.w3.org/2004/02/skos/core#altLabel" - ], - "hierarchical_property": [ - "http://purl.obolibrary.org/obo/BFO_0000050" - ], - "base_uri": [ - "http://astrothesaurus.org/uat" - ] - }, - - - - - { - "title": "Simple Knowledge Organization System (SKOS) version of Materials Data Vocabulary", - "preferredPrefix": "mdv", - "description": "A version of the Materials Data Vocabulary structured as Simple Knowledge Organization System (SKOS). The XML was originally created by the TemaTres software. This vocabulary describes the applicability to material science of records in the NIST Materials Resource Registry (NMRR - https://materials.registry.nist.gov/). The NMRR allows for the registration of materials resources, bridging the gap between existing resources and the end users. The NMRR functions as a node in a federated system, making the registered information available for research to the materials community. This is being developed at the National Institute of Standards and Technology and is made available to solicit comments from the Material Science community. (An Excel version of the file is also included in the distributions for ease of use.) 
Please cite this resource as: Medina-Smith, Andrea; Becker, Chandler (2017), Simple Knowledge Organization System (SKOS) version of Materials Data Vocabulary , National Institute of Standards and Technology, https://doi.org/10.18434/T4/1435037", - "homepage": "https://data.nist.gov/od/id/67C783D4BA814C8EE05324570681708A1899", - "tracker": null, - "logo": null, - "annotations": null, - "oboSlims": false, - "preferredRootTerms": [], - "allowDownload": true, - "classifications": null, - "license": null, - "repoUrl": null, - "id": "mdv", - "ontology_purl": "https://data.nist.gov/od/dm/nmrr/vocab/", - "reasoner": "NONE", - "label_property": "http://www.w3.org/2004/02/skos/core#prefLabel", - "creator": [ - "Andrea Medina-Smith (https://orcid.org/0000-0002-1217-701X)", - "Chandler Becker (https://orcid.org/0000-0002-3653-0199)" - ], - "definition_property": [ - "http://www.w3.org/2004/02/skos/core#definition" - ], - "synonym_property": [ - "http://www.w3.org/2004/02/skos/core#altLabel" - ], - "hierarchical_property": [ - "http://purl.obolibrary.org/obo/BFO_0000050" - ], - "base_uri": [ - "https://data.nist.gov/od/dm/nmrr/vocab" - ] - } + "ontology_purl": "https://raw.githubusercontent.com/astrothesaurus/UAT/master/UAT.rdf", + "title": "Unified Astronomy Thesaurus (UAT)", + "id": "uat", + "preferredPrefix": "uat", + "license": { + "label": "Creative Commons Attribution-ShareAlike 3.0 Unported License", + "url": "https://github.com/astrothesaurus/UAT/blob/master/LICENSE.md" + }, + "mailing_list": "sio-ontology@googlegroups.com", + "description": "The Unified Astronomy Thesaurus (UAT) is an open, interoperable and community-supported thesaurus which unifies existing, divergent, and isolated controlled vocabularies in astronomy and astrophysics into a single high-quality, freely-available open thesaurus formalizing astronomical concepts and their inter-relationships. The UAT builds upon the IAU Thesaurus with major contributions from the Astronomy portions of the thesauri developed by the Institute of Physics Publishing and the American Institute of Physics. The Unified Astronomy Thesaurus will be further enhanced and updated through a collaborative effort involving broad community participation.", + "homepage": "http://astrothesaurus.org", + "creator": [ + "Frey Katie" + ], + "is_foundary": false, + "tracker": "https://github.com/astrothesaurus/UAT/issues", + "label_property": "http://www.w3.org/2004/02/skos/core#prefLabel", + "base_uri": [ + "http://astrothesaurus.org/uat" + ], + "synonym_property": [ + "http://www.w3.org/2004/02/skos/core#altLabel" + ], + "definition_property": [ + "http://www.w3.org/2004/02/skos/core#definition" + ], + "repo_url": "https://github.com/astrothesaurus/UAT", + "skos": true +}, +{ + "ontology_purl": "https://raw.githubusercontent.com/dini-ag-kim/hochschulfaechersystematik/master/hochschulfaechersystematik.ttl", + "id": "hsfs", + "title": "Hochschulfächersystematik", + "description": "Diese Hochschulfächersystematik basiert auf der Destatis-Systematik der Fächergruppen, Studienbereiche und Studienfächer (http://bartoc.org/node/18919) und wird gepflegt von der OER-Metadatengruppe der DINI-AG KIM. 
Die Systematik ist Bestandteil der Spezifikationen LOM for Higher Education OER Repositories und LRMI-Profil (Entwurf).", + "repo_url": "https://github.com/dini-ag-kim/hochschulfaechersystematik", + "preferredPrefix": "hsfs", + "allow_download": true, + "homepage": "https://bartoc.org/en/node/18919", + "base_uri": [ + "https://w3id.org/kim/hochschulfaechersystematik/" + ], + "label_property": "http://www.w3.org/2004/02/skos/core#prefLabel", + "skos": true, + "creator": [ + "Michael Menzel", + "Adrian Pohl" + ], + "license": { + "label": "freely available", + "url": "http://bartoc.org/en/Access/Free" + } +}, +{ + "ontology_purl": "https://purl.org/fidbaudigital/subjects", + "title": "FID BAUdigital Subject Headings", + "id": "bdsubj", + "preferredPrefix": "bdsubj", + "license": { + "label": "CC-BY 4.0", + "url": "https://creativecommons.org/licenses/by/4.0/" + }, + "description": "This subject heading system has beeen developed for use in FID BAUdigital and its future web services. It covers scientific fields of Civil Engineering, Architecture and Urban Studies with a special section on digitization. This subject classification has been mapped to several other classification systems. The latest version of the subject classification including these mappings is available at https://gitlab.com/fid-bau/terminologie/fid-baudigital-faecherklassifikation/-/raw/main/Subject_Headings_all_mappings.owl.", + "homepage": "https://gitlab.com/fid-bau/terminologie/fid-baudigital-faecherklassifikation", + "tracker": "https://gitlab.com/fid-bau/terminologie/fid-baudigital-faecherklassifikation/-/issues", + "definition_property": [ + "http://www.w3.org/2004/02/skos/core#definition" + ], + "label_property": "http://www.w3.org/2004/02/skos/core#prefLabel", + "creator": [ + "Fraunhofer-Informationszentrum Raum und Bau IRB" + ], + "base_uri": [ + "https://purl.org/fidbaudigital/subjects" + ], + "skos": true, + "repo_url": "https://gitlab.com/fid-bau/terminologie/fid-baudigital-faecherklassifikation" +}, +{ + "ontology_purl": "https://vocabs-downloads.acdh.oeaw.ac.at/vocabs-main/GeneralConcepts/OeFOS/oefos_disciplines.ttl", + "id": "oefos", + "license": { + "label": "Creative Commons Attribution 4.0 International License.", + "url": "https://creativecommons.org/licenses/by/4.0/" + }, + "title": "The Austrian Fields of Science and Technology Classification (ÖFOS 2012)", + "description": "The Austrian Fields of Science and Technology Classification (ÖFOS 2012) is the Austrian version of the revised international Fields of Science and Technology Classification of the OECD (FOS) published in the Frascati Manual 2015 as Fields of Research and Development (FORD). These fields are adjusted to national needs, whose application for international comparisons is binding, particularly within the European Statistical System. The six major Fields of Science: Natural Sciences; Technical Sciences; Human Medicine, Health Sciences; Agricultural Sciences, Veterinary Medicine; Social Sciences and Humanities remained unchanged in comparison to ÖFOS 2002. In order to ensure international comparability, the previous 2-digit levels from 2002, which are no longer applicable, were replaced by new 3-digit levels (groups) according to the international FOS respectively FORD. These 3-digit levels were provided with further sub-groups (4-digits) taking into account the comments of the international classification. 
The new Austrian Fields of Science can therefore be adapted to national peculiarities of Austrian research activities. The research areas, with their corresponding 6-digit codes in alphabetical order, serve to describe the fields of activity of research projects and/or to cover the main scientific activities of a statistical unit in the research and development surveys. (Current revision status: August 2017)", +    "homepage": "https://vocabs.dariah.eu/oefos/en/", +    "base_uri": [ +      "https://vocabs.acdh.oeaw.ac.at/oefosdisciplines/" +    ], +    "allow_download": true, +    "preferredPrefix": "oefos", +    "skos": true, +    "label_property": "http://www.w3.org/2004/02/skos/core#prefLabel", +    "creator": [ +      "Christoph Hoffmann" +    ] +} ] } From c249078582a83562bd2a304fb18d17e3aafce456 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Tue, 20 Feb 2024 15:58:30 +0100 Subject: [PATCH 009/146] changed skos variable name in the config file --- dataload/configs/skos_ontologies.json | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/dataload/configs/skos_ontologies.json b/dataload/configs/skos_ontologies.json index 37afcda80..6aa93a543 100644 --- a/dataload/configs/skos_ontologies.json +++ b/dataload/configs/skos_ontologies.json @@ -32,7 +32,7 @@ "https://doi.org/10.29172" ], "repo_url": "https://github.com/physh-org/PhySH", -    "skos": true +    "isSkos": true }, { "ontology_purl": "https://raw.githubusercontent.com/astrothesaurus/UAT/master/UAT.rdf", @@ -62,7 +62,7 @@ "http://www.w3.org/2004/02/skos/core#definition" ], "repo_url": "https://github.com/astrothesaurus/UAT", -    "skos": true +    "isSkos": true }, { "ontology_purl": "https://raw.githubusercontent.com/dini-ag-kim/hochschulfaechersystematik/master/hochschulfaechersystematik.ttl", @@ -77,7 +77,7 @@ "https://w3id.org/kim/hochschulfaechersystematik/" ], "label_property": "http://www.w3.org/2004/02/skos/core#prefLabel", -    "skos": true, +    "isSkos": true, "creator": [ "Michael Menzel", "Adrian Pohl" @@ -109,7 +109,7 @@ "base_uri": [ "https://purl.org/fidbaudigital/subjects" ], -    "skos": true, +    "isSkos": true, "repo_url": "https://gitlab.com/fid-bau/terminologie/fid-baudigital-faecherklassifikation" }, { @@ -127,7 +127,7 @@ ], "allow_download": true, "preferredPrefix": "oefos", -    "skos": true, +    "isSkos": true, "label_property": "http://www.w3.org/2004/02/skos/core#prefLabel", "creator": [ "Christoph Hoffmann" From 0f88d25d509d07881573b123ca22e5ae9902971d Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Tue, 20 Feb 2024 18:45:09 +0100 Subject: [PATCH 010/146] added skos tree configuration parameters for frontend in EBISPOT#625 and TIBHannover#1 --- .../ols/controller/api/v1/TopConceptEnum.java | 2 +- .../spot/ols/model/v1/V1OntologyConfig.java | 5 +++++ .../v1/mappers/V1OntologyMapper.java | 8 ++++++-- dataload/configs/skos_ontologies.json | 18 ++++++++++++++---- 4 files changed, 26 insertions(+), 7 deletions(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/TopConceptEnum.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/TopConceptEnum.java index 95aceccbc..3a86e8c58 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/TopConceptEnum.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/TopConceptEnum.java @@ -3,5 +3,5 @@ public enum TopConceptEnum { SCHEMA, TOPCONCEPTOF_PROPERTY, -    RELATIONSHIPS, +    RELATIONSHIPS }
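A note on the skosRoot values introduced by this patch: the V1OntologyMapper change below parses skosRoot with TopConceptEnum.valueOf, which throws IllegalArgumentException for any string other than SCHEMA, TOPCONCEPTOF_PROPERTY or RELATIONSHIPS. The following is a minimal, self-contained sketch of a defensive parse; the enum mirrors the real TopConceptEnum, while the class and helper names are illustrative only, not part of the patch:

    import java.util.Optional;

    public class SkosRootParseSketch {
        // Local mirror of TopConceptEnum so the sketch compiles on its own.
        enum TopConceptEnum { SCHEMA, TOPCONCEPTOF_PROPERTY, RELATIONSHIPS }

        // Hypothetical helper: tolerate unknown skosRoot values from skos_ontologies.json
        // instead of letting Enum.valueOf abort the whole ontology mapping.
        static Optional<TopConceptEnum> parseSkosRoot(String raw) {
            if (raw == null) return Optional.empty();
            try {
                return Optional.of(TopConceptEnum.valueOf(raw.trim()));
            } catch (IllegalArgumentException e) {
                return Optional.empty();
            }
        }

        public static void main(String[] args) {
            System.out.println(parseSkosRoot("TOPCONCEPTOF_PROPERTY")); // Optional[TOPCONCEPTOF_PROPERTY]
            System.out.println(parseSkosRoot("no-such-root"));          // Optional.empty
        }
    }

diff --git 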
a/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/V1OntologyConfig.java b/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/V1OntologyConfig.java index cbd2db376..79c6d23d7 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/V1OntologyConfig.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/V1OntologyConfig.java @@ -1,6 +1,7 @@ package uk.ac.ebi.spot.ols.model.v1; import com.google.gson.annotations.SerializedName; +import uk.ac.ebi.spot.ols.controller.api.v1.TopConceptEnum; import java.util.Collection; import java.util.HashSet; @@ -47,5 +48,9 @@ public class V1OntologyConfig { public Collection preferredRootTerms = new HashSet<>(); public boolean isSkos; + public boolean skosNarrower; + + public TopConceptEnum skosRoot; + public boolean allowDownload; } diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java index 22f62e4db..0aa47ffab 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java @@ -3,6 +3,7 @@ import com.google.gson.Gson; import com.google.gson.JsonElement; import com.google.gson.JsonObject; +import uk.ac.ebi.spot.ols.controller.api.v1.TopConceptEnum; import uk.ac.ebi.spot.ols.model.v1.V1Ontology; import uk.ac.ebi.spot.ols.model.v1.V1OntologyConfig; import uk.ac.ebi.spot.ols.repository.transforms.LocalizationTransform; @@ -61,9 +62,12 @@ public static V1Ontology mapOntology(JsonElement json, String lang) { ontology.config.preferredRootTerms = JsonHelper.getStrings(localizedJson, "preferredRootTerms"); ontology.config.isSkos = localizedJson.has("isSkos") && localizedJson.get("isSkos").getAsBoolean(); + if(ontology.config.isSkos) { + ontology.config.skosNarrower = localizedJson.has("skosNarrower") && localizedJson.get("skosNarrower").getAsBoolean(); + if (localizedJson.has("skosRoot")) + ontology.config.skosRoot = TopConceptEnum.valueOf(localizedJson.get("skosRoot").getAsString()); + } ontology.config.allowDownload = localizedJson.has("allowDownload") && localizedJson.get("allowDownload").getAsBoolean(); - - ontology.status = "LOADED"; ontology.numberOfTerms = Integer.parseInt(JsonHelper.getString(localizedJson, "numberOfClasses")); diff --git a/dataload/configs/skos_ontologies.json b/dataload/configs/skos_ontologies.json index 6aa93a543..15f626247 100644 --- a/dataload/configs/skos_ontologies.json +++ b/dataload/configs/skos_ontologies.json @@ -32,8 +32,10 @@ "https://doi.org/10.29172" ], "repo_url": "https://github.com/physh-org/PhySH", - "isSkos": true -}, + "isSkos": true, + "skosNarrower": false, + "skosRoot": "RELATIONSHIPS" + }, { "ontology_purl": "https://raw.githubusercontent.com/astrothesaurus/UAT/master/UAT.rdf", "title": "Unified Astronomy Thesaurus (UAT)", @@ -62,8 +64,10 @@ "http://www.w3.org/2004/02/skos/core#definition" ], "repo_url": "https://github.com/astrothesaurus/UAT", - "isSkos": true -}, + "isSkos": true, + "skosNarrower": false, + "skosRoot": "TOPCONCEPTOF_PROPERTY" + }, { "ontology_purl": "https://raw.githubusercontent.com/dini-ag-kim/hochschulfaechersystematik/master/hochschulfaechersystematik.ttl", "id": "hsfs", @@ -78,6 +82,8 @@ ], "label_property": "http://www.w3.org/2004/02/skos/core#prefLabel", "isSkos": true, + "skosNarrower": false, + "skosRoot": "TOPCONCEPTOF_PROPERTY", "creator": [ "Michael Menzel", "Adrian Pohl" @@ -110,6 +116,8 @@ 
"https://purl.org/fidbaudigital/subjects" ], "isSkos": true, + "skosNarrower": false, + "skosRoot": "TOPCONCEPTOF_PROPERTY", "repo_url": "https://gitlab.com/fid-bau/terminologie/fid-baudigital-faecherklassifikation" }, { @@ -128,6 +136,8 @@ "allow_download": true, "preferredPrefix": "oefos", "isSkos": true, + "skosNarrower": false, + "skosRoot": "SCHEMA", "label_property": "http://www.w3.org/2004/02/skos/core#prefLabel", "creator": [ "Christoph Hoffmann" From 4fd8952dd8541a865263f9f9ed6a80130d75fd14 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Tue, 9 Jan 2024 19:59:54 +0100 Subject: [PATCH 011/146] added ontology filter methods based on ontology metadata --- .../api/v1/V1OntologyController.java | 115 +++++++++++++++++- .../spot/ols/model/v1/V1OntologyConfig.java | 2 + 2 files changed, 116 insertions(+), 1 deletion(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java index a7bb975fc..95cf90b11 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java @@ -4,6 +4,7 @@ import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; +import org.springframework.data.domain.PageImpl; import org.springframework.data.domain.Pageable; import org.springframework.data.rest.webmvc.RepositoryLinksResource; import org.springframework.data.rest.webmvc.ResourceNotFoundException; @@ -19,10 +20,13 @@ import org.springframework.http.HttpStatus; import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; -import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.*; + +import io.swagger.v3.oas.annotations.Parameter; import uk.ac.ebi.spot.ols.model.v1.V1Ontology; import uk.ac.ebi.spot.ols.repository.v1.V1OntologyRepository; +import java.lang.reflect.*; +import java.util.*; import javax.servlet.http.HttpServletRequest; @@ -79,6 +83,115 @@ HttpEntity> getOntology( return new ResponseEntity<>( documentAssembler.toModel(document), HttpStatus.OK); } + @RequestMapping(path = "/filterby", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + HttpEntity> getOntologiesByMetadata( + @RequestParam(value = "schema", required = true) Collection schemas, + @RequestParam(value = "classification", required = true) Collection classifications, + @Parameter(description = "Set to true (default setting is false) for intersection (default behavior is union) of classifications.") + @RequestParam(value = "exclusive", required = false, defaultValue = "false") boolean exclusive, + @PageableDefault(size = 100, page = 0) Pageable pageable, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + PagedResourcesAssembler assembler + ) throws ResourceNotFoundException { + Set tempSet = new HashSet<>(); + if(exclusive) + tempSet.addAll(exclusiveFilter(schemas,classifications,pageable,lang)); + else + tempSet.addAll(filter(schemas,classifications,pageable,lang)); + List tempList = new ArrayList(); + tempList.addAll(tempSet); + final int start = (int)pageable.getOffset(); + final int end = Math.min((start + pageable.getPageSize()), tempSet.size()); + Page document = new PageImpl<>(tempList.subList(start, end), 
pageable, tempSet.size()); + + return new ResponseEntity<>( assembler.toModel(document, documentAssembler), HttpStatus.OK); + } + + public Set filter(Collection schemas, Collection classifications, Pageable pageable, String lang){ + Set tempSet = new HashSet(); + Set filteredSet = new HashSet(); + Page document = ontologyRepository.getAll(lang, pageable); + tempSet.addAll(document.getContent()); + while(document.hasNext()){ + pageable = pageable.next(); + document = ontologyRepository.getAll(lang, pageable); + tempSet.addAll(document.getContent()); + } + + for (V1Ontology ontology : tempSet){ + for (Field field : ontology.config.getClass().getDeclaredFields()){ + if (schemas.contains(field.getName())){ + try { + if(field.get(ontology.config) != null) + if (Collection.class.isAssignableFrom(field.getType())) { + for (String ontologyClassification : (Collection) field.get(ontology.config)){ + if(classifications.contains(ontologyClassification)) + filteredSet.add(ontology); + } + } else if (String.class.isAssignableFrom(field.getType())) { + if(field.get(ontology.config) != null) + if(classifications.contains(field.get(ontology.config))) + filteredSet.add(ontology); + } + } catch (IllegalAccessException e) { + e.printStackTrace(); + //throw new RuntimeException(e); + } + } + } + } + return filteredSet; + } + + public Set exclusiveFilter(Collection schemas, Collection classifications, Pageable pageable, String lang){ + Set tempSet = new HashSet(); + Set filteredSet = new HashSet(); + Page document = ontologyRepository.getAll(lang, pageable); + tempSet.addAll(document.getContent()); + while(document.hasNext()){ + pageable = pageable.next(); + document = ontologyRepository.getAll(lang, pageable); + tempSet.addAll(document.getContent()); + } + + for (V1Ontology ontology : tempSet){ + Set fieldSet = new HashSet<>(); + for (Field field : ontology.config.getClass().getDeclaredFields()){ + fieldSet.add(field.getName()); + } + if (fieldSet.containsAll(schemas)){ + Set tempClassifications = new HashSet(); + for (Field field : ontology.config.getClass().getDeclaredFields()){ + if (Collection.class.isAssignableFrom(field.getType())){ + try { + if(field.get(ontology.config) != null) + for (String classification : classifications){ + if(((Collection) field.get(ontology.config)).contains(classification)) + tempClassifications.add(classification); + } + + } catch (IllegalAccessException e) { + throw new RuntimeException(e); + } + } + else if (String.class.isAssignableFrom(field.getType())) { + try { + if(field.get(ontology.config) != null) + if(classifications.contains((String) field.get(ontology.config))) + tempClassifications.add( (String) field.get(ontology.config)); + } catch (IllegalAccessException e) { + throw new RuntimeException(e); + } + } + + } + if(tempClassifications.containsAll(classifications)) + filteredSet.add(ontology); + } + } + return filteredSet; + } + @ResponseStatus(value = HttpStatus.NOT_FOUND, reason = "EntityModel not found") @ExceptionHandler(ResourceNotFoundException.class) public void handleError(HttpServletRequest req, Exception exception) { diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/V1OntologyConfig.java b/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/V1OntologyConfig.java index cbd2db376..ae723ce5c 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/V1OntologyConfig.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/V1OntologyConfig.java @@ -22,6 +22,8 @@ public class V1OntologyConfig { public String tracker; public String logo; 
public Collection creators; +    public Collection collection; +    public Collection subject; //public Map> annotations; public Object annotations; From bc6edaf9554ac86e583ae4eb93e36ae4d1a6bfd7 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Wed, 10 Jan 2024 12:22:05 +0100 Subject: [PATCH 012/146] enabled collection and subject variables in config --- .../ebi/spot/ols/controller/api/v1/V1OntologyController.java | 3 +-- .../java/uk/ac/ebi/spot/ols/model/v1/V1OntologyConfig.java | 1 + .../ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java | 2 ++ 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java index 95cf90b11..2e3a6a4e1 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java @@ -134,8 +134,7 @@ public Set filter(Collection schemas, Collection cla filteredSet.add(ontology); } } catch (IllegalAccessException e) { -                        e.printStackTrace(); -                        //throw new RuntimeException(e); +                        throw new RuntimeException(e); } } diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/V1OntologyConfig.java b/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/V1OntologyConfig.java index ae723ce5c..2002f90dc 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/V1OntologyConfig.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/V1OntologyConfig.java @@ -22,6 +22,7 @@ public class V1OntologyConfig { public String tracker; public String logo; public Collection creators; +    public Collection collection; public Collection subject; //public Map> annotations; diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java index 22f62e4db..9615c24e6 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java @@ -43,6 +43,8 @@ public static V1Ontology mapOntology(JsonElement json, String lang) { ontology.config.tracker = JsonHelper.getString(localizedJson, "tracker"); ontology.config.logo = JsonHelper.getString(localizedJson, "logo"); ontology.config.creators = JsonHelper.getStrings(localizedJson, "creators"); +    ontology.config.collection = JsonHelper.getStrings(localizedJson, "collection"); +    ontology.config.subject = JsonHelper.getStrings(localizedJson, "subject"); ontology.config.annotations = gson.fromJson(localizedJson.get("annotations"), Map.class); ontology.config.fileLocation = JsonHelper.getString(localizedJson, "ontology_purl"); ontology.config.oboSlims = localizedJson.has("oboSlims") && localizedJson.get("oboSlims").getAsBoolean(); From 02bc447520aa50c657f5a968364b497d2f2eff1b Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Thu, 18 Jan 2024 19:36:26 +0100 Subject: [PATCH 013/146] added composite classification variable for filtering --- .../java/uk/ac/ebi/spot/ols/model/v1/V1OntologyConfig.java | 5 +++++ .../java/uk/ac/ebi/spot/ols/repository/v1/JsonHelper.java | 5 +++++ .../ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java | 1 + 3 files changed, 11 insertions(+)
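The filterby logic added in PATCH 011 (and cleaned up just above) matches request parameters against V1OntologyConfig by reflection: each requested schema names a declared field, and a classification matches when it equals the field's String value or occurs in its Collection value. A runnable, self-contained sketch of that matching step, using stand-in types rather than the real OLS classes:

    import java.lang.reflect.Field;
    import java.util.*;

    public class ReflectionFilterSketch {
        // Stand-in for V1OntologyConfig: one Collection-typed and one String-typed field.
        static class Config {
            public Collection<String> subject = List.of("physics");
            public String license = "CC-BY";
        }

        // Returns true when any requested schema field holds one of the classifications.
        static boolean matches(Object config, Collection<String> schemas, Collection<String> classifications)
                throws IllegalAccessException {
            for (Field field : config.getClass().getDeclaredFields()) {
                if (!schemas.contains(field.getName())) continue;
                Object value = field.get(config);
                if (value == null) continue;
                if (value instanceof Collection) {
                    for (Object v : (Collection<?>) value)
                        if (classifications.contains(v)) return true;
                } else if (value instanceof String && classifications.contains(value)) {
                    return true;
                }
            }
            return false;
        }

        public static void main(String[] args) throws Exception {
            System.out.println(matches(new Config(), Set.of("subject"), Set.of("physics"))); // true
            System.out.println(matches(new Config(), Set.of("license"), Set.of("physics"))); // false
        }
    }

diff --git 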
a/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/V1OntologyConfig.java b/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/V1OntologyConfig.java index 2002f90dc..35270ebc4 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/V1OntologyConfig.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/V1OntologyConfig.java @@ -1,9 +1,12 @@ package uk.ac.ebi.spot.ols.model.v1; +import com.google.gson.JsonObject; import com.google.gson.annotations.SerializedName; import java.util.Collection; import java.util.HashSet; +import java.util.Map; +import java.util.Optional; public class V1OntologyConfig { @@ -26,6 +29,8 @@ public class V1OntologyConfig { public Collection collection; public Collection subject; //public Map> annotations; + +    public JsonObject classifications; public Object annotations; public String fileLocation; diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/JsonHelper.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/JsonHelper.java index 3ee0d50af..785263937 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/JsonHelper.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/JsonHelper.java @@ -6,6 +6,7 @@ import com.google.gson.JsonObject; import java.util.List; +import java.util.Optional; import java.util.stream.Collectors; public class JsonHelper { @@ -64,6 +65,10 @@ public static List getStrings(JsonObject json, String predicate) { return getValues(json, predicate).stream().map(JsonHelper::objectToString).collect(Collectors.toList()); } +    public static JsonObject getObject(JsonObject json, String predicate) { +        return getValues(json, predicate).stream().map(v -> v.getAsJsonObject()).findFirst().get(); +    } + public static List getObjects(JsonObject json, String predicate) { return getValues(json, predicate).stream().map(v -> v.getAsJsonObject()).collect(Collectors.toList()); } diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java index 9615c24e6..b74a11fbd 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java @@ -45,6 +45,7 @@ public static V1Ontology mapOntology(JsonElement json, String lang) { ontology.config.creators = JsonHelper.getStrings(localizedJson, "creators"); ontology.config.collection = JsonHelper.getStrings(localizedJson, "collection"); ontology.config.subject = JsonHelper.getStrings(localizedJson, "subject"); +    ontology.config.classifications = JsonHelper.getObject(localizedJson,"classifications"); ontology.config.annotations = gson.fromJson(localizedJson.get("annotations"), Map.class); ontology.config.fileLocation = JsonHelper.getString(localizedJson, "ontology_purl"); ontology.config.oboSlims = localizedJson.has("oboSlims") && localizedJson.get("oboSlims").getAsBoolean(); From 38265db3c767504f0be96e9b5e0472a49192f2f0 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Fri, 2 Feb 2024 22:26:49 +0100 Subject: [PATCH 014/146] enabled filtering from composite type --- .../ebi/spot/ols/repository/v1/JsonHelper.java | 13 +++++++++---- .../v1/mappers/V1OntologyMapper.java | 18 +++++++++++++----- 2 files changed, 22 insertions(+), 9 deletions(-)
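PATCH 014 below replaces the flat collection/subject mapping with a walk over the composite classifications array: a list of JSON objects, each keyed by a schema such as collection or subject. A hedged sketch of that unpacking with plain Gson, the JSON library the mapper already uses; the class name and sample values are illustrative:

    import com.google.gson.JsonElement;
    import com.google.gson.JsonObject;
    import com.google.gson.JsonParser;
    import java.util.HashSet;
    import java.util.Set;

    public class ClassificationsSketch {
        public static void main(String[] args) {
            // Illustrative input shaped like an ontology config entry.
            String json = "{\"classifications\":[{\"collection\":[\"NFDI4Ing\"]},{\"subject\":[\"physics\"]}]}";
            JsonObject root = JsonParser.parseString(json).getAsJsonObject();

            Set<String> collections = new HashSet<>();
            Set<String> subjects = new HashSet<>();
            for (JsonElement e : root.getAsJsonArray("classifications")) {
                JsonObject o = e.getAsJsonObject();
                if (o.has("collection"))
                    o.getAsJsonArray("collection").forEach(v -> collections.add(v.getAsString()));
                if (o.has("subject"))
                    o.getAsJsonArray("subject").forEach(v -> subjects.add(v.getAsString()));
            }
            System.out.println(collections); // [NFDI4Ing]
            System.out.println(subjects);    // [physics]
        }
    }

diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/JsonHelper.java 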
b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/JsonHelper.java index 785263937..c5b13edbf 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/JsonHelper.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/JsonHelper.java @@ -61,12 +61,17 @@ public static List getValues(JsonObject json, String predicate) { return List.of(value); } - public static List getStrings(JsonObject json, String predicate) { - return getValues(json, predicate).stream().map(JsonHelper::objectToString).collect(Collectors.toList()); + public static JsonObject getValue(JsonObject json, String predicate) { + JsonElement value = json.get(predicate); + if (value != null) + if (value.isJsonObject()) { + return value.getAsJsonObject(); + } + return null; } - public static JsonObject getObject(JsonObject json, String predicate) { - return getValues(json, predicate).stream().map(v -> v.getAsJsonObject()).findFirst().get(); + public static List getStrings(JsonObject json, String predicate) { + return getValues(json, predicate).stream().map(JsonHelper::objectToString).collect(Collectors.toList()); } public static List getObjects(JsonObject json, String predicate) { diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java index b74a11fbd..479c6a886 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java @@ -8,8 +8,7 @@ import uk.ac.ebi.spot.ols.repository.transforms.LocalizationTransform; import uk.ac.ebi.spot.ols.repository.v1.JsonHelper; -import java.util.Map; -import java.util.Objects; +import java.util.*; public class V1OntologyMapper { @@ -43,9 +42,18 @@ public static V1Ontology mapOntology(JsonElement json, String lang) { ontology.config.tracker = JsonHelper.getString(localizedJson, "tracker"); ontology.config.logo = JsonHelper.getString(localizedJson, "logo"); ontology.config.creators = JsonHelper.getStrings(localizedJson, "creators"); - ontology.config.collection = JsonHelper.getStrings(localizedJson, "collection"); - ontology.config.subject = JsonHelper.getStrings(localizedJson, "subject"); - ontology.config.classifications = JsonHelper.getObject(localizedJson,"classifications"); + List objects = JsonHelper.getObjects(localizedJson,"classifications"); + Set collectionSet = new HashSet(); + Set subjectSet = new HashSet(); + for (JsonObject object : objects){ + if(object.has("collection")) + collectionSet.addAll(JsonHelper.getStrings(object,"collection")); + if(object.has("subject")) + subjectSet.addAll(JsonHelper.getStrings(object,"subject")); + } + ontology.config.collection = collectionSet; + ontology.config.subject = subjectSet; + ontology.config.annotations = gson.fromJson(localizedJson.get("annotations"), Map.class); ontology.config.fileLocation = JsonHelper.getString(localizedJson, "ontology_purl"); ontology.config.oboSlims = localizedJson.has("oboSlims") && localizedJson.get("oboSlims").getAsBoolean(); From 34b8be886146c352f5267d2294b0ee49b5c08dce Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Fri, 9 Feb 2024 21:54:15 +0100 Subject: [PATCH 015/146] filtering for filterby, search, select, suggest methods --- .../api/v1/V1OntologyController.java | 90 +-------- .../controller/api/v1/V1SearchController.java | 9 + .../controller/api/v1/V1SelectController.java | 9 + 
.../api/v1/V1SuggestController.java | 10 + .../ols/repository/solr/OlsSolrClient.java | 23 +++ .../repository/v1/V1OntologyRepository.java | 178 +++++++++++++++++- 6 files changed, 229 insertions(+), 90 deletions(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java index 2e3a6a4e1..acf98d202 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java @@ -93,11 +93,7 @@ HttpEntity> getOntologiesByMetadata( @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, PagedResourcesAssembler assembler ) throws ResourceNotFoundException { - Set tempSet = new HashSet<>(); - if(exclusive) - tempSet.addAll(exclusiveFilter(schemas,classifications,pageable,lang)); - else - tempSet.addAll(filter(schemas,classifications,pageable,lang)); + Set tempSet = ontologyRepository.filter(schemas,classifications,exclusive,lang); List tempList = new ArrayList(); tempList.addAll(tempSet); final int start = (int)pageable.getOffset(); @@ -107,90 +103,6 @@ HttpEntity> getOntologiesByMetadata( return new ResponseEntity<>( assembler.toModel(document, documentAssembler), HttpStatus.OK); } - public Set filter(Collection schemas, Collection classifications, Pageable pageable, String lang){ - Set tempSet = new HashSet(); - Set filteredSet = new HashSet(); - Page document = ontologyRepository.getAll(lang, pageable); - tempSet.addAll(document.getContent()); - while(document.hasNext()){ - pageable = pageable.next(); - document = ontologyRepository.getAll(lang, pageable); - tempSet.addAll(document.getContent()); - } - - for (V1Ontology ontology : tempSet){ - for (Field field : ontology.config.getClass().getDeclaredFields()){ - if (schemas.contains(field.getName())){ - try { - if(field.get(ontology.config) != null) - if (Collection.class.isAssignableFrom(field.getType())) { - for (String ontologyClassification : (Collection) field.get(ontology.config)){ - if(classifications.contains(ontologyClassification)) - filteredSet.add(ontology); - } - } else if (String.class.isAssignableFrom(field.getType())) { - if(field.get(ontology.config) != null) - if(classifications.contains(field.get(ontology.config))) - filteredSet.add(ontology); - } - } catch (IllegalAccessException e) { - throw new RuntimeException(e); - } - } - } - } - return filteredSet; - } - - public Set exclusiveFilter(Collection schemas, Collection classifications, Pageable pageable, String lang){ - Set tempSet = new HashSet(); - Set filteredSet = new HashSet(); - Page document = ontologyRepository.getAll(lang, pageable); - tempSet.addAll(document.getContent()); - while(document.hasNext()){ - pageable = pageable.next(); - document = ontologyRepository.getAll(lang, pageable); - tempSet.addAll(document.getContent()); - } - - for (V1Ontology ontology : tempSet){ - Set fieldSet = new HashSet<>(); - for (Field field : ontology.config.getClass().getDeclaredFields()){ - fieldSet.add(field.getName()); - } - if (fieldSet.containsAll(schemas)){ - Set tempClassifications = new HashSet(); - for (Field field : ontology.config.getClass().getDeclaredFields()){ - if (Collection.class.isAssignableFrom(field.getType())){ - try { - if(field.get(ontology.config) != null) - for (String classification : classifications){ - if(((Collection) field.get(ontology.config)).contains(classification)) - 
field.get(ontology.config)).contains(classification)) -                                    tempClassifications.add(classification); -                            } - -                        } catch (IllegalAccessException e) { -                            throw new RuntimeException(e); -                        } -                    } -                    else if (String.class.isAssignableFrom(field.getType())) { -                        try { -                            if(field.get(ontology.config) != null) -                                if(classifications.contains((String) field.get(ontology.config))) -                                    tempClassifications.add( (String) field.get(ontology.config)); -                        } catch (IllegalAccessException e) { -                            throw new RuntimeException(e); -                        } -                    } - -                } -                if(tempClassifications.containsAll(classifications)) -                    filteredSet.add(ontology); -            } -        } -        return filteredSet; -    } - @ResponseStatus(value = HttpStatus.NOT_FOUND, reason = "EntityModel not found") @ExceptionHandler(ResourceNotFoundException.class) public void handleError(HttpServletRequest req, Exception exception) { diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java index a1f4ac641..9b0a9b76d 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java @@ -3,12 +3,15 @@ import com.google.gson.Gson; import com.google.gson.JsonObject; import com.google.gson.JsonParser; +import io.swagger.v3.oas.annotations.Parameter; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.impl.HttpSolrClient; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.common.SolrDocument; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.domain.Pageable; +import org.springframework.data.web.PageableDefault; import org.springframework.http.MediaType; import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.RequestMapping; @@ -50,6 +53,10 @@ public class V1SearchController { @RequestMapping(path = "/api/search", produces = {MediaType.APPLICATION_JSON_VALUE}, method = RequestMethod.GET) public void search( @RequestParam("q") String query, + @RequestParam(value = "schema", required = false) Collection schemas, + @RequestParam(value = "classification", required = false) Collection classifications, + @Parameter(description = "Set to true (default setting is false) for intersection (default behavior is union) of classifications.") + @RequestParam(value = "exclusive", required = false, defaultValue = "false") boolean exclusive, @RequestParam(value = "ontology", required = false) Collection ontologies, @RequestParam(value = "type", required = false) Collection types, @RequestParam(value = "slim", required = false) Collection slims, @@ -70,6 +77,8 @@ public void search( HttpServletResponse response ) throws IOException, SolrServerException { + ontologies = ontologyRepository.filterOntologyIDs(schemas,classifications,ontologies,exclusive,lang); + final SolrQuery solrQuery = new SolrQuery(); // 1 if (queryFields == null) {
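Every search, select and suggest request now runs through filterOntologyIDs before the Solr query is assembled. Its contract, as visible in the repository code further below: null means no restriction; the set matched by the schema/classification filters is combined with an explicitly requested ontology list by union (default) or intersection (exclusive=true); and the sentinel id nosuchontologyfound is returned when nothing matches, so the Solr ontology filter matches no documents. A condensed, runnable sketch of that combination step (method names are illustrative; the real method also derives the matched set from the per-ontology classifications):

    import java.util.*;

    public class OntologyIdFilterSketch {
        static final Set<String> NO_MATCH = Set.of("nosuchontologyfound");

        // Combine the ids matched by schema/classification filtering with the ids the caller asked for.
        static Collection<String> combine(Set<String> matchedByFilter, Collection<String> requested, boolean exclusive) {
            if (matchedByFilter == null) return requested;  // no schema/classification filter given
            if (requested == null || requested.isEmpty())
                return matchedByFilter.isEmpty() ? NO_MATCH : matchedByFilter;
            Set<String> result = new HashSet<>(requested);
            if (exclusive) result.retainAll(matchedByFilter); // intersection
            else result.addAll(matchedByFilter);              // union
            return result.isEmpty() ? NO_MATCH : result;
        }

        public static void main(String[] args) {
            System.out.println(combine(Set.of("uat"), List.of("uat", "hsfs"), true)); // [uat]
            System.out.println(combine(Set.of(), List.of("uat"), true));              // [nosuchontologyfound]
        }
    }

diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java index eeb0008ed..c56ad7ed5 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java @@ -3,12 +3,15 @@ import com.google.gson.Gson; import com.google.gson.JsonObject; import com.google.gson.JsonParser; +import 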
io.swagger.v3.oas.annotations.Parameter; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.impl.HttpSolrClient; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.common.SolrDocument; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.domain.Pageable; +import org.springframework.data.web.PageableDefault; import org.springframework.http.MediaType; import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.RequestMapping; @@ -42,6 +45,10 @@ public class V1SelectController { @RequestMapping(path = "/api/select", produces = {MediaType.APPLICATION_JSON_VALUE}, method = RequestMethod.GET) public void select( @RequestParam("q") String query, + @RequestParam(value = "schema", required = false) Collection schemas, + @RequestParam(value = "classification", required = false) Collection classifications, + @Parameter(description = "Set to true (default setting is false) for intersection (default behavior is union) of classifications.") + @RequestParam(value = "exclusive", required = false, defaultValue = "false") boolean exclusive, @RequestParam(value = "ontology", required = false) Collection ontologies, @RequestParam(value = "type", required = false) Collection types, @RequestParam(value = "slim", required = false) Collection slims, @@ -56,6 +63,8 @@ public void select( HttpServletResponse response ) throws IOException, SolrServerException { + ontologies = ontologyRepository.filterOntologyIDs(schemas,classifications,ontologies,exclusive,lang); + final SolrQuery solrQuery = new SolrQuery(); // 1 String queryLc = query.toLowerCase(); diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SuggestController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SuggestController.java index 2cc170795..f9b09e288 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SuggestController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SuggestController.java @@ -1,6 +1,7 @@ package uk.ac.ebi.spot.ols.controller.api.v1; import com.google.gson.Gson; +import io.swagger.v3.oas.annotations.Parameter; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.impl.HttpSolrClient; @@ -8,6 +9,8 @@ import org.apache.solr.client.solrj.util.ClientUtils; import org.apache.solr.common.SolrDocument; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.domain.Pageable; +import org.springframework.data.web.PageableDefault; import org.springframework.http.MediaType; import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.RequestMapping; @@ -36,12 +39,19 @@ public class V1SuggestController { @RequestMapping(path = "/api/suggest", produces = {MediaType.APPLICATION_JSON_VALUE}, method = RequestMethod.GET) public void suggest( @RequestParam("q") String query, + @RequestParam(value = "schema", required = false) Collection schemas, + @RequestParam(value = "classification", required = false) Collection classifications, + @Parameter(description = "Set to true (default setting is false) for intersection (default behavior is union) of classifications.") + @RequestParam(value = "exclusive", required = false, defaultValue = "false") boolean exclusive, @RequestParam(value = "ontology", required = false) 
Collection ontologies, @RequestParam(value = "rows", defaultValue = "10") Integer rows, @RequestParam(value = "start", defaultValue = "0") Integer start, + @RequestParam(value = "lang", defaultValue = "en") String lang, HttpServletResponse response ) throws IOException, SolrServerException { + ontologies = ontologyRepository.filterOntologyIDs(schemas,classifications,ontologies,exclusive,lang); + final SolrQuery solrQuery = new SolrQuery(); String queryLc = query.toLowerCase(); diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/solr/OlsSolrClient.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/solr/OlsSolrClient.java index a1cb659e4..cba66861b 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/solr/OlsSolrClient.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/solr/OlsSolrClient.java @@ -23,8 +23,10 @@ import javax.validation.constraints.NotNull; import java.io.IOException; import java.net.URLDecoder; +import java.util.HashSet; import java.util.LinkedHashMap; import java.util.Map; +import java.util.Set; import java.util.stream.Collectors; @@ -101,6 +103,27 @@ public JsonElement getFirst(OlsSolrQuery query) { return getOlsEntityFromSolrResult(qr.getResults().get(0)); } +    public JsonElement getByIndex(OlsSolrQuery query, int i) { + +        QueryResponse qr = runSolrQuery(query, null); + +        if(qr.getResults().getNumFound() <= i) { +            logger.debug("Expected at least {} results for solr getByIndex for solr query = {}", i + 1, query.constructQuery().jsonStr()); +            throw new RuntimeException("Expected at least " + (i + 1) + " results for solr getByIndex"); +        } + +        return getOlsEntityFromSolrResult(qr.getResults().get(i)); +    } + +    public Set getSet(OlsSolrQuery query){ +        Set tempSet = new HashSet<>(); +        QueryResponse qr = runSolrQuery(query, null); +        for (int i = 0; i < qr.getResults().size(); i++) +            tempSet.add(getOlsEntityFromSolrResult(qr.getResults().get(i))); +        return tempSet; +    } + diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1OntologyRepository.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1OntologyRepository.java --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1OntologyRepository.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1OntologyRepository.java +    public Set getSet(String lang){ +    	Set tempSet = new HashSet<>(); +    	Validation.validateLang(lang); + +    	OlsSolrQuery query = new OlsSolrQuery(); +    	query.addFilter("type", List.of("ontology"), SearchType.WHOLE_FIELD); + +    	for (JsonElement element : solrClient.getSet(query)) +    		tempSet.add(V1OntologyMapper.mapOntology(element, lang)); +        return tempSet; +    } + public Page getAll(String lang, Pageable pageable) { Validation.validateLang(lang); @@ -42,4 +57,165 @@ return solrClient.searchSolrPaginated(query, pageable) .map(result -> V1OntologyMapper.mapOntology(result, lang)); } + +    public Collection filterOntologyIDs(Collection schemas,Collection classifications, Collection ontologies, boolean exclusiveFilter, String lang){ +        if (schemas != null) +            schemas.remove(""); +        if (classifications != null) +            classifications.remove(""); +        if(ontologies != null) +            ontologies.remove(""); +        if((schemas == null || schemas.size() == 0 ) && (classifications == null || classifications.size() == 0 ) && (ontologies == null || ontologies.size() == 0)) +            return null; +        if ((schemas == null || schemas.size() == 0 ) || (classifications == null || classifications.size() == 0 )) +            return ontologies; +        Set documents = filter(schemas, classifications, exclusiveFilter,lang); +        Set filteredOntologySet = new HashSet(); +        for (V1Ontology document : documents){ +            filteredOntologySet.add(document.ontologyId); +        } +        System.out.println("filteredOntologySet: "+filteredOntologySet); +        if (( ontologies == null || ontologies.size() == 0) && filteredOntologySet.size() > 0) +            return filteredOntologySet; +        else if (schemas != null) +            if ((ontologies == null || ontologies.size() == 0) && (schemas.size() > 0 || 
classifications.size() > 0 )) + return new HashSet(Arrays.asList("nosuchontologyfound")); + + Set postFilterOntologySet; + + if(ontologies == null){ + ontologies = new HashSet(); + System.out.println("ontologies == null"); + } else { + ontologies = new HashSet(ontologies); + System.out.println("ontologies <> null"); + } + + System.out.println("ontologies: "+ontologies); + if (exclusiveFilter){ + postFilterOntologySet = Sets.intersection(filteredOntologySet,new HashSet(ontologies)); + System.out.println("intersection"); + } else { + postFilterOntologySet = Sets.union(filteredOntologySet,new HashSet(ontologies)); + System.out.println("union"); + } + if(postFilterOntologySet.size() == 0) + postFilterOntologySet = new HashSet(Arrays.asList("nosuchontologyfound")); + return postFilterOntologySet; + } + + Set union(Collection a, Collection b ) { + Set union = new HashSet(); + for (String s : a){ + union.add(s); + } + for (String s : b){ + union.add(s); + } + return union; + } + + Set intersection(Collection a, Collection b ) { + Set intersection = new HashSet(); + for (String s1 : a){ + for (String s2 : b){ + if (s1.equals(s2)) + intersection.add(s1); + } + } + return intersection; + } + + public Set filter(Collection schemas, Collection classifications, boolean exclusive, String lang){ + if(exclusive) + return exclusiveFilter(schemas,classifications,lang); + else + return inclusiveFilter(schemas,classifications,lang); + } + public Set inclusiveFilter(Collection schemas, Collection classifications, String lang){ + Set tempSet = new HashSet(); + Set filteredSet = new HashSet(); +/* Page document = getAll(lang, pageable); + tempSet.addAll(document.getContent()); + while(document.hasNext()){ + pageable = pageable.next(); + document = getAll(lang, pageable); + tempSet.addAll(document.getContent()); + }*/ + tempSet.addAll(getSet(lang)); + + for (V1Ontology ontology : tempSet){ + for (Field field : ontology.config.getClass().getDeclaredFields()){ + if (schemas.contains(field.getName())){ + try { + if(field.get(ontology.config) != null) + if (Collection.class.isAssignableFrom(field.getType())) { + for (String ontologyClassification : (Collection) field.get(ontology.config)){ + if(classifications.contains(ontologyClassification)) + filteredSet.add(ontology); + } + } else if (String.class.isAssignableFrom(field.getType())) { + if(field.get(ontology.config) != null) + if(classifications.contains(field.get(ontology.config))) + filteredSet.add(ontology); + } + } catch (IllegalAccessException e) { + throw new RuntimeException(e); + } + } + } + } + return filteredSet; + } + + public Set exclusiveFilter(Collection schemas, Collection classifications, String lang){ + Set tempSet = new HashSet(); + Set filteredSet = new HashSet(); +/* Page document = getAll(lang, pageable); + tempSet.addAll(document.getContent()); + while(document.hasNext()){ + pageable = pageable.next(); + document = getAll(lang, pageable); + tempSet.addAll(document.getContent()); + }*/ + tempSet.addAll(getSet(lang)); + + for (V1Ontology ontology : tempSet){ + Set fieldSet = new HashSet<>(); + for (Field field : ontology.config.getClass().getDeclaredFields()){ + fieldSet.add(field.getName()); + } + if (fieldSet.containsAll(schemas)){ + Set tempClassifications = new HashSet(); + for (Field field : ontology.config.getClass().getDeclaredFields()){ + if (Collection.class.isAssignableFrom(field.getType())){ + try { + if(field.get(ontology.config) != null) + for (String classification : classifications){ + if(((Collection) 
field.get(ontology.config)).contains(classification)) + tempClassifications.add(classification); + } + + } catch (IllegalAccessException e) { + throw new RuntimeException(e); + } + } + else if (String.class.isAssignableFrom(field.getType())) { + try { + if(field.get(ontology.config) != null) + if(classifications.contains((String) field.get(ontology.config))) + tempClassifications.add( (String) field.get(ontology.config)); + } catch (IllegalAccessException e) { + throw new RuntimeException(e); + } + } + + } + if(tempClassifications.containsAll(classifications)) + filteredSet.add(ontology); + } + } + return filteredSet; + } + } From 367ff54078c44f7cdf73e62d59625fa54a42a762 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Tue, 13 Feb 2024 14:47:56 +0100 Subject: [PATCH 016/146] added classifications as a composite variable to the config --- .../main/java/uk/ac/ebi/spot/ols/model/v1/V1OntologyConfig.java | 2 +- .../ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/V1OntologyConfig.java b/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/V1OntologyConfig.java index 35270ebc4..1d6fa368a 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/V1OntologyConfig.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/V1OntologyConfig.java @@ -30,7 +30,7 @@ public class V1OntologyConfig { public Collection subject; //public Map> annotations; - public JsonObject classifications; + public Object classifications; public Object annotations; public String fileLocation; diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java index 479c6a886..38aaea6cc 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java @@ -53,7 +53,7 @@ public static V1Ontology mapOntology(JsonElement json, String lang) { } ontology.config.collection = collectionSet; ontology.config.subject = subjectSet; - + ontology.config.classifications = gson.fromJson(localizedJson.get("classifications"), Collection.class); ontology.config.annotations = gson.fromJson(localizedJson.get("annotations"), Map.class); ontology.config.fileLocation = JsonHelper.getString(localizedJson, "ontology_purl"); ontology.config.oboSlims = localizedJson.has("oboSlims") && localizedJson.get("oboSlims").getAsBoolean(); From c551e677c124ac72525348ae9b8eda4990c363ee Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Tue, 13 Feb 2024 15:13:15 +0100 Subject: [PATCH 017/146] implemented call for listing available schema keys --- .../controller/api/v1/V1OntologyController.java | 6 ++++++ .../ols/repository/v1/V1OntologyRepository.java | 17 +++++++++++++++-- 2 files changed, 21 insertions(+), 2 deletions(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java index acf98d202..adf453973 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java @@ -103,6 +103,12 @@ HttpEntity> getOntologiesByMetadata( return new 
ResponseEntity<>( assembler.toModel(document, documentAssembler), HttpStatus.OK); } +    @RequestMapping(path = "/schemakeys", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) +    HttpEntity> filterKeys( +            @RequestParam(value = "lang", required = false, defaultValue = "en") String lang){ +        return new ResponseEntity<>(ontologyRepository.getSchemaKeys(lang), HttpStatus.OK); +    } + @ResponseStatus(value = HttpStatus.NOT_FOUND, reason = "EntityModel not found") @ExceptionHandler(ResourceNotFoundException.class) public void handleError(HttpServletRequest req, Exception exception) { diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1OntologyRepository.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1OntologyRepository.java index 4adb3c68b..d432e2415 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1OntologyRepository.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1OntologyRepository.java @@ -29,8 +29,8 @@ public V1Ontology get(String ontologyId, String lang) { Validation.validateOntologyId(ontologyId); OlsSolrQuery query = new OlsSolrQuery(); -        query.addFilter("type", List.of("ontology"), SearchType.WHOLE_FIELD); -        query.addFilter("ontologyId", List.of(ontologyId), SearchType.WHOLE_FIELD); +    	query.addFilter("type", List.of("ontology"), SearchType.WHOLE_FIELD); +    	query.addFilter("ontologyId", List.of(ontologyId), SearchType.WHOLE_FIELD); return V1OntologyMapper.mapOntology(solrClient.getFirst(query), lang); } @@ -218,4 +218,17 @@ else if (String.class.isAssignableFrom(field.getType())) { return filteredSet; } +    public Set getSchemaKeys(String lang){ +        Set tempSet = new HashSet(); +        tempSet.addAll(getSet(lang)); +        Set keys = new HashSet<>(); +        for (V1Ontology ontology : tempSet){ +            Collection temp = (Collection) ontology.config.classifications; +            for (Object o : temp){ +                keys.addAll(((Map>) o).keySet()); +            } +        } +        return keys; +    } + } From c84cca6c118e668c095cff90f1a039bdb6bf1f1d Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Tue, 13 Feb 2024 16:54:05 +0100 Subject: [PATCH 018/146] implemented schema values method --- .../controller/api/v1/V1OntologyController.java | 7 +++++++ .../ols/repository/v1/V1OntologyRepository.java | 16 ++++++++++++++++ 2 files changed, 23 insertions(+) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java index adf453973..9c3f5198e 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java @@ -109,6 +109,13 @@ HttpEntity> filterKeys( return new ResponseEntity<>(ontologyRepository.getSchemaKeys(lang), HttpStatus.OK); } +    @RequestMapping(path = "/schemavalues", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) +    HttpEntity> filterValues( +            @RequestParam(value = "schema", required = true) Collection schemas, +            @RequestParam(value = "lang", required = false, defaultValue = "en") String lang){ +        return new ResponseEntity<>(ontologyRepository.getSchemaValues(schemas,lang), HttpStatus.OK); +    } + @ResponseStatus(value = HttpStatus.NOT_FOUND, reason = "EntityModel not found") @ExceptionHandler(ResourceNotFoundException.class) public void handleError(HttpServletRequest req, Exception exception) {
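getSchemaKeys above and getSchemaValues below walk the composite classifications of every loaded ontology, collecting the map keys and, for the selected schemas, the map values; PATCH 019 then pages the resulting sets by hand with List.subList. A self-contained sketch of both steps with plain java.util data (names and values are illustrative). Note the clamp on the start offset: subList throws IndexOutOfBoundsException when a requested page begins past the end of the list:

    import java.util.*;

    public class SchemaKeysSketch {
        public static void main(String[] args) {
            // Stand-in for ontology.config.classifications: a collection of maps
            // from schema key to classification values.
            Collection<Map<String, Collection<String>>> classifications = List.of(
                    Map.of("collection", List.of("NFDI4Ing"), "subject", List.of("engineering")),
                    Map.of("subject", List.of("physics")));

            Set<String> keys = new TreeSet<>();
            Set<String> values = new TreeSet<>();
            Collection<String> wantedSchemas = Set.of("subject");

            for (Map<String, Collection<String>> c : classifications) {
                keys.addAll(c.keySet());
                for (Map.Entry<String, Collection<String>> e : c.entrySet())
                    if (wantedSchemas.contains(e.getKey()))
                        values.addAll(e.getValue());
            }
            System.out.println(keys);   // [collection, subject]
            System.out.println(values); // [engineering, physics]

            // Manual paging as in the controller, with the start offset clamped
            // so an out-of-range page yields an empty slice instead of throwing.
            List<String> all = new ArrayList<>(values);
            int pageSize = 10, page = 5;
            int start = Math.min(page * pageSize, all.size());
            int end = Math.min(start + pageSize, all.size());
            System.out.println(all.subList(start, end)); // []
        }
    }

diff 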
--git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1OntologyRepository.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1OntologyRepository.java index d432e2415..4f6460dcb 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1OntologyRepository.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1OntologyRepository.java @@ -231,4 +231,20 @@ public Set getSchemaKeys(String lang){ return keys; } + public Set getSchemaValues(Collection schemas,String lang){ + Set tempSet = new HashSet(); + tempSet.addAll(getSet(lang)); + Set values = new HashSet<>(); + for (V1Ontology ontology : tempSet){ + Collection temp = (Collection) ontology.config.classifications; + for (Object o : temp){ + for (Map.Entry> entry : ((Map>) o).entrySet()) + for (String value : entry.getValue()) + if(schemas.contains(entry.getKey())) + values.add(value); + } + } + return values; + } + } From 49af55c18fae5bf8adeb45d23d1fd0e70f616103 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Tue, 13 Feb 2024 18:19:09 +0100 Subject: [PATCH 019/146] paginated the schemakeys and schemavalues methods --- .../api/v1/V1OntologyController.java | 28 +++++++++++++++---- 1 file changed, 22 insertions(+), 6 deletions(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java index 9c3f5198e..d235e5200 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java @@ -104,16 +104,32 @@ HttpEntity> getOntologiesByMetadata( } @RequestMapping(path = "/schemakeys", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) - HttpEntity> filterKeys( - @RequestParam(value = "lang", required = false, defaultValue = "en") String lang){ - return new ResponseEntity<>(ontologyRepository.getSchemaKeys(lang), HttpStatus.OK); + HttpEntity> filterKeys( + @PageableDefault(size = 100, page = 0) Pageable pageable, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + PagedResourcesAssembler assembler){ + Set tempSet = ontologyRepository.getSchemaKeys(lang); + List tempList = new ArrayList(); + tempList.addAll(tempSet); + final int start = (int)pageable.getOffset(); + final int end = Math.min((start + pageable.getPageSize()), tempSet.size()); + Page document = new PageImpl<>(tempList.subList(start, end), pageable, tempSet.size()); + return new ResponseEntity<>(document, HttpStatus.OK); } @RequestMapping(path = "/schemavalues", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) - HttpEntity> filterValues( + HttpEntity> filterValues( @RequestParam(value = "schema", required = true) Collection schemas, - @RequestParam(value = "lang", required = false, defaultValue = "en") String lang){ - return new ResponseEntity<>(ontologyRepository.getSchemaValues(schemas,lang), HttpStatus.OK); + @PageableDefault(size = 100, page = 0) Pageable pageable, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + PagedResourcesAssembler assembler){ + Set tempSet = ontologyRepository.getSchemaValues(schemas,lang); + List tempList = new ArrayList(); + tempList.addAll(tempSet); + final int start = (int)pageable.getOffset(); + final int end = Math.min((start + 
pageable.getPageSize()), tempSet.size()); + Page document = new PageImpl<>(tempList.subList(start, end), pageable, tempSet.size()); + return new ResponseEntity<>(document, HttpStatus.OK); } @ResponseStatus(value = HttpStatus.NOT_FOUND, reason = "EntityModel not found") From 6fc77ea9d21b3481cf86c8a205f45f24e6434eb1 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Wed, 27 Dec 2023 14:03:31 +0100 Subject: [PATCH 020/146] enabled all rest controllers --- .../api/v1/V1IndividualController.java | 18 +++++++++--------- .../api/v1/V1OntologyController.java | 2 +- .../api/v1/V1OntologyIndividualController.java | 2 +- .../api/v1/V1OntologyPropertyController.java | 2 +- .../api/v1/V1PropertyController.java | 8 ++++---- .../controller/api/v1/V1SearchController.java | 2 +- .../controller/api/v1/V1SelectController.java | 2 +- .../controller/api/v1/V1SuggestController.java | 2 +- .../controller/api/v2/V2ClassController.java | 2 +- .../controller/api/v2/V2EntityController.java | 2 +- .../api/v2/V2IndividualController.java | 2 +- .../api/v2/V2OntologyController.java | 2 +- .../api/v2/V2PropertyController.java | 2 +- .../api/v2/V2StatisticsController.java | 2 +- 14 files changed, 25 insertions(+), 25 deletions(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1IndividualController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1IndividualController.java index c8b900379..7c967b65b 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1IndividualController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1IndividualController.java @@ -28,7 +28,7 @@ * @date 18/08/2015 * Samples, Phenotypes and Ontologies Team, EMBL-EBI */ -@Controller +@RestController @RequestMapping("/api/individuals") @ExposesResourceFor(V1Individual.class) public class V1IndividualController implements @@ -81,7 +81,7 @@ HttpEntity> getAllIndividuals( return new ResponseEntity<>(assembler.toModel(terms, individualAssembler), HttpStatus.OK); } - + @RequestMapping(path = "/findByIdAndIsDefiningOntology/{id}", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) HttpEntity> getAllIndividualsByIdAndIsDefiningOntology( @PathVariable("id") String termId, @@ -92,11 +92,11 @@ HttpEntity> getAllIndividualsByIdAndIsDefiningOntology( decoded = UriUtils.decode(termId, "UTF-8"); return getAllIndividualsByIdAndIsDefiningOntology(decoded, null, null, lang, pageable, assembler); - } - - - @RequestMapping(path = "/findByIdAndIsDefiningOntology", - produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, + } + + + @RequestMapping(path = "/findByIdAndIsDefiningOntology", + produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) HttpEntity> getAllIndividualsByIdAndIsDefiningOntology( @RequestParam(value = "iri", required = false) String iri, @@ -120,11 +120,11 @@ HttpEntity> getAllIndividualsByIdAndIsDefiningOntology( return new ResponseEntity<>(assembler.toModel(terms, individualAssembler), HttpStatus.OK); } - + @ResponseStatus(value = HttpStatus.NOT_FOUND, reason = "EntityModel not found") @ExceptionHandler(ResourceNotFoundException.class) public void handleError(HttpServletRequest req, Exception exception) { } -} \ No newline at end of file +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java 
b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java index d235e5200..7543dde03 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java @@ -35,7 +35,7 @@ * @date 19/08/2015 * Samples, Phenotypes and Ontologies Team, EMBL-EBI */ -@Controller +@RestController @RequestMapping("/api/ontologies") @ExposesResourceFor(V1Ontology.class) public class V1OntologyController implements diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyIndividualController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyIndividualController.java index 5ea636170..75b5ca482 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyIndividualController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyIndividualController.java @@ -34,7 +34,7 @@ * @date 02/11/15 * Samples, Phenotypes and Ontologies Team, EMBL-EBI */ -@Controller +@RestController @RequestMapping("/api/ontologies") public class V1OntologyIndividualController { diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyPropertyController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyPropertyController.java index 9e7441d7e..e01786710 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyPropertyController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyPropertyController.java @@ -27,7 +27,7 @@ import javax.servlet.http.HttpServletRequest; import java.util.Arrays; -@Controller +@RestController @RequestMapping("/api/ontologies") public class V1OntologyPropertyController { diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1PropertyController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1PropertyController.java index b6dce598a..3bc5a7fdc 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1PropertyController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1PropertyController.java @@ -23,7 +23,7 @@ import javax.servlet.http.HttpServletRequest; -@Controller +@RestController @RequestMapping("/api/properties") @ExposesResourceFor(V1Property.class) public class V1PropertyController implements @@ -93,8 +93,8 @@ HttpEntity> getPropertiesByIriAndIsDefiningOntology(@Path String decoded = null; decoded = UriUtils.decode(termId, "UTF-8"); return getPropertiesByIdAndIsDefiningOntology(decoded, null, null, lang, pageable, assembler); - } - + } + @RequestMapping(path = "/findByIdAndIsDefiningOntology", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) HttpEntity> getPropertiesByIdAndIsDefiningOntology( @RequestParam(value = "iri", required = false) String iri, @@ -121,7 +121,7 @@ else if (oboId != null) { return new ResponseEntity<>( assembler.toModel(terms, termAssembler), HttpStatus.OK); } - + @ResponseStatus(value = HttpStatus.NOT_FOUND, reason = "EntityModel not found") @ExceptionHandler(ResourceNotFoundException.class) public void handleError(HttpServletRequest req, Exception exception) { diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java index 9b0a9b76d..059c92821 100644 --- 
a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java @@ -38,7 +38,7 @@ * @date 02/07/2015 * Samples, Phenotypes and Ontologies Team, EMBL-EBI */ -@Controller +@RestController public class V1SearchController { Gson gson = new Gson(); diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java index c56ad7ed5..4b40e677e 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java @@ -31,7 +31,7 @@ import java.util.function.Function; import java.util.stream.Collectors; -@Controller +@RestController public class V1SelectController { Gson gson = new Gson(); diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SuggestController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SuggestController.java index f9b09e288..3939a231c 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SuggestController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SuggestController.java @@ -25,7 +25,7 @@ import java.nio.charset.StandardCharsets; import java.util.*; -@Controller +@RestController public class V1SuggestController { Gson gson = new Gson(); diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2ClassController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2ClassController.java index d8b8b4fde..ec0e4fd6e 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2ClassController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2ClassController.java @@ -35,7 +35,7 @@ import java.util.List; import java.util.Map; -@Controller +@RestController @RequestMapping("/api/v2") public class V2ClassController { diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2EntityController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2EntityController.java index 6c760ae8e..11ad5e444 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2EntityController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2EntityController.java @@ -28,7 +28,7 @@ import java.util.List; import java.util.Map; -@Controller +@RestController @RequestMapping("/api/v2") public class V2EntityController { diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2IndividualController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2IndividualController.java index bff23a360..e8342f2cd 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2IndividualController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2IndividualController.java @@ -29,7 +29,7 @@ import java.util.List; import java.util.Map; -@Controller +@RestController @RequestMapping("/api/v2") public class V2IndividualController { diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologyController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologyController.java index 6a9c8501e..ce996db00 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologyController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologyController.java @@ 
-29,7 +29,7 @@ import java.util.List; import java.util.Map; -@Controller +@RestController @RequestMapping("/api/v2/ontologies") public class V2OntologyController { diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2PropertyController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2PropertyController.java index ee847f3cd..9d690a53c 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2PropertyController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2PropertyController.java @@ -25,7 +25,7 @@ import java.util.List; import java.util.Map; -@Controller +@RestController @RequestMapping("/api/v2") public class V2PropertyController { diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java index 0af7b2460..31b51eea0 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java @@ -20,7 +20,7 @@ import java.util.HashMap; import java.util.Map; -@Controller +@RestController @RequestMapping("/api/v2/stats") public class V2StatisticsController { From c3b37fa4002f6ac1db4e6153494a07bcd46e8b49 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Wed, 27 Dec 2023 14:30:21 +0100 Subject: [PATCH 021/146] added imports --- .../ac/ebi/spot/ols/controller/api/v1/V1ApiUnavailable.java | 5 ++--- .../ebi/spot/ols/controller/api/v1/V1SearchController.java | 4 +--- .../ebi/spot/ols/controller/api/v1/V1SelectController.java | 4 +--- .../ebi/spot/ols/controller/api/v1/V1SuggestController.java | 4 +--- .../ac/ebi/spot/ols/controller/api/v2/V2ClassController.java | 5 +---- .../ebi/spot/ols/controller/api/v2/V2EntityController.java | 5 +---- .../spot/ols/controller/api/v2/V2IndividualController.java | 5 +---- .../ebi/spot/ols/controller/api/v2/V2OntologyController.java | 5 +---- .../spot/ols/controller/api/v2/V2StatisticsController.java | 3 +-- 9 files changed, 10 insertions(+), 30 deletions(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1ApiUnavailable.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1ApiUnavailable.java index 4452d07bf..dabe0309e 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1ApiUnavailable.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1ApiUnavailable.java @@ -6,8 +6,7 @@ import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Controller; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.*; import javax.servlet.http.HttpServletResponse; @@ -16,7 +15,7 @@ * @date 27/09/2016 * Samples, Phenotypes and Ontologies Team, EMBL-EBI */ -@Controller +@RestController public class V1ApiUnavailable { @RequestMapping(path = "/api/unavailable", produces = {MediaType.APPLICATION_JSON_VALUE}, method = RequestMethod.GET) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java index 059c92821..cf4b495ab 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java +++ 
b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java @@ -14,9 +14,7 @@ import org.springframework.data.web.PageableDefault; import org.springframework.http.MediaType; import org.springframework.stereotype.Controller; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; -import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.*; import uk.ac.ebi.spot.ols.repository.Validation; import uk.ac.ebi.spot.ols.repository.solr.OlsSolrClient; import uk.ac.ebi.spot.ols.repository.transforms.LocalizationTransform; diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java index 4b40e677e..4c994cf89 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java @@ -14,9 +14,7 @@ import org.springframework.data.web.PageableDefault; import org.springframework.http.MediaType; import org.springframework.stereotype.Controller; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; -import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.*; import uk.ac.ebi.spot.ols.repository.Validation; import uk.ac.ebi.spot.ols.repository.solr.OlsSolrClient; import uk.ac.ebi.spot.ols.repository.transforms.LocalizationTransform; diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SuggestController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SuggestController.java index 3939a231c..03afc176b 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SuggestController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SuggestController.java @@ -13,9 +13,7 @@ import org.springframework.data.web.PageableDefault; import org.springframework.http.MediaType; import org.springframework.stereotype.Controller; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; -import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.*; import uk.ac.ebi.spot.ols.repository.Validation; import uk.ac.ebi.spot.ols.repository.solr.OlsSolrClient; import uk.ac.ebi.spot.ols.repository.v1.V1OntologyRepository; diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2ClassController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2ClassController.java index ec0e4fd6e..3c1fd9e9a 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2ClassController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2ClassController.java @@ -15,10 +15,7 @@ import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Controller; import org.springframework.util.MultiValueMap; -import org.springframework.web.bind.annotation.PathVariable; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; -import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.*; import org.springframework.web.util.UriUtils; import 
uk.ac.ebi.spot.ols.controller.api.v2.helpers.DynamicQueryHelper; import uk.ac.ebi.spot.ols.controller.api.v2.responses.V2PagedAndFacetedResponse; diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2EntityController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2EntityController.java index 11ad5e444..8d8c254bd 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2EntityController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2EntityController.java @@ -10,10 +10,7 @@ import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Controller; import org.springframework.util.MultiValueMap; -import org.springframework.web.bind.annotation.PathVariable; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; -import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.*; import org.springframework.web.util.UriUtils; import uk.ac.ebi.spot.ols.controller.api.v2.helpers.DynamicQueryHelper; import uk.ac.ebi.spot.ols.controller.api.v2.responses.V2PagedAndFacetedResponse; diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2IndividualController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2IndividualController.java index e8342f2cd..c381840b5 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2IndividualController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2IndividualController.java @@ -11,10 +11,7 @@ import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Controller; import org.springframework.util.MultiValueMap; -import org.springframework.web.bind.annotation.PathVariable; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; -import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.*; import org.springframework.web.util.UriUtils; import uk.ac.ebi.spot.ols.controller.api.v2.helpers.DynamicQueryHelper; import uk.ac.ebi.spot.ols.controller.api.v2.responses.V2PagedAndFacetedResponse; diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologyController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologyController.java index ce996db00..1d322cd1d 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologyController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologyController.java @@ -12,10 +12,7 @@ import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Controller; -import org.springframework.web.bind.annotation.PathVariable; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; -import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.*; import uk.ac.ebi.spot.ols.controller.api.v2.helpers.DynamicQueryHelper; import uk.ac.ebi.spot.ols.controller.api.v2.responses.V2PagedAndFacetedResponse; import uk.ac.ebi.spot.ols.model.v2.V2Entity; diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java index 31b51eea0..f676af1dc 
100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java @@ -11,8 +11,7 @@ import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Controller; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.*; import uk.ac.ebi.spot.ols.model.v2.V2Statistics; import uk.ac.ebi.spot.ols.repository.solr.OlsSolrClient; From 0151b493d9d717e742a834886407278ac93fa5cf Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Wed, 21 Feb 2024 15:48:51 +0100 Subject: [PATCH 022/146] added config file for testing in #3 --- dataload/ontologies.json | 158 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 158 insertions(+) create mode 100644 dataload/ontologies.json diff --git a/dataload/ontologies.json b/dataload/ontologies.json new file mode 100644 index 000000000..d374ea433 --- /dev/null +++ b/dataload/ontologies.json @@ -0,0 +1,158 @@ +{ + "name": "OBO Foundry", + "title": "The OBO Foundry", + "markdown": "kramdown", + "highlighter": "rouge", + "baseurl": "/", + "imgurl": "/images", + "repo": "https://github.com/OBOFoundry/OBOFoundry.github.io/", + "repo_src": "https://github.com/OBOFoundry/OBOFoundry.github.io/blob/master/", + "author": { + "name": "OBO Technical WG" + }, + "ontologies": [ + { + "ontology_purl": "http://purl.obolibrary.org/obo/duo.owl", + "description": "DUO is an ontology which represent data use conditions.", + "homepage": "https://github.com/EBISPOT/DUO", + "id": "duo", + "license": { + "label": "CC-BY", + "logo": "http://mirrors.creativecommons.org/presskit/buttons/80x15/png/by.png", + "url": "http://creativecommons.org/licenses/by/3.0/" + }, + "title": "The Data Use Ontology", + "tracker": "https://github.com/EBISPOT/DUO/issues", + "preferred_root_term": [ + "http://purl.obolibrary.org/obo/DUO_0000001", + "http://purl.obolibrary.org/obo/DUO_0000017", + "http://purl.obolibrary.org/obo/OBI_0000066" + ], + "label_property": "http://www.w3.org/2000/01/rdf-schema#label", + "synonym_property": [ + "http://purl.obolibrary.org/obo/IAO_0000118", + "http://www.geneontology.org/formats/oboInOwl#hasExactSynonym", + "http://www.geneontology.org/formats/oboInOwl#shorthand" + ], + "definition_property": [ + "http://purl.obolibrary.org/obo/IAO_0000115" + ], + "classification": [ + { + "collection": [ + "NFDI4ING", + "NFDI4CHEM", + "ESS" + ] + }, + { + "subject": [ + "Computer Science" + ] + } + ], + "repo_url": "https://github.com/EBISPOT/DUO" + }, + { + "ontology_purl": "http://purl.obolibrary.org/obo/ms.owl", + "description": "A structured controlled vocabulary for the annotation of experiments concerned with proteomics mass spectrometry.", + "homepage": "http://www.psidev.info/groups/controlled-vocabularies", + "id": "ms", + "license": { + "label": "CC-BY", + "logo": "http://mirrors.creativecommons.org/presskit/buttons/80x15/png/by.png", + "url": "https://creativecommons.org/licenses/by/4.0/" + }, + "title": "Mass Spectrometry", + "tracker": "https://github.com/HUPO-PSI/psi-ms-CV/issues", + "definition_property": [ + "http://purl.obolibrary.org/obo/IAO_0000115" + ], + "creator": [ + "Yasset Perez-Riverol", + "Matt Chambers", + "Andreas Bertsch", + "Marius Kallhardt", + "Eric Deutsch", + "Fredrik Levander", + "Pierre-Alain Binz", 
+ "Gerhard Mayer", + "Joshua Klein" + ], + "is_foundary": 1, + "preferredPrefix": "MS", + "classification": [ + { + "collection": [ + "NFDI4CHEM", + "DataPLANT" + ] + }, + { + "subject": [ + "Chemistry" + ] + } + ], + "repo-url": "https://www.psidev.info/groups/controlled-vocabularies" + }, + { + "ontology_purl": "http://www.ebi.ac.uk/efo/efo.owl", + "id": "efo", + "license": { + "url": "www.apache.org/licenses/LICENSE-2.0", + "label": "Apache license 2.0" + }, + "description": "The Experimental Factor Ontology (EFO) provides a systematic description of many experimental variables available in EBI databases, and for external projects such as the NHGRI GWAS catalogue. It combines parts of several biological ontologies, such as anatomy, disease and chemical compounds. The scope of EFO is to support the annotation, analysis and visualization of data handled by many groups at the EBI and as the core ontology for OpenTargets.org", + "homepage": "https://www.ebi.ac.uk/efo/", + "mailing_list": "efo-users@ebi.ac.uk", + "title": "Experimental Factor Ontology", + "definition_property": [ + "http://purl.obolibrary.org/obo/IAO_0000115" + ], + "synonym_property": [ + "http://www.geneontology.org/formats/oboInOwl#hasRelatedSynonym", + "http://www.geneontology.org/formats/oboInOwl#hasExactSynonym", + "http://www.geneontology.org/formats/oboInOwl#hasNarrowSynonym" + ], + "creator": [ + "Gautier Koscielny", + "Jon Ison", + "Simon Jupp", + "Laura Huerta Martinez", + "Helen Parkinson", + "Zoe May Pendlington", + "Eleanor Williams", + "James Malone", + "Trish Whetzel", + "Sirarat Sarntivijai", + "Catherine Leroy", + "Ele Holloway", + "Tomasz Adamusiak", + "Emma Kate Hastings", + "Olamidipupo Ajigboye", + "Paola Roncaglia", + "Natalja Kurbatova", + "Dani Welter", + "Drashtti Vasant" + ], + "classification": [ + { + "collection": [ + "NFDI4CHEM", + "ESS", + "DataPLANT" + ] + }, + { + "subject": [ + "Life Sciences, biology", + "Medicine", + "Earth sciences" + ] + } + ], + "repo_url": "https://github.com/EBISPOT/efo" + } + ] +} From 111b42acfeeafd14712dd8e46a230ab51c5b7cc3 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Wed, 21 Feb 2024 17:15:12 +0100 Subject: [PATCH 023/146] moved sample labeled config to configs directory in #3 --- dataload/{ => configs}/ontologies.json | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename dataload/{ => configs}/ontologies.json (100%) diff --git a/dataload/ontologies.json b/dataload/configs/ontologies.json similarity index 100% rename from dataload/ontologies.json rename to dataload/configs/ontologies.json From 285857bbfaab06e866e3258fd6c7527776f1acb8 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Thu, 22 Feb 2024 15:45:29 +0100 Subject: [PATCH 024/146] renamed test config file and corrected variable name inside it for #3 --- dataload/configs/{ontologies.json => 3labeledonts.json} | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) rename dataload/configs/{ontologies.json => 3labeledonts.json} (97%) diff --git a/dataload/configs/ontologies.json b/dataload/configs/3labeledonts.json similarity index 97% rename from dataload/configs/ontologies.json rename to dataload/configs/3labeledonts.json index d374ea433..851a5e50e 100644 --- a/dataload/configs/ontologies.json +++ b/dataload/configs/3labeledonts.json @@ -37,7 +37,7 @@ "definition_property": [ "http://purl.obolibrary.org/obo/IAO_0000115" ], - "classification": [ + "classifications": [ { "collection": [ 
"NFDI4ING", @@ -81,7 +81,7 @@ ], "is_foundary": 1, "preferredPrefix": "MS", - "classification": [ + "classifications": [ { "collection": [ "NFDI4CHEM", @@ -94,7 +94,7 @@ ] } ], - "repo-url": "https://www.psidev.info/groups/controlled-vocabularies" + "repo_url": "https://www.psidev.info/groups/controlled-vocabularies" }, { "ontology_purl": "http://www.ebi.ac.uk/efo/efo.owl", @@ -136,7 +136,7 @@ "Dani Welter", "Drashtti Vasant" ], - "classification": [ + "classifications": [ { "collection": [ "NFDI4CHEM", From e87dcd358e50e053db74b0ba87639ece1b14f156 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Thu, 22 Feb 2024 16:15:30 +0100 Subject: [PATCH 025/146] refactored and added repo_url config variable --- .../spot/ols/model/v1/V1OntologyConfig.java | 2 + .../repository/v1/V1OntologyRepository.java | 58 +++++-------------- .../v1/mappers/V1OntologyMapper.java | 1 + 3 files changed, 16 insertions(+), 45 deletions(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/V1OntologyConfig.java b/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/V1OntologyConfig.java index 1d6fa368a..571f52416 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/V1OntologyConfig.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/V1OntologyConfig.java @@ -31,6 +31,8 @@ public class V1OntologyConfig { //public Map> annotations; public Object classifications; + + public String repoUrl; public Object annotations; public String fileLocation; diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1OntologyRepository.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1OntologyRepository.java index 4f6460dcb..22c0dafa9 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1OntologyRepository.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1OntologyRepository.java @@ -104,28 +104,6 @@ else if (schemas != null) return postFilterOntologySet; } - Set union(Collection a, Collection b ) { - Set union = new HashSet(); - for (String s : a){ - union.add(s); - } - for (String s : b){ - union.add(s); - } - return union; - } - - Set intersection(Collection a, Collection b ) { - Set intersection = new HashSet(); - for (String s1 : a){ - for (String s2 : b){ - if (s1.equals(s2)) - intersection.add(s1); - } - } - return intersection; - } - public Set filter(Collection schemas, Collection classifications, boolean exclusive, String lang){ if(exclusive) return exclusiveFilter(schemas,classifications,lang); @@ -135,13 +113,6 @@ public Set filter(Collection schemas, Collection cla public Set inclusiveFilter(Collection schemas, Collection classifications, String lang){ Set tempSet = new HashSet(); Set filteredSet = new HashSet(); -/* Page document = getAll(lang, pageable); - tempSet.addAll(document.getContent()); - while(document.hasNext()){ - pageable = pageable.next(); - document = getAll(lang, pageable); - tempSet.addAll(document.getContent()); - }*/ tempSet.addAll(getSet(lang)); for (V1Ontology ontology : tempSet){ @@ -171,13 +142,6 @@ public Set inclusiveFilter(Collection schemas, Collection exclusiveFilter(Collection schemas, Collection classifications, String lang){ Set tempSet = new HashSet(); Set filteredSet = new HashSet(); -/* Page document = getAll(lang, pageable); - tempSet.addAll(document.getContent()); - while(document.hasNext()){ - pageable = pageable.next(); - document = getAll(lang, pageable); - tempSet.addAll(document.getContent()); - }*/ tempSet.addAll(getSet(lang)); for (V1Ontology 
ontology : tempSet){ @@ -223,9 +187,11 @@ public Set getSchemaKeys(String lang){ tempSet.addAll(getSet(lang)); Set keys = new HashSet<>(); for (V1Ontology ontology : tempSet){ - Collection temp = (Collection) ontology.config.classifications; - for (Object o : temp){ - keys.addAll(((Map>) o).keySet()); + if (ontology.config.classifications != null){ + Collection temp = (Collection) ontology.config.classifications; + for (Object o : temp){ + keys.addAll(((Map>) o).keySet()); + } } } return keys; @@ -236,12 +202,14 @@ public Set getSchemaValues(Collection schemas,String lang){ tempSet.addAll(getSet(lang)); Set values = new HashSet<>(); for (V1Ontology ontology : tempSet){ - Collection temp = (Collection) ontology.config.classifications; - for (Object o : temp){ - for (Map.Entry> entry : ((Map>) o).entrySet()) - for (String value : entry.getValue()) - if(schemas.contains(entry.getKey())) - values.add(value); + if (ontology.config.classifications != null){ + Collection temp = (Collection) ontology.config.classifications; + for (Object o : temp){ + for (Map.Entry> entry : ((Map>) o).entrySet()) + for (String value : entry.getValue()) + if(schemas.contains(entry.getKey())) + values.add(value); + } } } return values; diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java index 38aaea6cc..7bfe769ae 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java @@ -72,6 +72,7 @@ public static V1Ontology mapOntology(JsonElement json, String lang) { ontology.config.preferredRootTerms = JsonHelper.getStrings(localizedJson, "preferredRootTerms"); ontology.config.isSkos = localizedJson.has("isSkos") && localizedJson.get("isSkos").getAsBoolean(); + ontology.config.repoUrl = JsonHelper.getString(localizedJson, "repo_url"); ontology.config.allowDownload = localizedJson.has("allowDownload") && localizedJson.get("allowDownload").getAsBoolean(); From 071cdfc72e17e304152f1c138d75d6c97cdf31af Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Thu, 29 Feb 2024 10:14:58 +0100 Subject: [PATCH 026/146] added composite license variable in config for #3 --- .../uk/ac/ebi/spot/ols/model/v1/License.java | 55 +++++++++++++++ .../spot/ols/model/v1/V1OntologyConfig.java | 5 +- .../v1/mappers/V1OntologyMapper.java | 3 + dataload/configs/3labeledonts.json | 69 +++++++------------ 4 files changed, 84 insertions(+), 48 deletions(-) create mode 100644 backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/License.java diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/License.java b/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/License.java new file mode 100644 index 000000000..82cdee9b0 --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/License.java @@ -0,0 +1,55 @@ +package uk.ac.ebi.spot.ols.model.v1; + +import io.swagger.v3.oas.annotations.media.Schema; + +@Schema(name="license information of the ontology") +public class License { + + @Schema(name = "URL of the license", example = "http://creativecommons.org/licenses/by/4.0/") + String url; + @Schema(name = "Logo of the license", example = "http://mirrors.creativecommons.org/presskit/buttons/80x15/png/by.png") + String logo; + @Schema(name = "Label of the license", example = "CC-BY") + String label; + + public License() {} + + public 
License(String url, String logo, String label) { + super(); + this.url = url; + this.logo = logo; + this.label = label; + } + + public String getUrl() { + return url; + } + + public void setUrl(String url) { + this.url = url; + } + + public String getLogo() { + return logo; + } + + public void setLogo(String logo) { + this.logo = logo; + } + + public String getLabel() { + return label; + } + + public void setLabel(String label) { + this.label = label; + } + + @Override + public String toString() { + return "License [url=" + url + ", logo=" + logo + ", label=" + label + "]"; + } + + + +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/V1OntologyConfig.java b/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/V1OntologyConfig.java index 571f52416..9e0b3458f 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/V1OntologyConfig.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/V1OntologyConfig.java @@ -1,12 +1,9 @@ package uk.ac.ebi.spot.ols.model.v1; -import com.google.gson.JsonObject; import com.google.gson.annotations.SerializedName; import java.util.Collection; import java.util.HashSet; -import java.util.Map; -import java.util.Optional; public class V1OntologyConfig { @@ -33,6 +30,8 @@ public class V1OntologyConfig { public Object classifications; public String repoUrl; + + public License license; public Object annotations; public String fileLocation; diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java index 7bfe769ae..4d772ab85 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java @@ -3,6 +3,7 @@ import com.google.gson.Gson; import com.google.gson.JsonElement; import com.google.gson.JsonObject; +import uk.ac.ebi.spot.ols.model.v1.License; import uk.ac.ebi.spot.ols.model.v1.V1Ontology; import uk.ac.ebi.spot.ols.model.v1.V1OntologyConfig; import uk.ac.ebi.spot.ols.repository.transforms.LocalizationTransform; @@ -54,6 +55,8 @@ public static V1Ontology mapOntology(JsonElement json, String lang) { ontology.config.collection = collectionSet; ontology.config.subject = subjectSet; ontology.config.classifications = gson.fromJson(localizedJson.get("classifications"), Collection.class); + + ontology.config.license = gson.fromJson(localizedJson.get("license"), License.class); ontology.config.annotations = gson.fromJson(localizedJson.get("annotations"), Map.class); ontology.config.fileLocation = JsonHelper.getString(localizedJson, "ontology_purl"); ontology.config.oboSlims = localizedJson.has("oboSlims") && localizedJson.get("oboSlims").getAsBoolean(); diff --git a/dataload/configs/3labeledonts.json b/dataload/configs/3labeledonts.json index 851a5e50e..119a3fb74 100644 --- a/dataload/configs/3labeledonts.json +++ b/dataload/configs/3labeledonts.json @@ -97,62 +97,41 @@ "repo_url": "https://www.psidev.info/groups/controlled-vocabularies" }, { - "ontology_purl": "http://www.ebi.ac.uk/efo/efo.owl", - "id": "efo", + "ontology_purl": "https://raw.githubusercontent.com/tibonto/aeon/main/aeon.owl", + "description": "WIP - NOT READY FOR PRODUCTION - The Academic Event Ontology (AEON) is used to represent information regarding academic events.", + "homepage": "https://github.com/tibonto/aeon", + "id": "aeon", "license": { - "url": "www.apache.org/licenses/LICENSE-2.0", - "label": "Apache license 2.0" + "label": 
"CC-BY 4.0", + "url": "https://creativecommons.org/licenses/by/4.0/" }, - "description": "The Experimental Factor Ontology (EFO) provides a systematic description of many experimental variables available in EBI databases, and for external projects such as the NHGRI GWAS catalogue. It combines parts of several biological ontologies, such as anatomy, disease and chemical compounds. The scope of EFO is to support the annotation, analysis and visualization of data handled by many groups at the EBI and as the core ontology for OpenTargets.org", - "homepage": "https://www.ebi.ac.uk/efo/", - "mailing_list": "efo-users@ebi.ac.uk", - "title": "Experimental Factor Ontology", - "definition_property": [ - "http://purl.obolibrary.org/obo/IAO_0000115" - ], - "synonym_property": [ - "http://www.geneontology.org/formats/oboInOwl#hasRelatedSynonym", - "http://www.geneontology.org/formats/oboInOwl#hasExactSynonym", - "http://www.geneontology.org/formats/oboInOwl#hasNarrowSynonym" - ], - "creator": [ - "Gautier Koscielny", - "Jon Ison", - "Simon Jupp", - "Laura Huerta Martinez", - "Helen Parkinson", - "Zoe May Pendlington", - "Eleanor Williams", - "James Malone", - "Trish Whetzel", - "Sirarat Sarntivijai", - "Catherine Leroy", - "Ele Holloway", - "Tomasz Adamusiak", - "Emma Kate Hastings", - "Olamidipupo Ajigboye", - "Paola Roncaglia", - "Natalja Kurbatova", - "Dani Welter", - "Drashtti Vasant" + "title": "Academic Event Ontology", + "tracker": "https://github.com/tibonto/aeon/issues", + "preferredPrefix": "AEON", + "base_uri": [ + "https://github.com/tibonto/aeon" ], "classifications": [ { - "collection": [ - "NFDI4CHEM", - "ESS", - "DataPLANT" - ] + "collection": null }, { "subject": [ - "Life Sciences, biology", - "Medicine", - "Earth sciences" + "History", + "Social sciences", + "Educational science" ] } ], - "repo_url": "https://github.com/EBISPOT/efo" + "definition_property": [ + "http://purl.obolibrary.org/obo/IAO_0000115" + ], + "synonym_property": [ + "http://purl.obolibrary.org/obo/IAO_0000118", + "http://www.geneontology.org/formats/oboInOwl#hasExactSynonym", + "http://www.geneontology.org/formats/oboInOwl#hasSynonym" + ], + "repo_url": "https://github.com/tibonto/aeon" } ] } From e56e916b7cdb820be11a21f885a6859e343f5ad7 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Mon, 4 Mar 2024 14:59:06 +0100 Subject: [PATCH 027/146] refactored and handled null cases --- .../ols/repository/v1/V1TermRepository.java | 175 +++++++++--------- 1 file changed, 87 insertions(+), 88 deletions(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1TermRepository.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1TermRepository.java index 3b8e72246..6be9713db 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1TermRepository.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1TermRepository.java @@ -4,10 +4,8 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.cache.annotation.CacheEvict; import org.springframework.cache.annotation.Cacheable; import org.springframework.data.domain.Page; -import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.Pageable; import org.springframework.stereotype.Component; import uk.ac.ebi.spot.ols.model.v1.V1Individual; @@ -311,41 +309,41 @@ public List> conceptTree (String ontologyId, boolean schema, bo listOfTerms.addAll(terms.getContent()); 
} - List> rootIndividuals = new ArrayList>(); + List> rootTerms = new ArrayList>(); int count = 0; if(schema) { - for (V1Term indiv : listOfTerms) - if (indiv.annotation.get("hasTopConcept") != null) { - for (String iriTopConcept : (LinkedHashSet) indiv.annotation.get("hasTopConcept")) { - V1Term topConceptIndividual = findIndividual(listOfTerms,iriTopConcept); - TreeNode topConcept = new TreeNode(topConceptIndividual); + for (V1Term term : listOfTerms) + if (term.annotation.get("hasTopConcept") != null) { + for (String iriTopConcept : (LinkedHashSet) term.annotation.get("hasTopConcept")) { + V1Term topConceptTerm = findTerm(listOfTerms,iriTopConcept); + TreeNode topConcept = new TreeNode(topConceptTerm); topConcept.setIndex(String.valueOf(++count)); if(withChildren) { if(narrower) - populateChildrenandRelatedByNarrower(topConceptIndividual,topConcept,listOfTerms); + populateChildrenandRelatedByNarrower(topConceptTerm,topConcept,listOfTerms); else - populateChildrenandRelatedByBroader(topConceptIndividual,topConcept,listOfTerms); + populateChildrenandRelatedByBroader(topConceptTerm,topConcept,listOfTerms); } - rootIndividuals.add(topConcept); + rootTerms.add(topConcept); } } - } else for (V1Term individual : listOfTerms) { - TreeNode tree = new TreeNode(individual); + } else for (V1Term term : listOfTerms) { + TreeNode tree = new TreeNode(term); - if (tree.isRoot() && individual.annotation.get("topConceptOf") != null) { + if (tree.isRoot() && term.annotation.get("topConceptOf") != null) { tree.setIndex(String.valueOf(++count)); if(withChildren) { if(narrower) - populateChildrenandRelatedByNarrower(individual,tree,listOfTerms); + populateChildrenandRelatedByNarrower(term,tree,listOfTerms); else - populateChildrenandRelatedByBroader(individual,tree,listOfTerms); + populateChildrenandRelatedByBroader(term,tree,listOfTerms); } - rootIndividuals.add(tree); + rootTerms.add(tree); } } - return rootIndividuals; + return rootTerms; } @Cacheable(value = "concepttree", key="#ontologyId.concat('-').concat(#narrower).concat('-').concat(#withChildren)") @@ -360,54 +358,55 @@ public List> conceptTreeWithoutTop (String ontologyId, boolean } Set rootIRIs = new HashSet(); - List> rootIndividuals = new ArrayList>(); + List> rootTerms = new ArrayList>(); int count = 0; if(!narrower) { - for (V1Term individual : listOfTerms) { - if (individual.annotation.get("broader") != null) { - for (String iriBroader : (LinkedHashSet) individual.annotation.get("broader")) { - V1Term broaderIndividual = findIndividual(listOfTerms,iriBroader); - if (broaderIndividual.annotation.get("broader") == null) { - rootIRIs.add(iriBroader); - } - } - } + for (V1Term term : listOfTerms) { + if(term.annotation != null && term.annotation.get("broader") != null) { + for (String iriBroader : (LinkedHashSet) term.annotation.get("broader")) { + V1Term broaderTerm = findTerm(listOfTerms, iriBroader); + if (broaderTerm.annotation != null && broaderTerm.annotation.get("broader") == null) { + rootIRIs.add(iriBroader); + } + + } + } } for (String iri : rootIRIs) { - V1Term topConceptIndividual = findIndividual(listOfTerms, iri); - TreeNode topConcept = new TreeNode(topConceptIndividual); + V1Term topConceptTerm = findTerm(listOfTerms, iri); + TreeNode topConcept = new TreeNode(topConceptTerm); topConcept.setIndex(String.valueOf(++count)); if(withChildren) - populateChildrenandRelatedByBroader(topConceptIndividual,topConcept,listOfTerms); - rootIndividuals.add(topConcept); + 
populateChildrenandRelatedByBroader(topConceptTerm,topConcept,listOfTerms); + rootTerms.add(topConcept); } } else { - for (V1Term individual : listOfTerms) { - if (individual.annotation.get("narrower") != null) { - boolean root = true; - for (V1Term indiv : listOfTerms) { - if (indiv.annotation.get("narrower") != null) { - for (String iriNarrower : (LinkedHashSet) indiv.annotation.get("narrower")) { - if (individual.iri.equals(iriNarrower)) - root = false; - } - } - } - - if(root) { - TreeNode topConcept = new TreeNode(individual); - topConcept.setIndex(String.valueOf(++count)); - if(withChildren) - populateChildrenandRelatedByNarrower(individual,topConcept,listOfTerms); - rootIndividuals.add(topConcept); - } - } + for (V1Term term : listOfTerms) { + if (term.annotation != null && term.annotation.get("narrower") != null) { + boolean root = true; + for (V1Term v1Term : listOfTerms) { + if (v1Term.annotation != null && v1Term.annotation.get("narrower") != null) { + for (String iriNarrower : (LinkedHashSet) v1Term.annotation.get("narrower")) { + if (term.iri.equals(iriNarrower)) + root = false; + } + } + } + + if (root) { + TreeNode topConcept = new TreeNode(term); + topConcept.setIndex(String.valueOf(++count)); + if (withChildren) + populateChildrenandRelatedByNarrower(term, topConcept, listOfTerms); + rootTerms.add(topConcept); + } + } } } - return rootIndividuals; + return rootTerms; } @Cacheable(value = "concepttree", key="#ontologyId.concat('-').concat('s').concat('-').concat(#iri).concat('-').concat(#narrower).concat('-').concat(#index)") @@ -421,30 +420,30 @@ public TreeNode conceptSubTree(String ontologyId, String iri, boolean na listOfTerms.addAll(terms.getContent()); } - V1Term topConceptIndividual = findIndividual(listOfTerms,iri); - TreeNode topConcept = new TreeNode(topConceptIndividual); + V1Term topConceptTerm = findTerm(listOfTerms,iri); + TreeNode topConcept = new TreeNode(topConceptTerm); topConcept.setIndex(index); if(narrower) - populateChildrenandRelatedByNarrower(topConceptIndividual,topConcept,listOfTerms); + populateChildrenandRelatedByNarrower(topConceptTerm,topConcept,listOfTerms); else - populateChildrenandRelatedByBroader(topConceptIndividual,topConcept,listOfTerms); + populateChildrenandRelatedByBroader(topConceptTerm,topConcept,listOfTerms); return topConcept; } - public V1Term findIndividual(List wholeList, String iri) { - for (V1Term individual : wholeList) - if(individual.iri.equals(iri)) - return individual; + public V1Term findTerm(List wholeList, String iri) { + for (V1Term term : wholeList) + if(term.iri.equals(iri)) + return term; return new V1Term(); } public List findRelated(String ontologyId, String iri, String relationType, String lang) { List related = new ArrayList(); - V1Term individual = this.findByOntologyAndIri(ontologyId, iri, lang); - if (individual != null) - if (individual.annotation.get(relationType) != null) - for (String iriBroader : (LinkedHashSet) individual.annotation.get(relationType)) + V1Term term = this.findByOntologyAndIri(ontologyId, iri, lang); + if (term != null) + if (term.annotation.get(relationType) != null) + for (String iriBroader : (LinkedHashSet) term.annotation.get(relationType)) related.add(this.findByOntologyAndIri(ontologyId, iriBroader, lang)); return related; @@ -453,10 +452,10 @@ public List findRelated(String ontologyId, String iri, String relationTy public ListfindRelatedIndirectly(String ontologyId, String iri, String relationType, Boolean obsoletes, String lang, Pageable pageable){ List related = new 
ArrayList(); - V1Term individual = this.findByOntologyAndIri(ontologyId, iri, lang); - if(individual == null) + V1Term v1Term = this.findByOntologyAndIri(ontologyId, iri, lang); + if(v1Term == null) return related; - if(individual.iri == null) + if(v1Term.iri == null) return related; Page terms = this.findAllByOntology(ontologyId, obsoletes, lang, pageable); @@ -479,41 +478,41 @@ public List findRelated(String ontologyId, String iri, String relationTy return related; } - public void populateChildrenandRelatedByNarrower(V1Term individual, TreeNode tree, List listOfTerms ) { + public void populateChildrenandRelatedByNarrower(V1Term term, TreeNode tree, List listOfTerms ) { - if (individual.annotation != null) - for (String iriRelated : (LinkedHashSet) individual.annotation.getOrDefault("related", new LinkedHashSet())) { - TreeNode related = new TreeNode(findIndividual(listOfTerms, iriRelated)); + if (term.annotation != null) + for (String iriRelated : (LinkedHashSet) term.annotation.getOrDefault("related", new LinkedHashSet())) { + TreeNode related = new TreeNode(findTerm(listOfTerms, iriRelated)); related.setIndex(tree.getIndex() + ".related"); tree.addRelated(related); } int count = 0; - if (individual.annotation != null) - for (String iriChild : (LinkedHashSet) individual.annotation.getOrDefault("narrower", new LinkedHashSet())) { - V1Term childIndividual = findIndividual(listOfTerms, iriChild); - TreeNode child = new TreeNode(childIndividual); + if (term.annotation != null) + for (String iriChild : (LinkedHashSet) term.annotation.getOrDefault("narrower", new LinkedHashSet())) { + V1Term childTerm = findTerm(listOfTerms, iriChild); + TreeNode child = new TreeNode(childTerm); child.setIndex(tree.getIndex() + "." + ++count); - populateChildrenandRelatedByNarrower(childIndividual, child, listOfTerms); + populateChildrenandRelatedByNarrower(childTerm, child, listOfTerms); tree.addChild(child); } } - public void populateChildrenandRelatedByBroader(V1Term individual, TreeNode tree, List listOfTerms) { - if (individual.annotation != null) - for (String iriRelated : (LinkedHashSet) individual.annotation.getOrDefault("related", new LinkedHashSet())) { - TreeNode related = new TreeNode(findIndividual(listOfTerms, iriRelated)); + public void populateChildrenandRelatedByBroader(V1Term term, TreeNode tree, List listOfTerms) { + if (term.annotation != null) + for (String iriRelated : (LinkedHashSet) term.annotation.getOrDefault("related", new LinkedHashSet())) { + TreeNode related = new TreeNode(findTerm(listOfTerms, iriRelated)); related.setIndex(tree.getIndex() + ".related"); tree.addRelated(related); } int count = 0; - for ( V1Term indiv : listOfTerms) { - if (indiv.annotation != null) - for (String iriBroader : (LinkedHashSet) indiv.annotation.getOrDefault("broader",new LinkedHashSet())) - if(individual.iri != null) - if (individual.iri.equals(iriBroader)) { - TreeNode child = new TreeNode(indiv); + for ( V1Term v1Term : listOfTerms) { + if (v1Term.annotation != null) + for (String iriBroader : (LinkedHashSet) v1Term.annotation.getOrDefault("broader",new LinkedHashSet())) + if(term.iri != null) + if (term.iri.equals(iriBroader)) { + TreeNode child = new TreeNode(v1Term); child.setIndex(tree.getIndex()+"."+ ++count); - populateChildrenandRelatedByBroader(indiv,child,listOfTerms); + populateChildrenandRelatedByBroader(v1Term,child,listOfTerms); tree.addChild(child); } } From 757cd4e317d4a37338da0804e828719ff56d43f6 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY 
<48091473+giraygi@users.noreply.github.com> Date: Thu, 7 Mar 2024 18:36:04 +0100 Subject: [PATCH 028/146] implemented statistics method enhanced with filtering for #3 --- .../api/v2/V2StatisticsController.java | 58 ++++++++++++++++++- 1 file changed, 56 insertions(+), 2 deletions(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java index f676af1dc..f44943d4c 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java @@ -1,5 +1,6 @@ package uk.ac.ebi.spot.ols.controller.api.v2; +import io.swagger.v3.oas.annotations.Parameter; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.response.FacetField; import org.apache.solr.client.solrj.response.QueryResponse; @@ -14,19 +15,27 @@ import org.springframework.web.bind.annotation.*; import uk.ac.ebi.spot.ols.model.v2.V2Statistics; import uk.ac.ebi.spot.ols.repository.solr.OlsSolrClient; +import uk.ac.ebi.spot.ols.repository.solr.OlsSolrQuery; +import uk.ac.ebi.spot.ols.repository.solr.SearchType; +import uk.ac.ebi.spot.ols.repository.v1.V1OntologyRepository; import java.io.IOException; +import java.util.Collection; import java.util.HashMap; +import java.util.List; import java.util.Map; @RestController -@RequestMapping("/api/v2/stats") +@RequestMapping("/api/v2") public class V2StatisticsController { @Autowired OlsSolrClient solrClient; - @RequestMapping(produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + @Autowired + private V1OntologyRepository ontologyRepository; + + @RequestMapping(path = "/stats", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) public HttpEntity getStatistics() throws ResourceNotFoundException, IOException { Map coreStatus = solrClient.getCoreStatus(); @@ -58,4 +67,49 @@ public HttpEntity getStatistics() throws ResourceNotFoundException return new ResponseEntity<>( stats, HttpStatus.OK); } + @RequestMapping(path = "/statsby", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + public HttpEntity getStatistics( + @RequestParam(value = "schema", required = false) Collection schemas, + @RequestParam(value = "classification", required = false) Collection classifications, + @Parameter(description = "Set to true (default setting is false) for intersection (default behavior is union) of classifications.") + @RequestParam(value = "exclusive", required = false, defaultValue = "false") boolean exclusive, + @RequestParam(value = "ontologyIds", required = false) Collection ontologyIds, + @RequestParam(value = "lang", defaultValue = "en") String lang) throws ResourceNotFoundException, IOException{ + + ontologyIds = ontologyRepository.filterOntologyIDs(schemas,classifications,ontologyIds,exclusive,lang); + + StringBuilder sb = new StringBuilder(); + for (String id : ontologyIds){ + sb.append("ontologyId:").append(id).append(" OR "); + } + + String queryString = sb.toString().substring(0,sb.toString().lastIndexOf(" OR ")); + Map coreStatus = solrClient.getCoreStatus(); + Map indexStatus = (Map) coreStatus.get("index"); + String lastModified = (String) indexStatus.get("lastModified"); + + SolrQuery query = new SolrQuery(); + query.setQuery(queryString); + query.setFacet(true); + 
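+        // Facet on the "type" field so Solr returns one count per document type (ontology, class, individual, property); setRows(0) skips row retrieval, leaving only the facet counts in the response.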
query.addFacetField("type"); + query.setRows(0); + + QueryResponse qr = solrClient.runSolrQuery(query, null); + + Map counts = new HashMap<>(); + + for(FacetField.Count count : qr.getFacetField("type").getValues()) { + counts.put(count.getName(), (int)count.getCount()); + } + + V2Statistics stats = new V2Statistics(); + stats.lastModified = lastModified; + stats.numberOfOntologies = counts.containsKey("ontology") ? counts.get("ontology") : 0; + stats.numberOfClasses = counts.containsKey("class") ? counts.get("class") : 0; + stats.numberOfIndividuals = counts.containsKey("individual") ? counts.get("individual") : 0; + stats.numberOfProperties = counts.containsKey("property") ? counts.get("property") : 0; + + return new ResponseEntity<>( stats, HttpStatus.OK); + } + } From 35a0409e7c9de265282881b8d2326cc967fde084 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Fri, 8 Mar 2024 17:08:43 +0100 Subject: [PATCH 029/146] refactored statistics methods --- .../api/v2/V2StatisticsController.java | 41 ++++--------------- 1 file changed, 7 insertions(+), 34 deletions(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java index f44943d4c..a8d6807f8 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java @@ -11,18 +11,14 @@ import org.springframework.http.HttpStatus; import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; -import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.*; import uk.ac.ebi.spot.ols.model.v2.V2Statistics; import uk.ac.ebi.spot.ols.repository.solr.OlsSolrClient; -import uk.ac.ebi.spot.ols.repository.solr.OlsSolrQuery; -import uk.ac.ebi.spot.ols.repository.solr.SearchType; import uk.ac.ebi.spot.ols.repository.v1.V1OntologyRepository; import java.io.IOException; import java.util.Collection; import java.util.HashMap; -import java.util.List; import java.util.Map; @RestController @@ -37,34 +33,7 @@ public class V2StatisticsController { @RequestMapping(path = "/stats", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) public HttpEntity getStatistics() throws ResourceNotFoundException, IOException { - - Map coreStatus = solrClient.getCoreStatus(); - Map indexStatus = (Map) coreStatus.get("index"); - String lastModified = (String) indexStatus.get("lastModified"); - - SolrQuery query = new SolrQuery(); - - query.setQuery("*:*"); - query.setFacet(true); - query.addFacetField("type"); - query.setRows(0); - - QueryResponse qr = solrClient.runSolrQuery(query, null); - - Map counts = new HashMap<>(); - - for(FacetField.Count count : qr.getFacetField("type").getValues()) { - counts.put(count.getName(), (int)count.getCount()); - } - - V2Statistics stats = new V2Statistics(); - stats.lastModified = lastModified; - stats.numberOfOntologies = counts.containsKey("ontology") ? counts.get("ontology") : 0; - stats.numberOfClasses = counts.containsKey("class") ? counts.get("class") : 0; - stats.numberOfIndividuals = counts.containsKey("individual") ? counts.get("individual") : 0; - stats.numberOfProperties = counts.containsKey("property") ? 
counts.get("property") : 0; - - return new ResponseEntity<>( stats, HttpStatus.OK); + return new ResponseEntity<>( computeStats("*:*"), HttpStatus.OK); } @RequestMapping(path = "/statsby", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) @@ -84,6 +53,11 @@ public HttpEntity getStatistics( } String queryString = sb.toString().substring(0,sb.toString().lastIndexOf(" OR ")); + return new ResponseEntity<>( computeStats(queryString), HttpStatus.OK); + } + + private V2Statistics computeStats(String queryString) throws IOException { + Map coreStatus = solrClient.getCoreStatus(); Map indexStatus = (Map) coreStatus.get("index"); String lastModified = (String) indexStatus.get("lastModified"); @@ -109,7 +83,6 @@ public HttpEntity getStatistics( stats.numberOfIndividuals = counts.containsKey("individual") ? counts.get("individual") : 0; stats.numberOfProperties = counts.containsKey("property") ? counts.get("property") : 0; - return new ResponseEntity<>( stats, HttpStatus.OK); + return stats; } - } From 09b268b0dfbbc52d14223018efdda8de58d2b77d Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Mon, 11 Mar 2024 11:45:37 +0100 Subject: [PATCH 030/146] implemented call that retrieves all statistics with respect to schema --- backend/pom.xml | 6 +++++ .../api/v2/V2StatisticsController.java | 27 ++++++++++++++++--- 2 files changed, 30 insertions(+), 3 deletions(-) diff --git a/backend/pom.xml b/backend/pom.xml index 55586d644..935a5a0ed 100644 --- a/backend/pom.xml +++ b/backend/pom.xml @@ -145,6 +145,12 @@ springdoc-openapi-ui 1.6.4 + + org.apache.commons + commons-collections4 + 4.4 + compile + diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java index a8d6807f8..571aa2043 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java @@ -1,6 +1,8 @@ package uk.ac.ebi.spot.ols.controller.api.v2; +import io.swagger.v3.oas.annotations.Operation; import io.swagger.v3.oas.annotations.Parameter; +import org.apache.commons.collections4.map.MultiKeyMap; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.response.FacetField; import org.apache.solr.client.solrj.response.QueryResponse; @@ -17,9 +19,7 @@ import uk.ac.ebi.spot.ols.repository.v1.V1OntologyRepository; import java.io.IOException; -import java.util.Collection; -import java.util.HashMap; -import java.util.Map; +import java.util.*; @RestController @RequestMapping("/api/v2") @@ -56,6 +56,27 @@ public HttpEntity getStatistics( return new ResponseEntity<>( computeStats(queryString), HttpStatus.OK); } + @RequestMapping(path = "/allstatsbyschema", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + HttpEntity getStatisticsBySchema( + @RequestParam(value = "schema", required = false) Collection schemas, + @RequestParam(value = "lang", defaultValue = "en") String lang + + ) throws IOException { + MultiKeyMap summaries = new MultiKeyMap(); + + Collection keys = ontologyRepository.getSchemaKeys(lang); + + for (String key : keys) { + Set values = ontologyRepository.getSchemaValues(Collections.singleton(key),lang); + + for (String value : values) { + summaries.put(key,value, 
getStatistics(Collections.singleton(key),Collections.singleton(value), false,Collections.emptySet(),lang)); + } + } + + return new ResponseEntity<>( summaries, HttpStatus.OK); + } + private V2Statistics computeStats(String queryString) throws IOException { Map coreStatus = solrClient.getCoreStatus(); From 02cf1ffb0354cf4c769d3e7fbaa3032c180d766e Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Mon, 11 Mar 2024 13:10:06 +0100 Subject: [PATCH 031/146] handled null case and added swagger documentation --- .../controller/api/v2/V2StatisticsController.java | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java index 571aa2043..87d1ae198 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java @@ -31,11 +31,13 @@ public class V2StatisticsController { @Autowired private V1OntologyRepository ontologyRepository; + @Operation(description = "Get Whole System Statistics. Components in all ontologies are taken into consideration") @RequestMapping(path = "/stats", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) public HttpEntity getStatistics() throws ResourceNotFoundException, IOException { return new ResponseEntity<>( computeStats("*:*"), HttpStatus.OK); } + @Operation(description = "Get Schema and Classification based Statistics. Possible schema keys and possible classification values of particular keys can be inquired with /api/ontologies/schemakeys and /api/ontologies/schemavalues methods respectively.") @RequestMapping(path = "/statsby", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) public HttpEntity getStatistics( @RequestParam(value = "schema", required = false) Collection schemas, @@ -46,16 +48,17 @@ public HttpEntity getStatistics( @RequestParam(value = "lang", defaultValue = "en") String lang) throws ResourceNotFoundException, IOException{ ontologyIds = ontologyRepository.filterOntologyIDs(schemas,classifications,ontologyIds,exclusive,lang); - StringBuilder sb = new StringBuilder(); - for (String id : ontologyIds){ - sb.append("ontologyId:").append(id).append(" OR "); + String queryString = "none"; + if(ontologyIds != null){ + for (String id : ontologyIds){ + sb.append("ontologyId:").append(id).append(" OR "); + } + queryString = sb.toString().substring(0,sb.toString().lastIndexOf(" OR ")); } - - String queryString = sb.toString().substring(0,sb.toString().lastIndexOf(" OR ")); return new ResponseEntity<>( computeStats(queryString), HttpStatus.OK); } - + @Operation(description = "Get Schema based Statistics. 
From 02cf1ffb0354cf4c769d3e7fbaa3032c180d766e Mon Sep 17 00:00:00 2001
From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com>
Date: Mon, 11 Mar 2024 13:10:06 +0100
Subject: [PATCH 031/146] handled null case and added swagger documentation

---
 .../controller/api/v2/V2StatisticsController.java | 15 +++++++++------
 1 file changed, 9 insertions(+), 6 deletions(-)

diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java
index 571aa2043..87d1ae198 100644
--- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java
+++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java
@@ -31,11 +31,13 @@ public class V2StatisticsController {
     @Autowired
     private V1OntologyRepository ontologyRepository;
 
+    @Operation(description = "Get Whole System Statistics. Components in all ontologies are taken into consideration")
     @RequestMapping(path = "/stats", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET)
     public HttpEntity<V2Statistics> getStatistics() throws ResourceNotFoundException, IOException {
         return new ResponseEntity<>( computeStats("*:*"), HttpStatus.OK);
     }
 
+    @Operation(description = "Get Schema and Classification based Statistics. Possible schema keys and possible classification values of particular keys can be queried with the /api/ontologies/schemakeys and /api/ontologies/schemavalues methods respectively.")
     @RequestMapping(path = "/statsby", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET)
     public HttpEntity<V2Statistics> getStatistics(
         @RequestParam(value = "schema", required = false) Collection<String> schemas,
@@ -46,16 +48,17 @@ public HttpEntity<V2Statistics> getStatistics(
         @RequestParam(value = "lang", defaultValue = "en") String lang) throws ResourceNotFoundException, IOException{
 
         ontologyIds = ontologyRepository.filterOntologyIDs(schemas,classifications,ontologyIds,exclusive,lang);
-
         StringBuilder sb = new StringBuilder();
-        for (String id : ontologyIds){
-            sb.append("ontologyId:").append(id).append(" OR ");
+        String queryString = "none";
+        if(ontologyIds != null){
+            for (String id : ontologyIds){
+                sb.append("ontologyId:").append(id).append(" OR ");
+            }
+            queryString = sb.toString().substring(0,sb.toString().lastIndexOf(" OR "));
         }
-
-        String queryString = sb.toString().substring(0,sb.toString().lastIndexOf(" OR "));
         return new ResponseEntity<>( computeStats(queryString), HttpStatus.OK);
     }
-
+    @Operation(description = "Get Schema based Statistics. All schemas with their respective classifications can be computed if a schema is not specified.")
     @RequestMapping(path = "/allstatsbyschema", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET)
     HttpEntity<MultiKeyMap> getStatisticsBySchema(
         @RequestParam(value = "schema", required = false) Collection<String> schemas,
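The statsby handler above reduces the filtered ontology ids to a single Solr filter string, falling back to the literal query "none" when filterOntologyIDs returns null. A standalone sketch of that reduction follows; the ontology ids are made up, and the fallback behaviour is only summarized in the trailing comment.

    import java.util.List;

    public class QueryStringSketch {
        public static void main(String[] args) {
            List<String> ontologyIds = List.of("efo", "go", "duo");
            StringBuilder sb = new StringBuilder();
            for (String id : ontologyIds) {
                sb.append("ontologyId:").append(id).append(" OR ");
            }
            // trims the trailing " OR ", yielding:
            // ontologyId:efo OR ontologyId:go OR ontologyId:duo
            String queryString = sb.substring(0, sb.lastIndexOf(" OR "));
            System.out.println(queryString);
            // in the controller, a null id collection short-circuits to the
            // query string "none", which is intended to match no ontology
        }
    }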
From 49b63a4d27bcf26144e9c92380966b6aa26431e2 Mon Sep 17 00:00:00 2001
From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com>
Date: Wed, 27 Mar 2024 19:46:36 +0100
Subject: [PATCH 032/146] implemented filtering for v2 ontology controller

---
 .../api/v2/V2OntologyController.java               |  53 ++++-
 .../api/v2/helpers/DynamicQueryHelper.java         |   4 +-
 .../repository/v1/V1OntologyRepository.java        |  10 +-
 .../repository/v2/V2OntologyRepository.java        | 181 +++++++++++++++++-
 4 files changed, 230 insertions(+), 18 deletions(-)

diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologyController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologyController.java
index 1d322cd1d..28e82333b 100644
--- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologyController.java
+++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologyController.java
@@ -1,12 +1,19 @@
 package uk.ac.ebi.spot.ols.controller.api.v2;
 
 import com.google.gson.Gson;
+import io.swagger.v3.oas.annotations.Parameter;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.data.domain.Page;
+import org.springframework.data.domain.PageImpl;
 import org.springframework.data.domain.Pageable;
 import org.springframework.data.rest.webmvc.ResourceNotFoundException;
 import org.springframework.data.web.PageableDefault;
+import org.springframework.data.web.PagedResourcesAssembler;
+import org.springframework.hateoas.Link;
+import org.springframework.hateoas.MediaTypes;
+import org.springframework.hateoas.PagedModel;
 import org.springframework.http.HttpEntity;
 import org.springframework.http.HttpStatus;
 import org.springframework.http.MediaType;
@@ -15,16 +22,14 @@
 import org.springframework.web.bind.annotation.*;
 import uk.ac.ebi.spot.ols.controller.api.v2.helpers.DynamicQueryHelper;
 import uk.ac.ebi.spot.ols.controller.api.v2.responses.V2PagedAndFacetedResponse;
+import uk.ac.ebi.spot.ols.model.v1.V1Ontology;
 import uk.ac.ebi.spot.ols.model.v2.V2Entity;
 import uk.ac.ebi.spot.ols.repository.solr.OlsFacetedResultsPage;
 import uk.ac.ebi.spot.ols.repository.v2.V2EntityRepository;
 import uk.ac.ebi.spot.ols.repository.v2.V2OntologyRepository;
 
 import java.io.IOException;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 
 @RestController
 @RequestMapping("/api/v2/ontologies")
@@ -46,9 +51,13 @@ public HttpEntity<V2PagedAndFacetedResponse<V2Entity>> getOntologies(
         @RequestParam(value = "boostFields", required = false) String boostFields,
         @RequestParam(value = "exactMatch", required = false, defaultValue = "false") boolean exactMatch,
         @RequestParam(value = "includeObsoleteEntities", required = false, defaultValue = "false") boolean includeObsoleteEntities,
-        @RequestParam Map<String,Collection<String>> searchProperties
+        @RequestParam Map<String,Collection<String>> searchProperties,
+        @RequestParam(value = "schema", required = false) List<String> schemas,
+        @RequestParam(value = "classification", required = false) List<String> classifications,
+        @RequestParam(value = "ontology", required = false) List<String> ontologies,
+        @Parameter(description = "Set to true for the intersection of the given classifications; the default (false) returns their union.")
+        @RequestParam(value = "exclusive", required = false, defaultValue = "false") boolean exclusive
     ) throws ResourceNotFoundException, IOException {
-
         Map<String,Collection<String>> properties = new HashMap<>();
         if(!includeObsoleteEntities)
             properties.put("isObsolete", List.of("false"));
@@ -56,7 +65,7 @@ public HttpEntity<V2PagedAndFacetedResponse<V2Entity>> getOntologies(
         return new ResponseEntity<>(
             new V2PagedAndFacetedResponse<>(
-                ontologyRepository.find(pageable, lang, search, searchFields, boostFields, exactMatch, DynamicQueryHelper.filterProperties(properties))
+                ontologyRepository.find(pageable, lang, search, searchFields, boostFields, exactMatch, DynamicQueryHelper.filterProperties(properties),schemas,classifications,ontologies,exclusive)
             ),
             HttpStatus.OK);
     }
@@ -71,4 +80,34 @@ public HttpEntity<V2Entity> getOntology(
         if (entity == null) throw new ResourceNotFoundException();
         return new ResponseEntity<>( entity, HttpStatus.OK);
     }
+
+    @RequestMapping(path = "/schemakeys", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET)
+    HttpEntity<Page<String>> filterKeys(
+        @PageableDefault(size = 100, page = 0) Pageable pageable,
+        @RequestParam(value = "lang", required = false, defaultValue = "en") String lang,
+        PagedResourcesAssembler assembler){
+        Set<String> tempSet = ontologyRepository.getSchemaKeys(lang);
+        List<String> tempList = new ArrayList<String>();
+        tempList.addAll(tempSet);
+        final int start = (int)pageable.getOffset();
+        final int end = Math.min((start + pageable.getPageSize()), tempSet.size());
+        Page<String> document = new PageImpl<>(tempList.subList(start, end), pageable, tempSet.size());
+        return new ResponseEntity<>(document, HttpStatus.OK);
+    }
+
+    @RequestMapping(path = "/schemavalues", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET)
+    HttpEntity<Page<String>> filterValues(
+        @RequestParam(value = "schema", required = true) Collection<String> schemas,
+        @PageableDefault(size = 100, page = 0) Pageable pageable,
+        @RequestParam(value = "lang", required = false, defaultValue = "en") String lang,
+        PagedResourcesAssembler assembler){
+        Set<String> tempSet = ontologyRepository.getSchemaValues(schemas,lang);
+        List<String> tempList = new ArrayList<String>();
+        tempList.addAll(tempSet);
+        final int start = (int)pageable.getOffset();
+        final int end = Math.min((start + pageable.getPageSize()), tempSet.size());
+        Page<String> document = new PageImpl<>(tempList.subList(start, end), pageable, tempSet.size());
+        return new ResponseEntity<>(document, HttpStatus.OK);
+    }
+
 }
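The schemakeys/schemavalues endpoints above page an in-memory Set by hand, via subList and PageImpl, instead of delegating paging to a repository. A self-contained sketch of that pattern with made-up data; the class and variable names here are illustrative only.

    import java.util.List;
    import org.springframework.data.domain.Page;
    import org.springframework.data.domain.PageImpl;
    import org.springframework.data.domain.PageRequest;
    import org.springframework.data.domain.Pageable;

    public class ManualPagingSketch {
        public static void main(String[] args) {
            List<String> keys = List.of("collection", "subject", "license");
            Pageable pageable = PageRequest.of(0, 2); // first page, two items per page
            int start = (int) pageable.getOffset();
            int end = Math.min(start + pageable.getPageSize(), keys.size());
            // wraps just the requested slice while reporting the full total
            Page<String> page = new PageImpl<>(keys.subList(start, end), pageable, keys.size());
            System.out.println(page.getContent() + " of " + page.getTotalElements());
        }
    }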
diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/helpers/DynamicQueryHelper.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/helpers/DynamicQueryHelper.java
index 80b1ade32..684084742 100644
--- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/helpers/DynamicQueryHelper.java
+++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/helpers/DynamicQueryHelper.java
@@ -20,8 +20,8 @@ public static Map<String,Collection<String>> filterProperties(Map<String,Collection<String>> properties) {

diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1OntologyRepository.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1OntologyRepository.java
--- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1OntologyRepository.java
+++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1OntologyRepository.java
-    public Set<V1Ontology> getSet(String lang){
+    public Set<V1Ontology> getAll(String lang){
 
         Set<V1Ontology> tempSet = new HashSet<>();
         Validation.validateLang(lang);
@@ -113,7 +113,7 @@ public Set<V1Ontology> filter(Collection<String> schemas, Collection<String> cla
     public Set<V1Ontology> inclusiveFilter(Collection<String> schemas, Collection<String> classifications, String lang){
         Set<V1Ontology> tempSet = new HashSet<V1Ontology>();
         Set<V1Ontology> filteredSet = new HashSet<V1Ontology>();
-        tempSet.addAll(getSet(lang));
+        tempSet.addAll(getAll(lang));
 
         for (V1Ontology ontology : tempSet){
             for (Field field : ontology.config.getClass().getDeclaredFields()){
@@ -142,7 +142,7 @@ public Set<V1Ontology> inclusiveFilter(Collection<String> schemas, Collection<St
     public Set<V1Ontology> exclusiveFilter(Collection<String> schemas, Collection<String> classifications, String lang){
         Set<V1Ontology> tempSet = new HashSet<V1Ontology>();
         Set<V1Ontology> filteredSet = new HashSet<V1Ontology>();
-        tempSet.addAll(getSet(lang));
+        tempSet.addAll(getAll(lang));
 
         for (V1Ontology ontology : tempSet){
             Set<String> fieldSet = new HashSet<>();
@@ -184,7 +184,7 @@ else if (String.class.isAssignableFrom(field.getType())) {
     public Set<String> getSchemaKeys(String lang){
         Set<V1Ontology> tempSet = new HashSet<V1Ontology>();
-        tempSet.addAll(getSet(lang));
+        tempSet.addAll(getAll(lang));
         Set<String> keys = new HashSet<>();
         for (V1Ontology ontology : tempSet){
             if (ontology.config.classifications != null){
@@ -199,7 +199,7 @@ public Set<String> getSchemaKeys(String lang){
     public Set<String> getSchemaValues(Collection<String> schemas,String lang){
         Set<V1Ontology> tempSet = new HashSet<V1Ontology>();
-        tempSet.addAll(getSet(lang));
+        tempSet.addAll(getAll(lang));
         Set<String> values = new HashSet<>();
         for (V1Ontology ontology : tempSet){
             if (ontology.config.classifications != null){

diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2OntologyRepository.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2OntologyRepository.java
index ce050838c..2cf34acbf 100644
--- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2OntologyRepository.java
+++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2OntologyRepository.java
@@ -1,10 +1,13 @@
 package uk.ac.ebi.spot.ols.repository.v2;
 
+import com.google.common.collect.Sets;
+import com.google.gson.JsonElement;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.data.domain.Pageable;
 import org.springframework.data.rest.webmvc.ResourceNotFoundException;
 import org.springframework.stereotype.Component;
 
+import uk.ac.ebi.spot.ols.model.v1.V1Ontology;
 import uk.ac.ebi.spot.ols.model.v2.V2Entity;
 import uk.ac.ebi.spot.ols.repository.neo4j.OlsNeo4jClient;
 import uk.ac.ebi.spot.ols.repository.solr.SearchType;
@@ -14,14 +17,16 @@
 import uk.ac.ebi.spot.ols.repository.Validation;
 import uk.ac.ebi.spot.ols.repository.transforms.LocalizationTransform;
 import uk.ac.ebi.spot.ols.repository.transforms.RemoveLiteralDatatypesTransform;
+import uk.ac.ebi.spot.ols.repository.v1.mappers.V1OntologyMapper;
 import uk.ac.ebi.spot.ols.repository.v2.helpers.V2DynamicFilterParser;
 import uk.ac.ebi.spot.ols.repository.v2.helpers.V2SearchFieldsParser;
 
-import java.util.Collection;
-import java.util.List;
+import java.lang.reflect.Field;
+import java.util.*;
 import java.io.IOException;
-import java.util.Map;
+
+import static java.util.stream.Collectors.toCollection;
 
 @Component
 public class V2OntologyRepository {
@@ -34,7 +39,7 @@ public class V2OntologyRepository {
 
     public OlsFacetedResultsPage<V2Entity> find(
-            Pageable pageable, String lang, String search, String searchFields, String boostFields, boolean exactMatch, Map<String,Collection<String>> properties) throws IOException {
+            Pageable pageable, String lang, String search, String searchFields, String boostFields, boolean exactMatch, Map<String,Collection<String>> properties, Collection<String> schemas,Collection<String> classifications,Collection<String> ontologies,boolean exclusive) throws IOException {
 
         Validation.validateLang(lang);
 
@@ -47,6 +52,14 @@ public OlsFacetedResultsPage<V2Entity> find(
         query.setSearchText(search);
         query.setExactMatch(exactMatch);
         query.addFilter("type", List.of("ontology"), SearchType.WHOLE_FIELD);
+        System.out.println("0");
+        Collection<String> filteredOntologies = filterOntologyIDs(schemas,classifications, ontologies, exclusive, lang);
+        if(filteredOntologies != null){
+            for (String ontologyId : filteredOntologies)
+                Validation.validateOntologyId(ontologyId);
+            query.addFilter("ontologyId",filteredOntologies, SearchType.CASE_INSENSITIVE_TOKENS);
+        }
+
         V2SearchFieldsParser.addSearchFieldsToQuery(query, searchFields);
         V2SearchFieldsParser.addBoostFieldsToQuery(query, boostFields);
         V2DynamicFilterParser.addDynamicFiltersToQuery(query, properties);
@@ -77,6 +90,166 @@ public V2Entity getById(String ontologyId, String lang) throws ResourceNotFoundE
         );
     }
 
+    public Set<V2Entity> getOntologies(String lang){
+        Set<V2Entity> entities = new HashSet<>();
+        OlsSolrQuery query = new OlsSolrQuery();
+
+        query.addFilter("type", List.of("ontology"), SearchType.WHOLE_FIELD);
+        for (JsonElement element : solrClient.getSet(query))
+            entities.add(new V2Entity(
+                LocalizationTransform.transform(
+                    RemoveLiteralDatatypesTransform.transform(
+                        element
+                    ),
+                    lang
+                )
+            ));
+
+        return entities;
+
+    }
+
+    public Collection<String> filterOntologyIDs(Collection<String> schemas,Collection<String> classifications, Collection<String> ontologies, boolean exclusiveFilter, String lang){
+        if (schemas != null)
+            schemas.remove("");
+        if (classifications != null)
+            classifications.remove("");
+        if(ontologies != null)
+            ontologies.remove("");
+        if((schemas == null || schemas.size() == 0 ) && (classifications == null || classifications.size() == 0 ) && (ontologies == null || ontologies.size() == 0))
+            return null;
+        if ((schemas == null || schemas.size() == 0 ) || (classifications == null || classifications.size() == 0 ))
+            return ontologies;
+        Set<V2Entity> documents = filter(schemas, classifications, exclusiveFilter,lang);
+        Set<String> filteredOntologySet = new HashSet<String>();
+        for (V2Entity document : documents){
+            filteredOntologySet.add(document.any().get("ontologyId").toString());
+        }
+        System.out.println("filteredOntologySet: "+filteredOntologySet);
+        if (( ontologies == null || ontologies.size() == 0) && filteredOntologySet.size() > 0)
+            return filteredOntologySet;
+        else if (schemas != null)
+            if ((ontologies == null || ontologies.size() == 0) && (schemas.size() > 0 || classifications.size() > 0 ))
+                return new HashSet<String>(Arrays.asList("nosuchontologyfound"));
+
+        Set<String> postFilterOntologySet;
+
+        if(ontologies == null){
+            ontologies = new HashSet<String>();
+            System.out.println("ontologies == null");
+        } else {
+            ontologies = new HashSet<String>(ontologies);
+            System.out.println("ontologies <> null");
+        }
+
+        System.out.println("ontologies: "+ontologies);
+        if (exclusiveFilter){
+            postFilterOntologySet = Sets.intersection(filteredOntologySet,new HashSet<String>(ontologies));
+            System.out.println("intersection");
+        } else {
+            postFilterOntologySet = Sets.union(filteredOntologySet,new HashSet<String>(ontologies));
+            System.out.println("union");
+        }
+        if(postFilterOntologySet.size() == 0)
+            postFilterOntologySet = new HashSet<String>(Arrays.asList("nosuchontologyfound"));
+        return postFilterOntologySet;
+    }
+
+    public Set<V2Entity> filter(Collection<String> schemas, Collection<String> classifications, boolean exclusive, String lang){
+        Set<V2Entity> tempSet = new HashSet<V2Entity>();
+        if(schemas != null && classifications != null)
+            if(!exclusive) {
+                for (V2Entity ontologyDocument : getOntologies(lang)) {
+                    for(Map<String,Collection<String>> classificationSchema : (Collection<Map<String,Collection<String>>>) ontologyDocument.any().get("classifications")) {
+                        for (String schema: schemas)
+                            if(classificationSchema.containsKey(schema))
+                                for (String classification: classifications) {
+                                    if (classificationSchema.get(schema) != null)
+                                        if (!classificationSchema.get(schema).isEmpty())
+                                            if (classificationSchema.get(schema).contains(classification)) {
+                                                tempSet.add(ontologyDocument);
+                                            }
+                                }
+
+                    }
+                }
+            } else if (exclusive && schemas != null && schemas.size() == 1 && classifications != null && classifications.size() == 1) {
+                String schema = schemas.iterator().next();
+                String classification = classifications.iterator().next();
+                System.out.println("schema: "+schema);
+                System.out.println("classification: "+classification);
+                for (V2Entity ontologyDocument : getOntologies(lang)){
+                    for(Map<String,Collection<String>> classificationSchema : (Collection<Map<String,Collection<String>>>) ontologyDocument.any().get("classifications")){
+                        if(classificationSchema.containsKey(schema))
+                            if (classificationSchema.get(schema) != null)
+                                if (!classificationSchema.get(schema).isEmpty()){
+                                    for (String s :classificationSchema.get(schema))
+                                        System.out.println(s);
+                                    if(classificationSchema.get(schema).contains(classification))
+                                        tempSet.add(ontologyDocument);
+                                }
+
+                    }
+                }
+            } else {
+                for (V2Entity ontologyDocument : getOntologies(lang)) {
+                    Set<String> tempClassifications = new HashSet<String>();
+                    if(ontologyDocument.any().get("classifications") != null)
+                        if (!((Collection<Map<String,Collection<String>>>) ontologyDocument.any().get("classifications")).isEmpty()) {
+                            for (Map<String,Collection<String>> classificationSchema : (Collection<Map<String,Collection<String>>>) ontologyDocument.any().get("classifications")) {
+                                for (String schema : schemas)
+                                    if (classificationSchema.containsKey(schema)) {
+                                        for (String classification : classifications) {
+                                            if (classificationSchema.get(schema) != null) {
+                                                if (!classificationSchema.get(schema).isEmpty()) {
+                                                    if (classificationSchema.get(schema).contains(classification)) {
+                                                        tempClassifications.add(classification);
+                                                    }
+                                                }
+                                            }
+                                        }
+                                    }
+                            }
+                            if (tempClassifications.containsAll(classifications))
+                                tempSet.add(ontologyDocument);
+                        }
+                }
+            }
+        return tempSet;
+    }
+
+    public Set<String> getSchemaKeys(String lang){
+        Set<V2Entity> tempSet = new HashSet<V2Entity>();
+        tempSet.addAll(getOntologies(lang));
+        Set<String> keys = new HashSet<>();
+        for (V2Entity ontology : tempSet){
+            if (ontology.any().containsKey("classifications")){
+                Collection temp = (Collection) ontology.any().get("classifications");
+                for (Object o : temp){
+                    keys.addAll(((Map<String,Collection<String>>) o).keySet());
+                }
+            }
+        }
+        return keys;
+    }
+
+    public Set<String> getSchemaValues(Collection<String> schemas,String lang){
+        Set<V2Entity> tempSet = new HashSet<V2Entity>();
+        tempSet.addAll(getOntologies(lang));
+        Set<String> values = new HashSet<>();
+        for (V2Entity ontology : tempSet){
+            if (ontology.any().containsKey("classifications")){
+                Collection temp = (Collection) ontology.any().get("classifications");
+                for (Object o : temp){
+                    for (Map.Entry<String,Collection<String>> entry : ((Map<String,Collection<String>>) o).entrySet())
+                        for (String value : entry.getValue())
+                            if(schemas.contains(entry.getKey()))
+                                values.add(value);
+                }
+            }
+        }
+        return values;
+    }
 }
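The exclusive flag in the repository code above switches between Guava's Sets.union and Sets.intersection when combining explicitly requested ontology ids with the ids matched by schema/classification filtering. A tiny sketch of that choice with made-up ids (set iteration order is unspecified, the comments only describe the contents):

    import java.util.Set;
    import com.google.common.collect.Sets;

    public class ExclusiveFlagSketch {
        public static void main(String[] args) {
            Set<String> fromClassifications = Set.of("efo", "go");
            Set<String> requested = Set.of("go", "duo");

            // union: efo, go, duo (exclusive = false)
            System.out.println(Sets.union(fromClassifications, requested));
            // intersection: go (exclusive = true)
            System.out.println(Sets.intersection(fromClassifications, requested));
        }
    }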
From 5baa4f3536fc00698d02543a8d768da04a89c9d4 Mon Sep 17 00:00:00 2001
From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com>
Date: Thu, 28 Mar 2024 12:26:03 +0100
Subject: [PATCH 033/146] changed the underlying ontology repository class to
 v2 for ontology statistics

---
 .../ebi/spot/ols/controller/api/v2/V2StatisticsController.java | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java
index 87d1ae198..0db4fe7a0 100644
--- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java
+++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java
@@ -17,6 +17,7 @@
 import uk.ac.ebi.spot.ols.model.v2.V2Statistics;
 import uk.ac.ebi.spot.ols.repository.solr.OlsSolrClient;
 import uk.ac.ebi.spot.ols.repository.v1.V1OntologyRepository;
+import uk.ac.ebi.spot.ols.repository.v2.V2OntologyRepository;
 
 import java.io.IOException;
 import java.util.*;
@@ -29,7 +30,7 @@ public class V2StatisticsController {
     OlsSolrClient solrClient;
 
     @Autowired
-    private V1OntologyRepository ontologyRepository;
+    private V2OntologyRepository ontologyRepository;
From e4205e75ae51f52f7a388b8380318eaad18a0563 Mon Sep 17 00:00:00 2001
From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com>
Date: Wed, 3 Apr 2024 19:46:21 +0200
Subject: [PATCH 034/146] implemented skos v2 api for EBISPOT#625 and
 TIBHannover#1

---
 .../controller/api/v2/V2ClassController.java       |   2 +-
 .../v2/V2OntologySKOSConceptController.java        | 395 ++++++++++++++++++
 .../ols/repository/solr/OlsSolrClient.java         |   4 +-
 .../ols/repository/v2/V2ClassRepository.java       | 246 ++++++++++-
 4 files changed, 640 insertions(+), 7 deletions(-)
 create mode 100644 backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologySKOSConceptController.java

diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2ClassController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2ClassController.java
index 3c1fd9e9a..b35203b4c 100644
--- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2ClassController.java
+++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2ClassController.java
@@ -98,7 +98,7 @@ public HttpEntity<V2Entity> getClass(
 
         iri = UriUtils.decode(iri, "UTF-8");
 
-        V2Entity entity = classRepository.getByOntologyIdAndIri(ontologyId, iri, lang);
+        V2Entity entity = classRepository.findByOntologyAndIri(ontologyId, iri, lang);
         if (entity == null) throw new ResourceNotFoundException();
         return new ResponseEntity<>( entity, HttpStatus.OK);
     }
diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologySKOSConceptController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologySKOSConceptController.java
new file mode 100644
index 000000000..55147f0c7
--- /dev/null
+++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologySKOSConceptController.java
@@ -0,0 +1,395 @@
+package uk.ac.ebi.spot.ols.controller.api.v2;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectWriter;
+import com.google.gson.JsonObject;
+import io.swagger.v3.oas.annotations.Operation;
+import io.swagger.v3.oas.annotations.Parameter;
+import io.swagger.v3.oas.annotations.media.Schema;
+import io.swagger.v3.oas.annotations.tags.Tag;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.data.domain.Page;
+import org.springframework.data.domain.PageImpl;
+import org.springframework.data.domain.Pageable;
+import org.springframework.data.rest.webmvc.ResourceNotFoundException;
+import org.springframework.data.web.PagedResourcesAssembler;
+import org.springframework.hateoas.MediaTypes;
+import org.springframework.hateoas.PagedModel;
+import org.springframework.http.HttpEntity;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.MediaType;
+import org.springframework.http.ResponseEntity;
+import org.springframework.web.bind.annotation.*;
+import org.springframework.web.util.UriUtils;
+import uk.ac.ebi.spot.ols.controller.api.v1.TopConceptEnum;
+import uk.ac.ebi.spot.ols.controller.api.v1.V1TermAssembler;
+import uk.ac.ebi.spot.ols.model.v1.V1Term;
+import uk.ac.ebi.spot.ols.model.v2.V2Entity;
+import uk.ac.ebi.spot.ols.repository.v1.TreeNode;
+import uk.ac.ebi.spot.ols.repository.v1.V1TermRepository;
+import uk.ac.ebi.spot.ols.repository.v2.V2ClassRepository;
+
+import javax.servlet.http.HttpServletRequest;
+import java.io.IOException;
+import java.util.*;
+
+/**
+ * @author Simon Jupp
+ * @date 02/11/15
+ * Samples, Phenotypes and Ontologies Team, EMBL-EBI
+ */
+@RestController
+@RequestMapping("/api/v2/ontologies")
+@Tag(name = "v2-ontology-skos-controller", description = "SKOS concept hierarchies and relations extracted from the classes (concepts) of a particular ontology in this service")
+public class V2OntologySKOSConceptController {
+
+    @Autowired
+    V2ClassRepository classRepository;
+
+    @Operation(description = "Get complete SKOS concept hierarchy or only top concepts based on alternative top concept identification methods and concept relations. If only top concepts are identified, they can be used to extract the following levels of the concept tree one by one using the /{onto}/conceptrelations/{iri} method with broader or narrower concept relations.")
+    @RequestMapping(path = "/{onto}/skos/tree", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET)
+    HttpEntity<List<TreeNode<V2Entity>>> getSKOSConceptHierarchyByOntology(
+        @Parameter(description = "ontology ID", required = true)
+        @PathVariable("onto") String ontologyId,
+        @Parameter(description = "infer top concepts by schema (hasTopConcept) or TopConceptOf property or broader/narrower relationships", required = true)
+        @RequestParam(value = "find_roots", required = true, defaultValue = "SCHEMA") TopConceptEnum topConceptIdentification,
+        @Parameter(description = "infer from narrower or broader relationships", required = true)
+        @RequestParam(value = "narrower", required = true, defaultValue = "false") boolean narrower,
+        @Parameter(description = "Extract the whole tree with children or only the top concepts", required = true)
+        @RequestParam(value = "with_children", required = true, defaultValue = "false") boolean withChildren,
+        @RequestParam(value = "obsoletes", required = false, defaultValue = "false") Boolean obsoletes,
+        @RequestParam(value = "lang", required = false, defaultValue = "en") String lang,
+        Pageable pageable) throws IOException {
+        ontologyId = ontologyId.toLowerCase();
+        if (TopConceptEnum.RELATIONSHIPS == topConceptIdentification)
+            return new ResponseEntity<>(classRepository.conceptTreeWithoutTop(ontologyId,narrower,withChildren,obsoletes,lang,pageable), HttpStatus.OK);
+        else
+            return new ResponseEntity<>(classRepository.conceptTree(ontologyId,TopConceptEnum.SCHEMA == topConceptIdentification,narrower, withChildren,obsoletes,lang,pageable), HttpStatus.OK);
+    }
+
+    @Operation(description = "Display complete SKOS concept hierarchy or only top concepts based on alternative top concept identification methods and concept relations. If only top concepts are identified, they can be used to extract the following levels of the concept tree one by one using the /{onto}/displayconceptrelations/{iri} method with broader or narrower concept relations.")
+    @RequestMapping(path = "/{onto}/skos/displaytree", produces = {MediaType.TEXT_PLAIN_VALUE}, method = RequestMethod.GET)
+    @ResponseBody
+    HttpEntity<String> displaySKOSConceptHierarchyByOntology(
+        @Parameter(description = "ontology ID", required = true)
+        @PathVariable("onto") String ontologyId,
+        @Parameter(description = "infer top concepts by schema (hasTopConcept) or TopConceptOf property or broader/narrower relationships", required = true)
+        @RequestParam(value = "find_roots", required = true, defaultValue = "SCHEMA") TopConceptEnum topConceptIdentification,
+        @Parameter(description = "infer from narrower or broader relationships", required = true)
+        @RequestParam(value = "narrower", required = true, defaultValue = "false") boolean narrower,
+        @Parameter(description = "Extract the whole tree with children or only the top concepts", required = true)
+        @RequestParam(value = "with_children", required = true, defaultValue = "false") boolean withChildren,
+        @Parameter(description = "display related concepts", required = true)
+        @RequestParam(value = "display_related", required = true, defaultValue = "false") boolean displayRelated,
+        @RequestParam(value = "obsoletes", required = false, defaultValue = "false") Boolean obsoletes,
+        @RequestParam(value = "lang", required = false, defaultValue = "en") String lang,
+        Pageable pageable) throws IOException {
+        ontologyId = ontologyId.toLowerCase();
+        List<TreeNode<V2Entity>> rootIndividuals = null;
+        if(TopConceptEnum.RELATIONSHIPS == topConceptIdentification)
+            rootIndividuals = classRepository.conceptTreeWithoutTop(ontologyId,narrower,withChildren,obsoletes,lang,pageable);
+        else
+            rootIndividuals = classRepository.conceptTree(ontologyId,TopConceptEnum.SCHEMA == topConceptIdentification,narrower, withChildren,obsoletes,lang,pageable);
+        StringBuilder sb = new StringBuilder();
+        for (TreeNode<V2Entity> root : rootIndividuals) {
+            sb.append(root.getIndex() + " , "+ root.getData().any().get("label").toString() + " , " + root.getData().any().get("iri").toString()).append("\n");
+            sb.append(generateConceptHierarchyTextByOntology(root, displayRelated));
+        }
+
+        return new HttpEntity<String>(sb.toString());
+    }
+
+    @Operation(description = "Get partial SKOS concept hierarchy based on the encoded iri of the designated top concept")
+    @RequestMapping(path = "/{onto}/skos/{iri}/tree", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET)
+    HttpEntity<TreeNode<V2Entity>> getSKOSConceptHierarchyByOntologyAndIri(
+        @Parameter(description = "ontology ID", required = true)
+        @PathVariable("onto") String ontologyId,
+        @Parameter(description = "encoded concept IRI", required = true)
+        @PathVariable("iri") String iri,
+        @Parameter(description = "infer from narrower or broader relationships", required = true)
+        @RequestParam(value = "narrower", required = true, defaultValue = "false") boolean narrower,
+        @Parameter(description = "index value for the root term", required = true)
+        @RequestParam(value = "index", required = true, defaultValue = "1") String index,
+        @RequestParam(value = "obsoletes", required = false, defaultValue = "false") Boolean obsoletes,
+        @RequestParam(value = "lang", required = false, defaultValue = "en") String lang,
+        Pageable pageable) throws IOException {
+        ontologyId = ontologyId.toLowerCase();
+        TreeNode<V2Entity> topConcept = new TreeNode<V2Entity>(new V2Entity(new JsonObject()));
+        String decodedIri;
+        decodedIri = UriUtils.decode(iri, "UTF-8");
+        topConcept = classRepository.conceptSubTree(ontologyId, decodedIri, narrower, index, obsoletes, lang, pageable);
+
+        if (topConcept.getData().any().get("iri") == null)
+            throw new ResourceNotFoundException("No roots could be found for " + ontologyId );
+        return new ResponseEntity<>(topConcept, HttpStatus.OK);
+    }
+
+    @Operation(description = "Display partial SKOS concept hierarchy based on the encoded iri of the designated top concept")
+    @RequestMapping(path = "/{onto}/skos/{iri}/displaytree", produces = {MediaType.TEXT_PLAIN_VALUE}, method = RequestMethod.GET)
+    @ResponseBody
+    HttpEntity<String> displaySKOSConceptHierarchyByOntologyAndIri(
+        @Parameter(description = "ontology ID", required = true)
+        @PathVariable("onto") String ontologyId,
+        @Parameter(description = "encoded concept IRI", required = true)
+        @PathVariable("iri") String iri,
+        @Parameter(description = "infer from narrower or broader relationships", required = true)
+        @RequestParam(value = "narrower", required = true, defaultValue = "false") boolean narrower,
+        @Parameter(description = "display related concepts", required = true)
+        @RequestParam(value = "display_related", required = true, defaultValue = "false") boolean displayRelated,
+        @Parameter(description = "index value for the root term", required = true)
+        @RequestParam(value = "index", required = true, defaultValue = "1") String index,
+        @RequestParam(value = "obsoletes", required = false, defaultValue = "false") Boolean obsoletes,
+        @RequestParam(value = "lang", required = false, defaultValue = "en") String lang,
+        Pageable pageable) throws IOException {
+        ontologyId = ontologyId.toLowerCase();
+        TreeNode<V2Entity> topConcept = new TreeNode<V2Entity>(new V2Entity(new JsonObject()));
+        String decodedIri;
+        StringBuilder sb = new StringBuilder();
+        decodedIri = UriUtils.decode(iri, "UTF-8");
+        topConcept = classRepository.conceptSubTree(ontologyId, decodedIri, narrower, index, obsoletes, lang, pageable);
+
+        sb.append(topConcept.getIndex() + " , "+ topConcept.getData().any().get("label").toString() + " , " + topConcept.getData().any().get("iri").toString()).append("\n");
+        sb.append(generateConceptHierarchyTextByOntology(topConcept, displayRelated));
+
+        return new HttpEntity<String>(sb.toString());
+    }
+
+    @Operation(description = "Broader, Narrower and Related concept relations of a concept are listed in JSON if the concept iri is provided in encoded format.")
+    @RequestMapping(path = "/{onto}/skos/{iri}/relations", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET)
+    public HttpEntity<PagedModel<V2Entity>> findRelatedConcepts(
+        @Parameter(description = "ontology ID", required = true)
+        @PathVariable("onto") String ontologyId,
+        @Parameter(description = "encoded concept IRI", required = true)
+        @PathVariable("iri") String iri,
+        @Parameter(description = "skos based concept relation type", required = true)
+        @RequestParam(value = "relation_type", required = true, defaultValue = "broader")
+        @Schema(type = "string", allowableValues = { "broader", "narrower", "related" }) String relationType,
+        @RequestParam(value = "obsoletes", required = false, defaultValue = "false") Boolean obsoletes,
+        @RequestParam(value = "lang", required = false, defaultValue = "en") String lang,
+        Pageable pageable,
+        PagedResourcesAssembler assembler) {
+
+        ontologyId = ontologyId.toLowerCase();
+        List<V2Entity> related = new ArrayList<V2Entity>();
+        String decodedIri = UriUtils.decode(iri, "UTF-8");
+        related = classRepository.findRelated(ontologyId, decodedIri, relationType,lang);
+
+
+        final int start = (int)pageable.getOffset();
+        final int end = Math.min((start + pageable.getPageSize()), related.size());
+        Page<V2Entity> conceptPage = new PageImpl<>(related.subList(start, end), pageable, related.size());
+
+        return new ResponseEntity<>( assembler.toModel(conceptPage), HttpStatus.OK);
+
+    }
+
+    @Operation(description = "Broader, Narrower and Related concept relations of a concept are displayed as text if the concept iri is provided in encoded format.")
+    @RequestMapping(path = "/{onto}/skos/{iri}/displayrelations", produces = {MediaType.TEXT_PLAIN_VALUE}, method = RequestMethod.GET)
+    @ResponseBody
+    public HttpEntity<String> displayRelatedConcepts(
+        @Parameter(description = "ontology ID", required = true)
+        @PathVariable("onto") String ontologyId,
+        @Parameter(description = "encoded concept IRI", required = true)
+        @PathVariable("iri") String iri,
+        @Parameter(description = "skos based concept relation type", required = true)
+        @RequestParam(value = "relation_type", required = true, defaultValue = "broader")
+        @Schema(type = "string", allowableValues = { "broader", "narrower", "related" }) String relationType,
+        @RequestParam(value = "obsoletes", required = false, defaultValue = "false") Boolean obsoletes,
+        @RequestParam(value = "lang", required = false, defaultValue = "en") String lang,
+        Pageable pageable,
+        PagedResourcesAssembler assembler) {
+        StringBuilder sb = new StringBuilder();
+        ontologyId = ontologyId.toLowerCase();
+        List<V2Entity> related = new ArrayList<V2Entity>();
+        String decodedIri = UriUtils.decode(iri, "UTF-8");
+        related = classRepository.findRelated(ontologyId, decodedIri, relationType,lang);
+
+        final int start = (int)pageable.getOffset();
+        final int end = Math.min((start + pageable.getPageSize()), related.size());
+        Page<V2Entity> conceptPage = new PageImpl<>(related.subList(start, end), pageable, related.size());
+        int count = 0;
+        for (V2Entity individual : conceptPage.getContent())
+            sb.append(++count).append(" , ").append(individual.any().get("label").toString()).append(" , ").append(individual.any().get("iri").toString()).append("\n");
+
+        return new HttpEntity<>( sb.toString());
+
+    }
+
+    @Operation(description = "Broader, Narrower and Related concept relations of a concept are listed in JSON if the concept iri is provided in encoded format. The relationship is identified indirectly based on the related concept's relation to the concept in question. This requires traversing all the available concepts and checking if they are related to the concept in question. For this reason, this method is relatively slower than the displayconceptrelations method. Nevertheless, it makes it possible to identify unforeseen relations of the concept in question")
+    @RequestMapping(path = "/{onto}/skos/{iri}/indirectrelations", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET)
+    public HttpEntity<List<V2Entity>> findRelatedConceptsIndirectly(
+        @Parameter(description = "ontology ID", required = true)
+        @PathVariable("onto") String ontologyId,
+        @Parameter(description = "encoded concept IRI", required = true)
+        @PathVariable("iri") String iri,
+        @Parameter(description = "skos based concept relation type", required = true)
+        @RequestParam(value = "relation_type", required = true, defaultValue = "broader")
+        @Schema(type = "string", allowableValues = { "broader", "narrower", "related" }) String relationType,
+        @RequestParam(value = "obsoletes", required = false, defaultValue = "false") Boolean obsoletes,
+        @RequestParam(value = "lang", required = false, defaultValue = "en") String lang,
+        Pageable pageable) throws IOException {
+
+        ontologyId = ontologyId.toLowerCase();
+        List<V2Entity> related = new ArrayList<V2Entity>();
+        String decodedIri = UriUtils.decode(iri, "UTF-8");
+        related = classRepository.findRelatedIndirectly(ontologyId, decodedIri, relationType, obsoletes,lang,pageable);
+
+        return new ResponseEntity<>( related, HttpStatus.OK);
+
+    }
+
+    @Operation(description = "Broader, Narrower and Related concept relations of a concept are displayed as plain text if the concept iri is provided in encoded format. The relationship is identified indirectly based on the related concept's relation to the concept in question. This requires traversing all the available concepts and checking if they are related to the concept in question. For this reason, this method is relatively slower than the displayconceptrelations method. Nevertheless, it makes it possible to identify unforeseen relations of the concept in question")
+    @RequestMapping(path = "/{onto}/skos/{iri}/displayindirectrelations", produces = {MediaType.TEXT_PLAIN_VALUE}, method = RequestMethod.GET)
+    @ResponseBody
+    public HttpEntity<String> displayRelatedConceptsIndirectly(
+        @Parameter(description = "ontology ID", required = true)
+        @PathVariable("onto") String ontologyId,
+        @Parameter(description = "encoded concept IRI", required = true)
+        @PathVariable("iri") String iri,
+        @Parameter(description = "skos based concept relation type", required = true)
+        @RequestParam(value = "relation_type", required = true, defaultValue = "broader")
+        @Schema(type = "string", allowableValues = { "broader", "narrower", "related" }) String relationType,
+        @Parameter(description = "include obsolete concepts", required = false)
+        @RequestParam(value = "obsoletes", required = false, defaultValue = "false") Boolean obsoletes,
+        @RequestParam(value = "lang", required = false, defaultValue = "en") String lang,
+        Pageable pageable) throws IOException {
+        StringBuilder sb = new StringBuilder();
+        ontologyId = ontologyId.toLowerCase();
+        List<V2Entity> related = new ArrayList<V2Entity>();
+        String decodedIri = UriUtils.decode(iri, "UTF-8");
+        related = classRepository.findRelatedIndirectly(ontologyId, decodedIri, relationType, obsoletes,lang,pageable);
+
+        int count = 0;
+        for (V2Entity individual : related)
+            sb.append(++count).append(" , ").append(individual.any().get("label").toString()).append(" , ").append(individual.any().get("iri").toString()).append("\n");
+
+
+        return new ResponseEntity<>( sb.toString(), HttpStatus.OK);
+
+    }
+
+    @Operation(description = "Node and Edge definitions needed to visualize the nodes that are directly related to the subject term. Ontology ID and encoded iri are required.")
") + @RequestMapping(path = "/{onto}/skos/{iri}/graph", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + public HttpEntity retrieveImmediateGraph( + @Parameter(description = "ontology ID", required = true) + @PathVariable("onto") String ontologyId, + @Parameter(description = "encoded concept IRI", required = true) + @PathVariable("iri") String iri, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang){ + + List related = new ArrayList(); + String decodedIri = UriUtils.decode(iri, "UTF-8"); + + V2Entity subjectTerm = classRepository.findByOntologyAndIri(ontologyId, decodedIri, lang); + + related = classRepository.findRelated(ontologyId, decodedIri, "related",lang); + + List narrower = new ArrayList(); + narrower = classRepository.findRelated(ontologyId, decodedIri, "narrower",lang); + + List broader = new ArrayList(); + broader = classRepository.findRelated(ontologyId, decodedIri, "broader",lang); + + Set relatedNodes = new HashSet(); + related.forEach(term -> relatedNodes.add(new Node(term.any().get("iri").toString(), term.any().get("label").toString()))); + Set narrowerNodes = new HashSet(); + narrower.forEach(term -> narrowerNodes.add(new Node(term.any().get("iri").toString(), term.any().get("label").toString()))); + Set broaderNodes = new HashSet(); + broader.forEach(term -> broaderNodes.add(new Node(term.any().get("iri").toString(), term.any().get("label").toString()))); + + Set edges = new HashSet(); + relatedNodes.forEach(node -> edges.add(new Edge(decodedIri, node.iri, "related","http://www.w3.org/2004/02/skos/core#related"))); + narrowerNodes.forEach(node -> edges.add(new Edge(decodedIri, node.iri, "narrower","http://www.w3.org/2004/02/skos/core#narrower"))); + broaderNodes.forEach(node -> edges.add(new Edge(decodedIri, node.iri, "broader","http://www.w3.org/2004/02/skos/core#broader"))); + + Set nodes = new HashSet(); + nodes.add(new Node(decodedIri,subjectTerm.any().get("label").toString())); + nodes.addAll(relatedNodes); + nodes.addAll(broaderNodes); + nodes.addAll(narrowerNodes); + + + Map graph = new HashMap(); + graph.put("nodes", nodes); + graph.put("edges", edges); + ObjectWriter ow = new ObjectMapper().writer().withDefaultPrettyPrinter(); + try { + return new ResponseEntity<>(ow.writeValueAsString(graph),HttpStatus.OK); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + } + + public StringBuilder generateConceptHierarchyTextByOntology(TreeNode rootConcept, boolean displayRelated) { + StringBuilder sb = new StringBuilder(); + for (TreeNode childConcept : rootConcept.getChildren()) { + sb.append(childConcept.getIndex() + " , "+ childConcept.getData().any().get("label").toString() + " , " + childConcept.getData().any().get("iri").toString()).append("\n"); + sb.append(generateConceptHierarchyTextByOntology(childConcept,displayRelated)); + } + if(displayRelated) + for (TreeNode relatedConcept : rootConcept.getRelated()) { + sb.append(relatedConcept.getIndex() + " , "+ relatedConcept.getData().any().get("label").toString() + " , " + relatedConcept.getData().any().get("iri").toString()).append("\n"); + sb.append(generateConceptHierarchyTextByOntology(relatedConcept,displayRelated)); + } + return sb; + } + + @ResponseStatus(value = HttpStatus.NOT_FOUND, reason = "Resource not found") + @ExceptionHandler(ResourceNotFoundException.class) + public void handleError(HttpServletRequest req, Exception exception) { + } + + public class Node { + String iri; + String label; + + 
public Node(String iri, String label) { + this.iri = iri; + this.label = label; + } + + public String getIri() { + return iri; + } + + public String getLabel() { + return label; + } + + } + + public class Edge { + String source; + String target; + String label; + String uri; + + public Edge(String source, String target, String label, String uri) { + this.source = source; + this.target = target; + this.label = label; + this.uri = uri; + } + + public String getSource() { + return source; + } + + public String getTarget() { + return target; + } + + public String getLabel() { + return label; + } + + public String getUri() { + return uri; + } + + } + +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/solr/OlsSolrClient.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/solr/OlsSolrClient.java index a1cb659e4..c2cd47052 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/solr/OlsSolrClient.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/solr/OlsSolrClient.java @@ -2,6 +2,7 @@ import com.google.gson.Gson; import com.google.gson.JsonElement; +import com.google.gson.JsonObject; import com.google.gson.JsonParser; import org.apache.http.HttpEntity; import org.apache.http.client.methods.CloseableHttpResponse; @@ -95,7 +96,8 @@ public JsonElement getFirst(OlsSolrQuery query) { if(qr.getResults().getNumFound() < 1) { logger.debug("Expected at least 1 result for solr getFirst for solr query = {}", query.constructQuery().jsonStr()); - throw new RuntimeException("Expected at least 1 result for solr getFirst"); + return new JsonObject(); + //throw new RuntimeException("Expected at least 1 result for solr getFirst"); } return getOlsEntityFromSolrResult(qr.getResults().get(0)); diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2ClassRepository.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2ClassRepository.java index 9e621e734..2c0612941 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2ClassRepository.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2ClassRepository.java @@ -1,11 +1,15 @@ package uk.ac.ebi.spot.ols.repository.v2; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.cache.annotation.Cacheable; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.data.rest.webmvc.ResourceNotFoundException; import org.springframework.stereotype.Component; +import uk.ac.ebi.spot.ols.controller.api.v2.helpers.DynamicQueryHelper; import uk.ac.ebi.spot.ols.model.v2.V2Entity; import uk.ac.ebi.spot.ols.repository.neo4j.OlsNeo4jClient; import uk.ac.ebi.spot.ols.repository.solr.SearchType; @@ -15,14 +19,12 @@ import uk.ac.ebi.spot.ols.repository.Validation; import uk.ac.ebi.spot.ols.repository.transforms.LocalizationTransform; import uk.ac.ebi.spot.ols.repository.transforms.RemoveLiteralDatatypesTransform; +import uk.ac.ebi.spot.ols.repository.v1.TreeNode; import uk.ac.ebi.spot.ols.repository.v2.helpers.V2DynamicFilterParser; import uk.ac.ebi.spot.ols.repository.v2.helpers.V2SearchFieldsParser; import java.io.IOException; -import java.util.Arrays; -import java.util.Collection; -import java.util.List; -import java.util.Map; +import java.util.*; @Component public class V2ClassRepository { @@ -33,6 +35,17 @@ public class V2ClassRepository { @Autowired OlsNeo4jClient neo4jClient; + public String broader = 
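Before the repository diff that follows, note the behavioural change just above: getFirst now returns an empty Gson JsonObject instead of throwing when Solr finds nothing, so callers are expected to guard against the empty case themselves. A minimal sketch of such a guard; the stub getFirst here only stands in for OlsSolrClient.getFirst and is not the real client.

    import com.google.gson.JsonElement;
    import com.google.gson.JsonObject;

    public class EmptyResultGuardSketch {
        // stands in for OlsSolrClient.getFirst(query); real code queries Solr
        static JsonElement getFirst(boolean found) {
            if (!found) return new JsonObject(); // the new "no result" convention
            JsonObject entity = new JsonObject();
            entity.addProperty("iri", "http://example.com/C1");
            return entity;
        }

        public static void main(String[] args) {
            JsonElement result = getFirst(false);
            if (result.isJsonObject() && result.getAsJsonObject().entrySet().isEmpty()) {
                System.out.println("no match, treat the entity as absent");
            }
        }
    }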
"http://www.w3.org/2004/02/skos/core#broader"; + + public String narrower = "http://www.w3.org/2004/02/skos/core#narrower"; + + public String related = "http://www.w3.org/2004/02/skos/core#related"; + + public String hasTopConcept = "http://www.w3.org/2004/02/skos/core#hasTopConcept"; + + public String topConceptOf = "http://www.w3.org/2004/02/skos/core#topConceptOf"; + + public OlsFacetedResultsPage find( Pageable pageable, String lang, String search, String searchFields, String boostFields, boolean exactMatch, Map> properties) throws IOException { @@ -82,7 +95,7 @@ public OlsFacetedResultsPage findByOntologyId( .map(V2Entity::new); } - public V2Entity getByOntologyIdAndIri(String ontologyId, String iri, String lang) throws ResourceNotFoundException { + public V2Entity findByOntologyAndIri(String ontologyId, String iri, String lang) throws ResourceNotFoundException { Validation.validateOntologyId(ontologyId); Validation.validateLang(lang); @@ -169,4 +182,227 @@ public Page getIndividualAncestorsByOntologyId(String ontologyId, Page .map(RemoveLiteralDatatypesTransform::transform) .map(V2Entity::new); } + + + + @Cacheable(value = "concepttree", key="#ontologyId.concat('-').concat(#schema).concat('-').concat(#narrower).concat('-').concat(#withChildren)") + public List> conceptTree (String ontologyId, boolean schema, boolean narrower, boolean withChildren, Boolean obsoletes, String lang, Pageable pageable) throws IOException { + + Map> properties = new HashMap<>(); + if(!obsoletes) + properties.put("isObsolete", List.of("false")); + + List listOfTerms = allClassesOfOntology(ontologyId, obsoletes, pageable, lang); + + List> rootTerms = new ArrayList>(); + int count = 0; + + if(schema) { + for (V2Entity term : listOfTerms) + if (term.any().get(hasTopConcept) != null) { + for (String iriTopConcept : (ArrayList) term.any().get(hasTopConcept)) { + V2Entity topConceptTerm = findTerm(listOfTerms,iriTopConcept); + TreeNode topConcept = new TreeNode(topConceptTerm); + topConcept.setIndex(String.valueOf(++count)); + if(withChildren) { + if(narrower) + populateChildrenandRelatedByNarrower(topConceptTerm,topConcept,listOfTerms); + else + populateChildrenandRelatedByBroader(topConceptTerm,topConcept,listOfTerms); + } + rootTerms.add(topConcept); + } + } + } else for (V2Entity term : listOfTerms) { + TreeNode tree = new TreeNode(term); + + if (tree.isRoot() && term.any().get(topConceptOf) != null) { + tree.setIndex(String.valueOf(++count)); + if(withChildren) { + if(narrower) + populateChildrenandRelatedByNarrower(term,tree,listOfTerms); + else + populateChildrenandRelatedByBroader(term,tree,listOfTerms); + } + rootTerms.add(tree); + } + } + + return rootTerms; + } + + public List allClassesOfOntology(String ontologyId, Boolean obsoletes, Pageable pageable, String lang) throws IOException { + Map> properties = new HashMap<>(); + if(!obsoletes) + properties.put("isObsolete", List.of("false")); + + Page terms = this.findByOntologyId(ontologyId, pageable, lang, null, null, null, false, DynamicQueryHelper.filterProperties(properties)); + List listOfTerms = new ArrayList(); + listOfTerms.addAll(terms.getContent()); + + while(terms.hasNext()) { + terms = findByOntologyId(ontologyId, terms.nextPageable(), lang, null, null, null, false, DynamicQueryHelper.filterProperties(properties)); + listOfTerms.addAll(terms.getContent()); + } + + return listOfTerms; + } + + @Cacheable(value = "concepttree", key="#ontologyId.concat('-').concat(#narrower).concat('-').concat(#withChildren)") + public List> conceptTreeWithoutTop 
(String ontologyId, boolean isNarrower, boolean withChildren, Boolean obsoletes, String lang, Pageable pageable) throws IOException { + + List listOfTerms = allClassesOfOntology(ontologyId, obsoletes, pageable, lang); + + Set rootIRIs = new HashSet(); + List> rootTerms = new ArrayList>(); + int count = 0; + if(!isNarrower) { + for (V2Entity term : listOfTerms) { + if(term.any() != null && term.any().get(broader) != null) { + for (String iriBroader : getRelationsAsList(term,broader)) { + V2Entity broaderTerm = findTerm(listOfTerms, iriBroader); + if (broaderTerm.any() != null && broaderTerm.any().get(broader) == null) { + rootIRIs.add(iriBroader); + } + + } + } + } + + for (String iri : rootIRIs) { + V2Entity topConceptTerm = findTerm(listOfTerms, iri); + TreeNode topConcept = new TreeNode(topConceptTerm); + topConcept.setIndex(String.valueOf(++count)); + if(withChildren) + populateChildrenandRelatedByBroader(topConceptTerm,topConcept,listOfTerms); + rootTerms.add(topConcept); + } + + } else { + for (V2Entity term : listOfTerms) { + if (term.any() != null && term.any().get(narrower) != null) { + boolean root = true; + for (V2Entity V2Entity : listOfTerms) { + if (V2Entity.any() != null && V2Entity.any().get(narrower) != null) { + for (String iriNarrower : getRelationsAsList(V2Entity,narrower)) { + if (term.any().get("iri").equals(iriNarrower)) + root = false; + } + } + } + + if (root) { + TreeNode topConcept = new TreeNode(term); + topConcept.setIndex(String.valueOf(++count)); + if (withChildren) + populateChildrenandRelatedByNarrower(term, topConcept, listOfTerms); + rootTerms.add(topConcept); + } + } + } + } + + return rootTerms; + } + + @Cacheable(value = "concepttree", key="#ontologyId.concat('-').concat('s').concat('-').concat(#iri).concat('-').concat(#narrower).concat('-').concat(#index)") + public TreeNode conceptSubTree(String ontologyId, String iri, boolean narrower, String index, Boolean obsoletes, String lang, Pageable pageable) throws IOException { + List listOfTerms = allClassesOfOntology(ontologyId, obsoletes, pageable, lang); + V2Entity topConceptTerm = findTerm(listOfTerms,iri); + TreeNode topConcept = new TreeNode(topConceptTerm); + topConcept.setIndex(index); + if(narrower) + populateChildrenandRelatedByNarrower(topConceptTerm,topConcept,listOfTerms); + else + populateChildrenandRelatedByBroader(topConceptTerm,topConcept,listOfTerms); + + return topConcept; + } + + public V2Entity findTerm(List wholeList, String iri) { + for (V2Entity term : wholeList) + if(term.any().get("iri").equals(iri)) + return term; + return new V2Entity(new JsonObject()); + } + + public List findRelated(String ontologyId, String iri, String relationType, String lang) { + List related = new ArrayList(); + V2Entity term = this.findByOntologyAndIri(ontologyId, iri, lang); + if (term != null) + if (term.any().get(relationType) != null) + for (String iriBroader : getRelationsAsList(term,relationType)) + related.add(this.findByOntologyAndIri(ontologyId, iriBroader, lang)); + + return related; + } + + public ListfindRelatedIndirectly(String ontologyId, String iri, String relationType, Boolean obsoletes, String lang, Pageable pageable) throws IOException { + List related = new ArrayList(); + + V2Entity V2Entity = this.findByOntologyAndIri(ontologyId, iri, lang); + if(V2Entity == null) + return related; + if(V2Entity.any().get("iri") == null) + return related; + + List listOfTerms = allClassesOfOntology(ontologyId, obsoletes, pageable, lang); + + for (V2Entity term : listOfTerms) { + if (term != null) + if 
(term.any().get(relationType) != null) + for (String iriRelated : getRelationsAsList(term,relationType)) + if(iriRelated.equals(iri)) + related.add(term); + } + + return related; + } + + public void populateChildrenandRelatedByNarrower(V2Entity term, TreeNode tree, List listOfTerms ) { + + if (term.any() != null) + for (String iriRelated : getRelationsAsList(term,related)) { + TreeNode related = new TreeNode(findTerm(listOfTerms, iriRelated)); + related.setIndex(tree.getIndex() + ".related"); + tree.addRelated(related); + } + int count = 0; + if (term.any() != null) + for (String iriChild : getRelationsAsList(term,narrower)) { + V2Entity childTerm = findTerm(listOfTerms, iriChild); + TreeNode child = new TreeNode(childTerm); + child.setIndex(tree.getIndex() + "." + ++count); + populateChildrenandRelatedByNarrower(childTerm, child, listOfTerms); + tree.addChild(child); + } + } + + public void populateChildrenandRelatedByBroader(V2Entity term, TreeNode tree, List listOfTerms) { + if (term.any() != null) + for (String iriRelated : getRelationsAsList(term,related)) { + TreeNode related = new TreeNode(findTerm(listOfTerms, iriRelated)); + related.setIndex(tree.getIndex() + ".related"); + tree.addRelated(related); + } + int count = 0; + for ( V2Entity V2Entity : listOfTerms) { + if (V2Entity.any() != null) + for (String iriBroader : getRelationsAsList(V2Entity,broader)) + if(term.any().get("iri") != null) + if (term.any().get("iri").equals(iriBroader)) { + TreeNode child = new TreeNode(V2Entity); + child.setIndex(tree.getIndex()+"."+ ++count); + populateChildrenandRelatedByBroader(V2Entity,child,listOfTerms); + tree.addChild(child); + } + } + } + + public List getRelationsAsList(V2Entity entity, String relationType){ + if(entity.any().get(relationType) instanceof String) + return Arrays.asList((String) entity.any().get(relationType)); + else + return (ArrayList) entity.any().getOrDefault(relationType, new ArrayList()); + } } From e4205e75ae51f52f7a388b8380318eaad18a0563 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Wed, 10 Apr 2024 15:58:44 +0200 Subject: [PATCH 035/146] refactored with enum for skos relations --- .../v2/V2OntologySKOSConceptController.java | 31 ++++++++-------- .../ebi/spot/ols/model/v2/SKOSRelation.java | 37 +++++++++++++++++++ .../ols/repository/v2/V2ClassRepository.java | 35 +++++++----------- 3 files changed, 65 insertions(+), 38 deletions(-) create mode 100644 backend/src/main/java/uk/ac/ebi/spot/ols/model/v2/SKOSRelation.java diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologySKOSConceptController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologySKOSConceptController.java index 55147f0c7..b61a38f17 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologySKOSConceptController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologySKOSConceptController.java @@ -27,6 +27,7 @@ import uk.ac.ebi.spot.ols.controller.api.v1.TopConceptEnum; import uk.ac.ebi.spot.ols.controller.api.v1.V1TermAssembler; import uk.ac.ebi.spot.ols.model.v1.V1Term; +import uk.ac.ebi.spot.ols.model.v2.SKOSRelation; import uk.ac.ebi.spot.ols.model.v2.V2Entity; import uk.ac.ebi.spot.ols.repository.v1.TreeNode; import uk.ac.ebi.spot.ols.repository.v1.V1TermRepository; @@ -166,7 +167,7 @@ public HttpEntity> findRelatedConcepts( @PathVariable("iri") String iri, @Parameter(description = "skos based concept relation type", required = true) 
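The tree-building methods in the repository above label every node with a dotted index ("1", "1.1", "1.1.1", ...) while recursing over narrower or broader concepts, and the display endpoints print one "index , label , iri" line per node. A stripped-down sketch of that numbering scheme follows; this TreeNode is a stand-in for illustration, not the repository's TreeNode class.

    import java.util.ArrayList;
    import java.util.List;

    public class ConceptTreeSketch {
        static class TreeNode {
            final String label;
            String index;
            final List<TreeNode> children = new ArrayList<>();
            TreeNode(String label) { this.label = label; }
        }

        static void number(TreeNode node, String index) {
            node.index = index;
            int count = 0;
            for (TreeNode child : node.children) {
                // children of "1" become "1.1", "1.2", ... as in the repository code
                number(child, index + "." + ++count);
            }
        }

        public static void main(String[] args) {
            TreeNode root = new TreeNode("top concept");
            root.children.add(new TreeNode("narrower A"));
            root.children.add(new TreeNode("narrower B"));
            number(root, "1");
            root.children.forEach(c -> System.out.println(c.index + " , " + c.label));
        }
    }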
@RequestParam(value = "relation_type", required = true, defaultValue = "broader") - @Schema(type = "string", allowableValues = { "broader", "narrower", "related" }) String relationType, + @Schema(type = "string", allowableValues = { "broader", "narrower", "related" }) SKOSRelation relationType, @RequestParam(value = "obsoletes", required = false, defaultValue = "false") Boolean obsoletes, @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, Pageable pageable, @@ -175,8 +176,7 @@ public HttpEntity> findRelatedConcepts( ontologyId = ontologyId.toLowerCase(); List related = new ArrayList(); String decodedIri = UriUtils.decode(iri, "UTF-8"); - related = classRepository.findRelated(ontologyId, decodedIri, relationType,lang); - + related = classRepository.findRelated(ontologyId, decodedIri, relationType.getPropertyName(),lang); final int start = (int)pageable.getOffset(); final int end = Math.min((start + pageable.getPageSize()), related.size()); @@ -196,7 +196,7 @@ public HttpEntity displayRelatedConcepts( @PathVariable("iri") String iri, @Parameter(description = "skos based concept relation type", required = true) @RequestParam(value = "relation_type", required = true, defaultValue = "broader") - @Schema(type = "string", allowableValues = { "broader", "narrower", "related" }) String relationType, + @Schema(type = "string", allowableValues = { "broader", "narrower", "related" }) SKOSRelation relationType, @RequestParam(value = "obsoletes", required = false, defaultValue = "false") Boolean obsoletes, @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, Pageable pageable, @@ -205,7 +205,7 @@ public HttpEntity displayRelatedConcepts( ontologyId = ontologyId.toLowerCase(); List related = new ArrayList(); String decodedIri = UriUtils.decode(iri, "UTF-8"); - related = classRepository.findRelated(ontologyId, decodedIri, relationType,lang); + related = classRepository.findRelated(ontologyId, decodedIri, relationType.getPropertyName(),lang); final int start = (int)pageable.getOffset(); final int end = Math.min((start + pageable.getPageSize()), related.size()); @@ -227,15 +227,14 @@ public HttpEntity> findRelatedConceptsIndirectly( @PathVariable("iri") String iri, @Parameter(description = "skos based concept relation type", required = true) @RequestParam(value = "relation_type", required = true, defaultValue = "broader") - @Schema(type = "string", allowableValues = { "broader", "narrower", "related" }) String relationType, + @Schema(type = "string", allowableValues = { "broader", "narrower", "related" }) SKOSRelation relationType, @RequestParam(value = "obsoletes", required = false, defaultValue = "false") Boolean obsoletes, @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, Pageable pageable) throws IOException { - ontologyId = ontologyId.toLowerCase(); List related = new ArrayList(); String decodedIri = UriUtils.decode(iri, "UTF-8"); - related = classRepository.findRelatedIndirectly(ontologyId, decodedIri, relationType, obsoletes,lang,pageable); + related = classRepository.findRelatedIndirectly(ontologyId, decodedIri, relationType.getPropertyName(), obsoletes,lang,pageable); return new ResponseEntity<>( related, HttpStatus.OK); @@ -251,7 +250,7 @@ public HttpEntity displayRelatedConceptsIndirectly( @PathVariable("iri") String iri, @Parameter(description = "skos based concept relation type", required = true) @RequestParam(value = "relation_type", required = true, defaultValue = "broader") - @Schema(type = "string", 
allowableValues = { "broader", "narrower", "related" }) String relationType, + @Schema(type = "string", allowableValues = { "broader", "narrower", "related" }) SKOSRelation relationType, @Parameter(description = "Page size to retrieve individuals", required = true) @RequestParam(value = "obsoletes", required = false, defaultValue = "false") Boolean obsoletes, @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, @@ -260,7 +259,7 @@ public HttpEntity displayRelatedConceptsIndirectly( ontologyId = ontologyId.toLowerCase(); List related = new ArrayList(); String decodedIri = UriUtils.decode(iri, "UTF-8"); - related = classRepository.findRelatedIndirectly(ontologyId, decodedIri, relationType, obsoletes,lang,pageable); + related = classRepository.findRelatedIndirectly(ontologyId, decodedIri, relationType.getPropertyName(), obsoletes,lang,pageable); int count = 0; for (V2Entity individual : related) @@ -285,13 +284,13 @@ public HttpEntity retrieveImmediateGraph( V2Entity subjectTerm = classRepository.findByOntologyAndIri(ontologyId, decodedIri, lang); - related = classRepository.findRelated(ontologyId, decodedIri, "related",lang); + related = classRepository.findRelated(ontologyId, decodedIri, SKOSRelation.related.getPropertyName(), lang); List narrower = new ArrayList(); - narrower = classRepository.findRelated(ontologyId, decodedIri, "narrower",lang); + narrower = classRepository.findRelated(ontologyId, decodedIri, SKOSRelation.narrower.getPropertyName(), lang); List broader = new ArrayList(); - broader = classRepository.findRelated(ontologyId, decodedIri, "broader",lang); + broader = classRepository.findRelated(ontologyId, decodedIri, SKOSRelation.broader.getPropertyName(), lang); Set relatedNodes = new HashSet(); related.forEach(term -> relatedNodes.add(new Node(term.any().get("iri").toString(), term.any().get("label").toString()))); @@ -301,9 +300,9 @@ public HttpEntity retrieveImmediateGraph( broader.forEach(term -> broaderNodes.add(new Node(term.any().get("iri").toString(), term.any().get("label").toString()))); Set edges = new HashSet(); - relatedNodes.forEach(node -> edges.add(new Edge(decodedIri, node.iri, "related","http://www.w3.org/2004/02/skos/core#related"))); - narrowerNodes.forEach(node -> edges.add(new Edge(decodedIri, node.iri, "narrower","http://www.w3.org/2004/02/skos/core#narrower"))); - broaderNodes.forEach(node -> edges.add(new Edge(decodedIri, node.iri, "broader","http://www.w3.org/2004/02/skos/core#broader"))); + relatedNodes.forEach(node -> edges.add(new Edge(decodedIri, node.iri, "related",SKOSRelation.related.getPropertyName()))); + narrowerNodes.forEach(node -> edges.add(new Edge(decodedIri, node.iri, "narrower",SKOSRelation.narrower.getPropertyName()))); + broaderNodes.forEach(node -> edges.add(new Edge(decodedIri, node.iri, "broader",SKOSRelation.broader.getPropertyName()))); Set nodes = new HashSet(); nodes.add(new Node(decodedIri,subjectTerm.any().get("label").toString())); diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/model/v2/SKOSRelation.java b/backend/src/main/java/uk/ac/ebi/spot/ols/model/v2/SKOSRelation.java new file mode 100644 index 000000000..4450a8c62 --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/model/v2/SKOSRelation.java @@ -0,0 +1,37 @@ +package uk.ac.ebi.spot.ols.model.v2; + +/** + * @author Erhun Giray TUNCAY + * @email giray.tuncay@tib.eu + * TIB-Leibniz Information Center for Science and Technology + */ +public enum SKOSRelation { + + broader("http://www.w3.org/2004/02/skos/core#broader"), + + 
narrower("http://www.w3.org/2004/02/skos/core#narrower"), + + related("http://www.w3.org/2004/02/skos/core#related"), + + hasTopConcept("http://www.w3.org/2004/02/skos/core#hasTopConcept"), + + topConceptOf("http://www.w3.org/2004/02/skos/core#topConceptOf"); + + private final String propertyName; + + SKOSRelation(String propertyName) { + this.propertyName = propertyName; + } + + public static String[] getNames() { + String[] commands = new String[SKOSRelation.values().length]; + for (int i = 0;i find( Pageable pageable, String lang, String search, String searchFields, String boostFields, boolean exactMatch, Map> properties) throws IOException { @@ -199,8 +191,8 @@ public List> conceptTree (String ontologyId, boolean schema, if(schema) { for (V2Entity term : listOfTerms) - if (term.any().get(hasTopConcept) != null) { - for (String iriTopConcept : (ArrayList) term.any().get(hasTopConcept)) { + if (term.any().get(hasTopConcept.getPropertyName()) != null) { + for (String iriTopConcept : (ArrayList) term.any().get(hasTopConcept.getPropertyName())) { V2Entity topConceptTerm = findTerm(listOfTerms,iriTopConcept); TreeNode topConcept = new TreeNode(topConceptTerm); topConcept.setIndex(String.valueOf(++count)); @@ -216,7 +208,7 @@ public List> conceptTree (String ontologyId, boolean schema, } else for (V2Entity term : listOfTerms) { TreeNode tree = new TreeNode(term); - if (tree.isRoot() && term.any().get(topConceptOf) != null) { + if (tree.isRoot() && term.any().get(topConceptOf.getPropertyName()) != null) { tree.setIndex(String.valueOf(++count)); if(withChildren) { if(narrower) @@ -258,10 +250,10 @@ public List> conceptTreeWithoutTop (String ontologyId, boolea int count = 0; if(!isNarrower) { for (V2Entity term : listOfTerms) { - if(term.any() != null && term.any().get(broader) != null) { - for (String iriBroader : getRelationsAsList(term,broader)) { + if(term.any() != null && term.any().get(broader.getPropertyName()) != null) { + for (String iriBroader : getRelationsAsList(term,broader.getPropertyName())) { V2Entity broaderTerm = findTerm(listOfTerms, iriBroader); - if (broaderTerm.any() != null && broaderTerm.any().get(broader) == null) { + if (broaderTerm.any() != null && broaderTerm.any().get(broader.getPropertyName()) == null) { rootIRIs.add(iriBroader); } @@ -284,7 +276,7 @@ public List> conceptTreeWithoutTop (String ontologyId, boolea boolean root = true; for (V2Entity V2Entity : listOfTerms) { if (V2Entity.any() != null && V2Entity.any().get(narrower) != null) { - for (String iriNarrower : getRelationsAsList(V2Entity,narrower)) { + for (String iriNarrower : getRelationsAsList(V2Entity,narrower.getPropertyName())) { if (term.any().get("iri").equals(iriNarrower)) root = false; } @@ -333,7 +325,6 @@ public List findRelated(String ontologyId, String iri, String relation if (term.any().get(relationType) != null) for (String iriBroader : getRelationsAsList(term,relationType)) related.add(this.findByOntologyAndIri(ontologyId, iriBroader, lang)); - return related; } @@ -362,14 +353,14 @@ public List findRelated(String ontologyId, String iri, String relation public void populateChildrenandRelatedByNarrower(V2Entity term, TreeNode tree, List listOfTerms ) { if (term.any() != null) - for (String iriRelated : getRelationsAsList(term,related)) { + for (String iriRelated : getRelationsAsList(term,related.getPropertyName())) { TreeNode related = new TreeNode(findTerm(listOfTerms, iriRelated)); related.setIndex(tree.getIndex() + ".related"); tree.addRelated(related); } int count = 0; if (term.any() != 
null) - for (String iriChild : getRelationsAsList(term,narrower)) { + for (String iriChild : getRelationsAsList(term,narrower.getPropertyName())) { V2Entity childTerm = findTerm(listOfTerms, iriChild); TreeNode child = new TreeNode(childTerm); child.setIndex(tree.getIndex() + "." + ++count); @@ -380,7 +371,7 @@ public void populateChildrenandRelatedByNarrower(V2Entity term, TreeNode tree, List listOfTerms) { if (term.any() != null) - for (String iriRelated : getRelationsAsList(term,related)) { + for (String iriRelated : getRelationsAsList(term,related.getPropertyName())) { TreeNode related = new TreeNode(findTerm(listOfTerms, iriRelated)); related.setIndex(tree.getIndex() + ".related"); tree.addRelated(related); @@ -388,7 +379,7 @@ public void populateChildrenandRelatedByBroader(V2Entity term, TreeNode child = new TreeNode(V2Entity); From 60eb8860564714f9416431831d99e17ee3709791 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Thu, 11 Apr 2024 11:36:18 +0200 Subject: [PATCH 036/146] refactored the whole skos functionality of v1 and v2 for EBISPOT#625 and TIBHAnnover#1 --- .../v1/V1OntologySKOSConceptController.java | 66 +----- .../v2/V2OntologySKOSConceptController.java | 103 ++------- .../java/uk/ac/ebi/spot/ols/model/Edge.java | 37 +++ .../java/uk/ac/ebi/spot/ols/model/Node.java | 25 ++ .../spot/ols/model/{v2 => }/SKOSRelation.java | 2 +- .../ols/repository/v2/V2ClassRepository.java | 206 +---------------- .../ols/repository/v2/V2SKOSRepository.java | 217 ++++++++++++++++++ 7 files changed, 316 insertions(+), 340 deletions(-) create mode 100644 backend/src/main/java/uk/ac/ebi/spot/ols/model/Edge.java create mode 100644 backend/src/main/java/uk/ac/ebi/spot/ols/model/Node.java rename backend/src/main/java/uk/ac/ebi/spot/ols/model/{v2 => }/SKOSRelation.java (96%) create mode 100644 backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2SKOSRepository.java diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologySKOSConceptController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologySKOSConceptController.java index 44415e81c..42a8a94d9 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologySKOSConceptController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologySKOSConceptController.java @@ -25,6 +25,9 @@ import org.springframework.web.bind.annotation.*; import org.springframework.web.util.UriUtils; +import uk.ac.ebi.spot.ols.model.Edge; +import uk.ac.ebi.spot.ols.model.Node; +import uk.ac.ebi.spot.ols.model.SKOSRelation; import uk.ac.ebi.spot.ols.model.v1.V1Term; import uk.ac.ebi.spot.ols.repository.v1.TreeNode; import uk.ac.ebi.spot.ols.repository.v1.V1TermRepository; @@ -33,9 +36,9 @@ import java.util.*; /** - * @author Simon Jupp - * @date 02/11/15 - * Samples, Phenotypes and Ontologies Team, EMBL-EBI + * @author Erhun Giray TUNCAY + * @email giray.tuncay@tib.eu + * TIB-Leibniz Information Center for Science and Technology */ @RestController @RequestMapping("/api/ontologies") @@ -302,9 +305,9 @@ public HttpEntity retrieveImmediateGraph( broader.forEach(term -> broaderNodes.add(new Node(term.iri, term.label))); Set edges = new HashSet(); - relatedNodes.forEach(node -> edges.add(new Edge(decodedIri, node.iri, "related","http://www.w3.org/2004/02/skos/core#related"))); - narrowerNodes.forEach(node -> edges.add(new Edge(decodedIri, node.iri, "narrower","http://www.w3.org/2004/02/skos/core#narrower"))); - broaderNodes.forEach(node ->
edges.add(new Edge(decodedIri, node.iri, "broader","http://www.w3.org/2004/02/skos/core#broader"))); + relatedNodes.forEach(node -> edges.add(new Edge(decodedIri, node.getIri(), "related", SKOSRelation.related.getPropertyName()))); + narrowerNodes.forEach(node -> edges.add(new Edge(decodedIri, node.getIri(), "narrower",SKOSRelation.narrower.getPropertyName()))); + broaderNodes.forEach(node -> edges.add(new Edge(decodedIri, node.getIri(), "broader",SKOSRelation.broader.getPropertyName()))); Set nodes = new HashSet(); nodes.add(new Node(decodedIri,subjectTerm.label)); @@ -342,55 +345,4 @@ public StringBuilder generateConceptHierarchyTextByOntology(TreeNode roo @ExceptionHandler(ResourceNotFoundException.class) public void handleError(HttpServletRequest req, Exception exception) { } - - public class Node { - String iri; - String label; - - public Node(String iri, String label) { - this.iri = iri; - this.label = label; - } - - public String getIri() { - return iri; - } - - public String getLabel() { - return label; - } - - } - - public class Edge { - String source; - String target; - String label; - String uri; - - public Edge(String source, String target, String label, String uri) { - this.source = source; - this.target = target; - this.label = label; - this.uri = uri; - } - - public String getSource() { - return source; - } - - public String getTarget() { - return target; - } - - public String getLabel() { - return label; - } - - public String getUri() { - return uri; - } - - } - } diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologySKOSConceptController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologySKOSConceptController.java index b61a38f17..a8d102245 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologySKOSConceptController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologySKOSConceptController.java @@ -8,8 +8,6 @@ import io.swagger.v3.oas.annotations.Parameter; import io.swagger.v3.oas.annotations.media.Schema; import io.swagger.v3.oas.annotations.tags.Tag; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.PageImpl; @@ -25,22 +23,21 @@ import org.springframework.web.bind.annotation.*; import org.springframework.web.util.UriUtils; import uk.ac.ebi.spot.ols.controller.api.v1.TopConceptEnum; -import uk.ac.ebi.spot.ols.controller.api.v1.V1TermAssembler; -import uk.ac.ebi.spot.ols.model.v1.V1Term; -import uk.ac.ebi.spot.ols.model.v2.SKOSRelation; +import uk.ac.ebi.spot.ols.model.Edge; +import uk.ac.ebi.spot.ols.model.Node; +import uk.ac.ebi.spot.ols.model.SKOSRelation; import uk.ac.ebi.spot.ols.model.v2.V2Entity; import uk.ac.ebi.spot.ols.repository.v1.TreeNode; -import uk.ac.ebi.spot.ols.repository.v1.V1TermRepository; -import uk.ac.ebi.spot.ols.repository.v2.V2ClassRepository; +import uk.ac.ebi.spot.ols.repository.v2.V2SKOSRepository; import javax.servlet.http.HttpServletRequest; import java.io.IOException; import java.util.*; /** - * @author Simon Jupp - * @date 02/11/15 - * Samples, Phenotypes and Ontologies Team, EMBL-EBI + * @author Erhun Giray TUNCAY + * @email giray.tuncay@tib.eu + * TIB-Leibniz Information Center for Science and Technology */ @RestController @RequestMapping("/api/v2/ontologies") @@ -48,7 +45,7 @@ public class V2OntologySKOSConceptController { @Autowired - V2ClassRepository classRepository; + 
V2SKOSRepository skosRepository; @Operation(description = "Get complete SKOS concept hierarchy or only top concepts based on alternative top concept identification methods and concept relations. If only top concepts are identified, they can be used to extract the following levels of the concept tree one by one using the /{onto}/conceptrelations/{iri} method with broader or narrower concept relations.") @RequestMapping(path = "/{onto}/skos/tree", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) @@ -66,9 +63,9 @@ HttpEntity>> getSKOSConceptHierarchyByOntology( Pageable pageable) throws IOException { ontologyId = ontologyId.toLowerCase(); if (TopConceptEnum.RELATIONSHIPS == topConceptIdentification) - return new ResponseEntity<>(classRepository.conceptTreeWithoutTop(ontologyId,narrower,withChildren,obsoletes,lang,pageable), HttpStatus.OK); + return new ResponseEntity<>(skosRepository.conceptTreeWithoutTop(ontologyId,narrower,withChildren,obsoletes,lang,pageable), HttpStatus.OK); else - return new ResponseEntity<>(classRepository.conceptTree(ontologyId,TopConceptEnum.SCHEMA == topConceptIdentification,narrower, withChildren,obsoletes,lang,pageable), HttpStatus.OK); + return new ResponseEntity<>(skosRepository.conceptTree(ontologyId,TopConceptEnum.SCHEMA == topConceptIdentification,narrower, withChildren,obsoletes,lang,pageable), HttpStatus.OK); } @Operation(description = "Display complete SKOS concept hierarchy or only top concepts based on alternative top concept identification methods and concept relations. If only top concepts are identified, they can be used to extract the following levels of the concept tree one by one using the /{onto}/displayconceptrelations/{iri} method with broader or narrower concept relations.") @@ -91,9 +88,9 @@ HttpEntity displaySKOSConceptHierarchyByOntology( ontologyId = ontologyId.toLowerCase(); List> rootIndividuals = null; if(TopConceptEnum.RELATIONSHIPS == topConceptIdentification) - rootIndividuals = classRepository.conceptTreeWithoutTop(ontologyId,narrower,withChildren,obsoletes,lang,pageable); + rootIndividuals = skosRepository.conceptTreeWithoutTop(ontologyId,narrower,withChildren,obsoletes,lang,pageable); else - rootIndividuals = classRepository.conceptTree(ontologyId,TopConceptEnum.SCHEMA == topConceptIdentification,narrower, withChildren,obsoletes,lang,pageable); + rootIndividuals = skosRepository.conceptTree(ontologyId,TopConceptEnum.SCHEMA == topConceptIdentification,narrower, withChildren,obsoletes,lang,pageable); StringBuilder sb = new StringBuilder(); for (TreeNode root : rootIndividuals) { sb.append(root.getIndex() + " , "+ root.getData().any().get("label").toString() + " , " + root.getData().any().get("iri").toString()).append("\n"); @@ -121,7 +118,7 @@ HttpEntity> getSKOSConceptHierarchyByOntologyAndIri( TreeNode topConcept = new TreeNode(new V2Entity(new JsonObject())); String decodedIri; decodedIri = UriUtils.decode(iri, "UTF-8"); - topConcept = classRepository.conceptSubTree(ontologyId, decodedIri, narrower, index, obsoletes, lang, pageable); + topConcept = skosRepository.conceptSubTree(ontologyId, decodedIri, narrower, index, obsoletes, lang, pageable); if (topConcept.getData().any().get("iri").toString() == null) throw new ResourceNotFoundException("No roots could be found for " + ontologyId ); @@ -150,7 +147,7 @@ HttpEntity displaySKOSConceptHierarchyByOntologyAndIri( String decodedIri; StringBuilder sb = new StringBuilder(); decodedIri = UriUtils.decode(iri, "UTF-8"); - topConcept = 
classRepository.conceptSubTree(ontologyId, decodedIri, narrower, index, obsoletes, lang, pageable); + topConcept = skosRepository.conceptSubTree(ontologyId, decodedIri, narrower, index, obsoletes, lang, pageable); sb.append(topConcept.getIndex() + " , "+ topConcept.getData().any().get("label").toString() + " , " + topConcept.getData().any().get("iri").toString()).append("\n"); sb.append(generateConceptHierarchyTextByOntology(topConcept, displayRelated)); @@ -176,7 +173,7 @@ public HttpEntity> findRelatedConcepts( ontologyId = ontologyId.toLowerCase(); List related = new ArrayList(); String decodedIri = UriUtils.decode(iri, "UTF-8"); - related = classRepository.findRelated(ontologyId, decodedIri, relationType.getPropertyName(),lang); + related = skosRepository.findRelated(ontologyId, decodedIri, relationType.getPropertyName(),lang); final int start = (int)pageable.getOffset(); final int end = Math.min((start + pageable.getPageSize()), related.size()); @@ -205,7 +202,7 @@ public HttpEntity displayRelatedConcepts( ontologyId = ontologyId.toLowerCase(); List related = new ArrayList(); String decodedIri = UriUtils.decode(iri, "UTF-8"); - related = classRepository.findRelated(ontologyId, decodedIri, relationType.getPropertyName(),lang); + related = skosRepository.findRelated(ontologyId, decodedIri, relationType.getPropertyName(),lang); final int start = (int)pageable.getOffset(); final int end = Math.min((start + pageable.getPageSize()), related.size()); @@ -234,7 +231,7 @@ public HttpEntity> findRelatedConceptsIndirectly( ontologyId = ontologyId.toLowerCase(); List related = new ArrayList(); String decodedIri = UriUtils.decode(iri, "UTF-8"); - related = classRepository.findRelatedIndirectly(ontologyId, decodedIri, relationType.getPropertyName(), obsoletes,lang,pageable); + related = skosRepository.findRelatedIndirectly(ontologyId, decodedIri, relationType.getPropertyName(), obsoletes,lang,pageable); return new ResponseEntity<>( related, HttpStatus.OK); @@ -259,7 +256,7 @@ public HttpEntity displayRelatedConceptsIndirectly( ontologyId = ontologyId.toLowerCase(); List related = new ArrayList(); String decodedIri = UriUtils.decode(iri, "UTF-8"); - related = classRepository.findRelatedIndirectly(ontologyId, decodedIri, relationType.getPropertyName(), obsoletes,lang,pageable); + related = skosRepository.findRelatedIndirectly(ontologyId, decodedIri, relationType.getPropertyName(), obsoletes,lang,pageable); int count = 0; for (V2Entity individual : related) @@ -282,15 +279,15 @@ public HttpEntity retrieveImmediateGraph( List related = new ArrayList(); String decodedIri = UriUtils.decode(iri, "UTF-8"); - V2Entity subjectTerm = classRepository.findByOntologyAndIri(ontologyId, decodedIri, lang); + V2Entity subjectTerm = skosRepository.findByOntologyAndIri(ontologyId, decodedIri, lang); - related = classRepository.findRelated(ontologyId, decodedIri, SKOSRelation.related.getPropertyName(), lang); + related = skosRepository.findRelated(ontologyId, decodedIri, SKOSRelation.related.getPropertyName(), lang); List narrower = new ArrayList(); - narrower = classRepository.findRelated(ontologyId, decodedIri, SKOSRelation.narrower.getPropertyName(), lang); + narrower = skosRepository.findRelated(ontologyId, decodedIri, SKOSRelation.narrower.getPropertyName(), lang); List broader = new ArrayList(); - broader = classRepository.findRelated(ontologyId, decodedIri, SKOSRelation.broader.getPropertyName(), lang); + broader = skosRepository.findRelated(ontologyId, decodedIri, SKOSRelation.broader.getPropertyName(), lang); 
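+        // Assembly sketch for the graph built below: each V2Entity fetched for the three
+        // SKOS relations (related, narrower, broader) is mapped to a Node carrying its iri
+        // and label, and each Node is then joined to the subject concept by an Edge labelled
+        // with the relation name and the corresponding SKOS predicate IRI.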
Set relatedNodes = new HashSet(); related.forEach(term -> relatedNodes.add(new Node(term.any().get("iri").toString(), term.any().get("label").toString()))); @@ -300,9 +297,9 @@ public HttpEntity retrieveImmediateGraph( broader.forEach(term -> broaderNodes.add(new Node(term.any().get("iri").toString(), term.any().get("label").toString()))); Set edges = new HashSet(); - relatedNodes.forEach(node -> edges.add(new Edge(decodedIri, node.iri, "related",SKOSRelation.related.getPropertyName()))); - narrowerNodes.forEach(node -> edges.add(new Edge(decodedIri, node.iri, "narrower",SKOSRelation.narrower.getPropertyName()))); - broaderNodes.forEach(node -> edges.add(new Edge(decodedIri, node.iri, "broader",SKOSRelation.broader.getPropertyName()))); + relatedNodes.forEach(node -> edges.add(new Edge(decodedIri, node.getIri(), "related",SKOSRelation.related.getPropertyName()))); + narrowerNodes.forEach(node -> edges.add(new Edge(decodedIri, node.getIri(), "narrower",SKOSRelation.narrower.getPropertyName()))); + broaderNodes.forEach(node -> edges.add(new Edge(decodedIri, node.getIri(), "broader",SKOSRelation.broader.getPropertyName()))); Set nodes = new HashSet(); nodes.add(new Node(decodedIri,subjectTerm.any().get("label").toString())); @@ -341,54 +338,4 @@ public StringBuilder generateConceptHierarchyTextByOntology(TreeNode r public void handleError(HttpServletRequest req, Exception exception) { } - public class Node { - String iri; - String label; - - public Node(String iri, String label) { - this.iri = iri; - this.label = label; - } - - public String getIri() { - return iri; - } - - public String getLabel() { - return label; - } - - } - - public class Edge { - String source; - String target; - String label; - String uri; - - public Edge(String source, String target, String label, String uri) { - this.source = source; - this.target = target; - this.label = label; - this.uri = uri; - } - - public String getSource() { - return source; - } - - public String getTarget() { - return target; - } - - public String getLabel() { - return label; - } - - public String getUri() { - return uri; - } - - } - } diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/model/Edge.java b/backend/src/main/java/uk/ac/ebi/spot/ols/model/Edge.java new file mode 100644 index 000000000..ba2a19984 --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/model/Edge.java @@ -0,0 +1,37 @@ +package uk.ac.ebi.spot.ols.model; + +/** + * @author Erhun Giray TUNCAY + * @email giray.tuncay@tib.eu + * TIB-Leibniz Information Center for Science and Technology + */ +public class Edge { + String source; + String target; + String label; + String uri; + + public Edge(String source, String target, String label, String uri) { + this.source = source; + this.target = target; + this.label = label; + this.uri = uri; + } + + public String getSource() { + return source; + } + + public String getTarget() { + return target; + } + + public String getLabel() { + return label; + } + + public String getUri() { + return uri; + } + +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/model/Node.java b/backend/src/main/java/uk/ac/ebi/spot/ols/model/Node.java new file mode 100644 index 000000000..a73bba608 --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/model/Node.java @@ -0,0 +1,25 @@ +package uk.ac.ebi.spot.ols.model; + +/** + * @author Erhun Giray TUNCAY + * @email giray.tuncay@tib.eu + * TIB-Leibniz Information Center for Science and Technology + */ +public class Node { + String iri; + String label; + + public Node(String iri, String 
label) { + this.iri = iri; + this.label = label; + } + + public String getIri() { + return iri; + } + + public String getLabel() { + return label; + } + +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/model/v2/SKOSRelation.java b/backend/src/main/java/uk/ac/ebi/spot/ols/model/SKOSRelation.java similarity index 96% rename from backend/src/main/java/uk/ac/ebi/spot/ols/model/v2/SKOSRelation.java rename to backend/src/main/java/uk/ac/ebi/spot/ols/model/SKOSRelation.java index 4450a8c62..f127360d6 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/model/v2/SKOSRelation.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/model/SKOSRelation.java @@ -1,4 +1,4 @@ -package uk.ac.ebi.spot.ols.model.v2; +package uk.ac.ebi.spot.ols.model; /** * @author Erhun Giray TUNCAY diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2ClassRepository.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2ClassRepository.java index 41f04cefb..74287ccd3 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2ClassRepository.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2ClassRepository.java @@ -1,10 +1,8 @@ package uk.ac.ebi.spot.ols.repository.v2; -import com.google.gson.JsonElement; -import com.google.gson.JsonObject; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.cache.annotation.Cacheable; +import org.springframework.context.annotation.Primary; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.data.rest.webmvc.ResourceNotFoundException; @@ -19,15 +17,12 @@ import uk.ac.ebi.spot.ols.repository.Validation; import uk.ac.ebi.spot.ols.repository.transforms.LocalizationTransform; import uk.ac.ebi.spot.ols.repository.transforms.RemoveLiteralDatatypesTransform; -import uk.ac.ebi.spot.ols.repository.v1.TreeNode; import uk.ac.ebi.spot.ols.repository.v2.helpers.V2DynamicFilterParser; import uk.ac.ebi.spot.ols.repository.v2.helpers.V2SearchFieldsParser; import java.io.IOException; import java.util.*; - -import static uk.ac.ebi.spot.ols.model.v2.SKOSRelation.*; - +@Primary @Component public class V2ClassRepository { @@ -176,53 +171,6 @@ public Page getIndividualAncestorsByOntologyId(String ontologyId, Page } - - @Cacheable(value = "concepttree", key="#ontologyId.concat('-').concat(#schema).concat('-').concat(#narrower).concat('-').concat(#withChildren)") - public List> conceptTree (String ontologyId, boolean schema, boolean narrower, boolean withChildren, Boolean obsoletes, String lang, Pageable pageable) throws IOException { - - Map> properties = new HashMap<>(); - if(!obsoletes) - properties.put("isObsolete", List.of("false")); - - List listOfTerms = allClassesOfOntology(ontologyId, obsoletes, pageable, lang); - - List> rootTerms = new ArrayList>(); - int count = 0; - - if(schema) { - for (V2Entity term : listOfTerms) - if (term.any().get(hasTopConcept.getPropertyName()) != null) { - for (String iriTopConcept : (ArrayList) term.any().get(hasTopConcept.getPropertyName())) { - V2Entity topConceptTerm = findTerm(listOfTerms,iriTopConcept); - TreeNode topConcept = new TreeNode(topConceptTerm); - topConcept.setIndex(String.valueOf(++count)); - if(withChildren) { - if(narrower) - populateChildrenandRelatedByNarrower(topConceptTerm,topConcept,listOfTerms); - else - populateChildrenandRelatedByBroader(topConceptTerm,topConcept,listOfTerms); - } - rootTerms.add(topConcept); - } - } - } else for (V2Entity term : listOfTerms) { - TreeNode tree = new 
TreeNode(term); - - if (tree.isRoot() && term.any().get(topConceptOf.getPropertyName()) != null) { - tree.setIndex(String.valueOf(++count)); - if(withChildren) { - if(narrower) - populateChildrenandRelatedByNarrower(term,tree,listOfTerms); - else - populateChildrenandRelatedByBroader(term,tree,listOfTerms); - } - rootTerms.add(tree); - } - } - - return rootTerms; - } - public List allClassesOfOntology(String ontologyId, Boolean obsoletes, Pageable pageable, String lang) throws IOException { Map> properties = new HashMap<>(); if(!obsoletes) @@ -240,156 +188,6 @@ public List allClassesOfOntology(String ontologyId, Boolean obsoletes, return listOfTerms; } - @Cacheable(value = "concepttree", key="#ontologyId.concat('-').concat(#narrower).concat('-').concat(#withChildren)") - public List> conceptTreeWithoutTop (String ontologyId, boolean isNarrower, boolean withChildren, Boolean obsoletes, String lang, Pageable pageable) throws IOException { - - List listOfTerms = allClassesOfOntology(ontologyId, obsoletes, pageable, lang); - - Set rootIRIs = new HashSet(); - List> rootTerms = new ArrayList>(); - int count = 0; - if(!isNarrower) { - for (V2Entity term : listOfTerms) { - if(term.any() != null && term.any().get(broader.getPropertyName()) != null) { - for (String iriBroader : getRelationsAsList(term,broader.getPropertyName())) { - V2Entity broaderTerm = findTerm(listOfTerms, iriBroader); - if (broaderTerm.any() != null && broaderTerm.any().get(broader.getPropertyName()) == null) { - rootIRIs.add(iriBroader); - } - - } - } - } - - for (String iri : rootIRIs) { - V2Entity topConceptTerm = findTerm(listOfTerms, iri); - TreeNode topConcept = new TreeNode(topConceptTerm); - topConcept.setIndex(String.valueOf(++count)); - if(withChildren) - populateChildrenandRelatedByBroader(topConceptTerm,topConcept,listOfTerms); - rootTerms.add(topConcept); - } - - } else { - for (V2Entity term : listOfTerms) { - if (term.any() != null && term.any().get(narrower) != null) { - boolean root = true; - for (V2Entity V2Entity : listOfTerms) { - if (V2Entity.any() != null && V2Entity.any().get(narrower) != null) { - for (String iriNarrower : getRelationsAsList(V2Entity,narrower.getPropertyName())) { - if (term.any().get("iri").equals(iriNarrower)) - root = false; - } - } - } - - if (root) { - TreeNode topConcept = new TreeNode(term); - topConcept.setIndex(String.valueOf(++count)); - if (withChildren) - populateChildrenandRelatedByNarrower(term, topConcept, listOfTerms); - rootTerms.add(topConcept); - } - } - } - } - - return rootTerms; - } - - @Cacheable(value = "concepttree", key="#ontologyId.concat('-').concat('s').concat('-').concat(#iri).concat('-').concat(#narrower).concat('-').concat(#index)") - public TreeNode conceptSubTree(String ontologyId, String iri, boolean narrower, String index, Boolean obsoletes, String lang, Pageable pageable) throws IOException { - List listOfTerms = allClassesOfOntology(ontologyId, obsoletes, pageable, lang); - V2Entity topConceptTerm = findTerm(listOfTerms,iri); - TreeNode topConcept = new TreeNode(topConceptTerm); - topConcept.setIndex(index); - if(narrower) - populateChildrenandRelatedByNarrower(topConceptTerm,topConcept,listOfTerms); - else - populateChildrenandRelatedByBroader(topConceptTerm,topConcept,listOfTerms); - - return topConcept; - } - - public V2Entity findTerm(List wholeList, String iri) { - for (V2Entity term : wholeList) - if(term.any().get("iri").equals(iri)) - return term; - return new V2Entity(new JsonObject()); - } - - public List findRelated(String ontologyId, 
String iri, String relationType, String lang) { - List related = new ArrayList(); - V2Entity term = this.findByOntologyAndIri(ontologyId, iri, lang); - if (term != null) - if (term.any().get(relationType) != null) - for (String iriBroader : getRelationsAsList(term,relationType)) - related.add(this.findByOntologyAndIri(ontologyId, iriBroader, lang)); - return related; - } - - public ListfindRelatedIndirectly(String ontologyId, String iri, String relationType, Boolean obsoletes, String lang, Pageable pageable) throws IOException { - List related = new ArrayList(); - - V2Entity V2Entity = this.findByOntologyAndIri(ontologyId, iri, lang); - if(V2Entity == null) - return related; - if(V2Entity.any().get("iri") == null) - return related; - - List listOfTerms = allClassesOfOntology(ontologyId, obsoletes, pageable, lang); - - for (V2Entity term : listOfTerms) { - if (term != null) - if (term.any().get(relationType) != null) - for (String iriRelated : getRelationsAsList(term,relationType)) - if(iriRelated.equals(iri)) - related.add(term); - } - - return related; - } - - public void populateChildrenandRelatedByNarrower(V2Entity term, TreeNode tree, List listOfTerms ) { - - if (term.any() != null) - for (String iriRelated : getRelationsAsList(term,related.getPropertyName())) { - TreeNode related = new TreeNode(findTerm(listOfTerms, iriRelated)); - related.setIndex(tree.getIndex() + ".related"); - tree.addRelated(related); - } - int count = 0; - if (term.any() != null) - for (String iriChild : getRelationsAsList(term,narrower.getPropertyName())) { - V2Entity childTerm = findTerm(listOfTerms, iriChild); - TreeNode child = new TreeNode(childTerm); - child.setIndex(tree.getIndex() + "." + ++count); - populateChildrenandRelatedByNarrower(childTerm, child, listOfTerms); - tree.addChild(child); - } - } - - public void populateChildrenandRelatedByBroader(V2Entity term, TreeNode tree, List listOfTerms) { - if (term.any() != null) - for (String iriRelated : getRelationsAsList(term,related.getPropertyName())) { - TreeNode related = new TreeNode(findTerm(listOfTerms, iriRelated)); - related.setIndex(tree.getIndex() + ".related"); - tree.addRelated(related); - } - int count = 0; - for ( V2Entity V2Entity : listOfTerms) { - if (V2Entity.any() != null) - for (String iriBroader : getRelationsAsList(V2Entity,broader.getPropertyName())) - if(term.any().get("iri") != null) - if (term.any().get("iri").equals(iriBroader)) { - TreeNode child = new TreeNode(V2Entity); - child.setIndex(tree.getIndex()+"."+ ++count); - populateChildrenandRelatedByBroader(V2Entity,child,listOfTerms); - tree.addChild(child); - } - } - } - public List getRelationsAsList(V2Entity entity, String relationType){ if(entity.any().get(relationType) instanceof String) return Arrays.asList((String) entity.any().get(relationType)); diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2SKOSRepository.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2SKOSRepository.java new file mode 100644 index 000000000..f20f4f183 --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2SKOSRepository.java @@ -0,0 +1,217 @@ +package uk.ac.ebi.spot.ols.repository.v2; + +import com.google.gson.JsonObject; +import org.springframework.cache.annotation.Cacheable; +import org.springframework.data.domain.Pageable; +import org.springframework.stereotype.Component; +import uk.ac.ebi.spot.ols.model.v2.V2Entity; +import uk.ac.ebi.spot.ols.repository.v1.TreeNode; + +import java.io.IOException; +import java.util.*; + +import static 
uk.ac.ebi.spot.ols.model.SKOSRelation.*; + +/** + * @author Erhun Giray TUNCAY + * @email giray.tuncay@tib.eu + * TIB-Leibniz Information Center for Science and Technology + */ +@Component +public class V2SKOSRepository extends V2ClassRepository { + + @Cacheable(value = "concepttree", key="#ontologyId.concat('-').concat(#schema).concat('-').concat(#narrower).concat('-').concat(#withChildren)") + public List> conceptTree (String ontologyId, boolean schema, boolean narrower, boolean withChildren, Boolean obsoletes, String lang, Pageable pageable) throws IOException { + + Map> properties = new HashMap<>(); + if(!obsoletes) + properties.put("isObsolete", List.of("false")); + + List listOfTerms = allClassesOfOntology(ontologyId, obsoletes, pageable, lang); + List> rootTerms = new ArrayList>(); + int count = 0; + + if(schema) { + for (V2Entity term : listOfTerms) + if (term.any().get(hasTopConcept.getPropertyName()) != null) { + for (String iriTopConcept : (ArrayList) term.any().get(hasTopConcept.getPropertyName())) { + V2Entity topConceptTerm = findTerm(listOfTerms,iriTopConcept); + TreeNode topConcept = new TreeNode(topConceptTerm); + topConcept.setIndex(String.valueOf(++count)); + if(withChildren) { + if(narrower) + populateChildrenandRelatedByNarrower(topConceptTerm,topConcept,listOfTerms); + else + populateChildrenandRelatedByBroader(topConceptTerm,topConcept,listOfTerms); + } + rootTerms.add(topConcept); + } + } + } else for (V2Entity term : listOfTerms) { + TreeNode tree = new TreeNode(term); + + if (tree.isRoot() && term.any().get(topConceptOf.getPropertyName()) != null) { + tree.setIndex(String.valueOf(++count)); + if(withChildren) { + if(narrower) + populateChildrenandRelatedByNarrower(term,tree,listOfTerms); + else + populateChildrenandRelatedByBroader(term,tree,listOfTerms); + } + rootTerms.add(tree); + } + } + + return rootTerms; + } + + @Cacheable(value = "concepttree", key="#ontologyId.concat('-').concat(#narrower).concat('-').concat(#withChildren)") + public List> conceptTreeWithoutTop (String ontologyId, boolean isNarrower, boolean withChildren, Boolean obsoletes, String lang, Pageable pageable) throws IOException { + + List listOfTerms = allClassesOfOntology(ontologyId, obsoletes, pageable, lang); + + Set rootIRIs = new HashSet(); + List> rootTerms = new ArrayList>(); + int count = 0; + if(!isNarrower) { + for (V2Entity term : listOfTerms) { + if(term.any() != null && term.any().get(broader.getPropertyName()) != null) { + for (String iriBroader : getRelationsAsList(term,broader.getPropertyName())) { + V2Entity broaderTerm = findTerm(listOfTerms, iriBroader); + if (broaderTerm.any() != null && broaderTerm.any().get(broader.getPropertyName()) == null) { + rootIRIs.add(iriBroader); + } + + } + } + } + + for (String iri : rootIRIs) { + V2Entity topConceptTerm = findTerm(listOfTerms, iri); + TreeNode topConcept = new TreeNode(topConceptTerm); + topConcept.setIndex(String.valueOf(++count)); + if(withChildren) + populateChildrenandRelatedByBroader(topConceptTerm,topConcept,listOfTerms); + rootTerms.add(topConcept); + } + + } else { + for (V2Entity term : listOfTerms) { + if (term.any() != null && term.any().get(narrower) != null) { + boolean root = true; + for (V2Entity V2Entity : listOfTerms) { + if (V2Entity.any() != null && V2Entity.any().get(narrower) != null) { + for (String iriNarrower : getRelationsAsList(V2Entity,narrower.getPropertyName())) { + if (term.any().get("iri").equals(iriNarrower)) + root = false; + } + } + } + + if (root) { + TreeNode topConcept = new 
TreeNode(term); + topConcept.setIndex(String.valueOf(++count)); + if (withChildren) + populateChildrenandRelatedByNarrower(term, topConcept, listOfTerms); + rootTerms.add(topConcept); + } + } + } + } + + return rootTerms; + } + + @Cacheable(value = "concepttree", key="#ontologyId.concat('-').concat('s').concat('-').concat(#iri).concat('-').concat(#narrower).concat('-').concat(#index)") + public TreeNode conceptSubTree(String ontologyId, String iri, boolean narrower, String index, Boolean obsoletes, String lang, Pageable pageable) throws IOException { + List listOfTerms = allClassesOfOntology(ontologyId, obsoletes, pageable, lang); + V2Entity topConceptTerm = findTerm(listOfTerms,iri); + TreeNode topConcept = new TreeNode(topConceptTerm); + topConcept.setIndex(index); + if(narrower) + populateChildrenandRelatedByNarrower(topConceptTerm,topConcept,listOfTerms); + else + populateChildrenandRelatedByBroader(topConceptTerm,topConcept,listOfTerms); + + return topConcept; + } + + public V2Entity findTerm(List wholeList, String iri) { + for (V2Entity term : wholeList) + if(term.any().get("iri").equals(iri)) + return term; + return new V2Entity(new JsonObject()); + } + + public List findRelated(String ontologyId, String iri, String relationType, String lang) { + List related = new ArrayList(); + V2Entity term = this.findByOntologyAndIri(ontologyId, iri, lang); + if (term != null) + if (term.any().get(relationType) != null) + for (String iriBroader : getRelationsAsList(term,relationType)) + related.add(this.findByOntologyAndIri(ontologyId, iriBroader, lang)); + return related; + } + + public ListfindRelatedIndirectly(String ontologyId, String iri, String relationType, Boolean obsoletes, String lang, Pageable pageable) throws IOException { + List related = new ArrayList(); + + V2Entity V2Entity = this.findByOntologyAndIri(ontologyId, iri, lang); + if(V2Entity == null) + return related; + if(V2Entity.any().get("iri") == null) + return related; + + List listOfTerms = allClassesOfOntology(ontologyId, obsoletes, pageable, lang); + + for (V2Entity term : listOfTerms) { + if (term != null) + if (term.any().get(relationType) != null) + for (String iriRelated : getRelationsAsList(term,relationType)) + if(iriRelated.equals(iri)) + related.add(term); + } + + return related; + } + + public void populateChildrenandRelatedByNarrower(V2Entity term, TreeNode tree, List listOfTerms ) { + + if (term.any() != null) + for (String iriRelated : getRelationsAsList(term,related.getPropertyName())) { + TreeNode related = new TreeNode(findTerm(listOfTerms, iriRelated)); + related.setIndex(tree.getIndex() + ".related"); + tree.addRelated(related); + } + int count = 0; + if (term.any() != null) + for (String iriChild : getRelationsAsList(term,narrower.getPropertyName())) { + V2Entity childTerm = findTerm(listOfTerms, iriChild); + TreeNode child = new TreeNode(childTerm); + child.setIndex(tree.getIndex() + "." 
+ ++count); + populateChildrenandRelatedByNarrower(childTerm, child, listOfTerms); + tree.addChild(child); + } + } + + public void populateChildrenandRelatedByBroader(V2Entity term, TreeNode tree, List listOfTerms) { + if (term.any() != null) + for (String iriRelated : getRelationsAsList(term,related.getPropertyName())) { + TreeNode related = new TreeNode(findTerm(listOfTerms, iriRelated)); + related.setIndex(tree.getIndex() + ".related"); + tree.addRelated(related); + } + int count = 0; + for ( V2Entity V2Entity : listOfTerms) { + if (V2Entity.any() != null) + for (String iriBroader : getRelationsAsList(V2Entity,broader.getPropertyName())) + if(term.any().get("iri") != null) + if (term.any().get("iri").equals(iriBroader)) { + TreeNode child = new TreeNode(V2Entity); + child.setIndex(tree.getIndex()+"."+ ++count); + populateChildrenandRelatedByBroader(V2Entity,child,listOfTerms); + tree.addChild(child); + } + } + } +} From 6afd55ff7f6d1155267ac353f1df51c259c46a3c Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Thu, 11 Apr 2024 12:47:59 +0200 Subject: [PATCH 037/146] reverted the get first method of olssolrclient --- .../uk/ac/ebi/spot/ols/repository/solr/OlsSolrClient.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/solr/OlsSolrClient.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/solr/OlsSolrClient.java index c2cd47052..6168d4f31 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/solr/OlsSolrClient.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/solr/OlsSolrClient.java @@ -96,8 +96,8 @@ public JsonElement getFirst(OlsSolrQuery query) { if(qr.getResults().getNumFound() < 1) { logger.debug("Expected at least 1 result for solr getFirst for solr query = {}", query.constructQuery().jsonStr()); - return new JsonObject(); - //throw new RuntimeException("Expected at least 1 result for solr getFirst"); + //return new JsonObject(); + throw new RuntimeException("Expected at least 1 result for solr getFirst"); } return getOlsEntityFromSolrResult(qr.getResults().get(0)); From 1b05e3839a26bc05761495caaa11f7ac2fbd99e9 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Mon, 15 Apr 2024 15:03:42 +0200 Subject: [PATCH 038/146] added composite classification based filtering as well as string and collection based filtering options for #3 --- .../api/v2/V2OntologyController.java | 5 +- .../api/v2/V2StatisticsController.java | 8 +- .../repository/v2/V2OntologyRepository.java | 85 ++++++++++++++++--- 3 files changed, 80 insertions(+), 18 deletions(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologyController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologyController.java index 28e82333b..903279e51 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologyController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologyController.java @@ -56,7 +56,8 @@ public HttpEntity> getOntologies( @RequestParam(value = "classification", required = false) List classifications, @RequestParam(value = "ontology", required = false) List ontologies, @Parameter(description = "Set to true (default setting is false) for intersection (default behavior is union) of classifications.") - @RequestParam(value = "exclusive", required = false, defaultValue = "false") boolean exclusive + @RequestParam(value = 
"exclusive", required = false, defaultValue = "false") boolean exclusive, + @RequestParam(value = "composite", required = false, defaultValue = "false") boolean filterComposite ) throws ResourceNotFoundException, IOException { Map> properties = new HashMap<>(); if(!includeObsoleteEntities) @@ -65,7 +66,7 @@ public HttpEntity> getOntologies( return new ResponseEntity<>( new V2PagedAndFacetedResponse<>( - ontologyRepository.find(pageable, lang, search, searchFields, boostFields, exactMatch, DynamicQueryHelper.filterProperties(properties),schemas,classifications,ontologies,exclusive) + ontologyRepository.find(pageable, lang, search, searchFields, boostFields, exactMatch, DynamicQueryHelper.filterProperties(properties),schemas,classifications,ontologies,exclusive,filterComposite) ), HttpStatus.OK); } diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java index 0db4fe7a0..57a706e40 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java @@ -16,7 +16,6 @@ import org.springframework.web.bind.annotation.*; import uk.ac.ebi.spot.ols.model.v2.V2Statistics; import uk.ac.ebi.spot.ols.repository.solr.OlsSolrClient; -import uk.ac.ebi.spot.ols.repository.v1.V1OntologyRepository; import uk.ac.ebi.spot.ols.repository.v2.V2OntologyRepository; import java.io.IOException; @@ -44,11 +43,12 @@ public HttpEntity getStatistics( @RequestParam(value = "schema", required = false) Collection schemas, @RequestParam(value = "classification", required = false) Collection classifications, @Parameter(description = "Set to true (default setting is false) for intersection (default behavior is union) of classifications.") - @RequestParam(value = "exclusive", required = false, defaultValue = "false") boolean exclusive, @RequestParam(value = "ontologyIds", required = false) Collection ontologyIds, + @RequestParam(value = "exclusive", required = false, defaultValue = "false") boolean exclusive, + @RequestParam(value = "composite", required = false, defaultValue = "true") boolean filterComposite, @RequestParam(value = "lang", defaultValue = "en") String lang) throws ResourceNotFoundException, IOException{ - ontologyIds = ontologyRepository.filterOntologyIDs(schemas,classifications,ontologyIds,exclusive,lang); + ontologyIds = ontologyRepository.filterOntologyIDs(schemas,classifications,ontologyIds,exclusive,filterComposite,lang); StringBuilder sb = new StringBuilder(); String queryString = "none"; if(ontologyIds != null){ @@ -74,7 +74,7 @@ HttpEntity getStatisticsBySchema( Set values = ontologyRepository.getSchemaValues(Collections.singleton(key),lang); for (String value : values) { - summaries.put(key,value, getStatistics(Collections.singleton(key),Collections.singleton(value), false,Collections.emptySet(),lang)); + summaries.put(key,value, getStatistics(Collections.singleton(key),Collections.singleton(value), Collections.emptySet(),false,true,lang)); } } diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2OntologyRepository.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2OntologyRepository.java index 2cf34acbf..dbf8b02ce 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2OntologyRepository.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2OntologyRepository.java @@ -7,7 +7,6 @@ import 
org.springframework.data.domain.Pageable; import org.springframework.data.rest.webmvc.ResourceNotFoundException; import org.springframework.stereotype.Component; -import uk.ac.ebi.spot.ols.model.v1.V1Ontology; import uk.ac.ebi.spot.ols.model.v2.V2Entity; import uk.ac.ebi.spot.ols.repository.neo4j.OlsNeo4jClient; import uk.ac.ebi.spot.ols.repository.solr.SearchType; @@ -17,16 +16,11 @@ import uk.ac.ebi.spot.ols.repository.Validation; import uk.ac.ebi.spot.ols.repository.transforms.LocalizationTransform; import uk.ac.ebi.spot.ols.repository.transforms.RemoveLiteralDatatypesTransform; -import uk.ac.ebi.spot.ols.repository.v1.mappers.V1OntologyMapper; import uk.ac.ebi.spot.ols.repository.v2.helpers.V2DynamicFilterParser; import uk.ac.ebi.spot.ols.repository.v2.helpers.V2SearchFieldsParser; - -import java.lang.reflect.Field; import java.util.*; - import java.io.IOException; -import static java.util.stream.Collectors.toCollection; @Component public class V2OntologyRepository { @@ -39,7 +33,7 @@ public class V2OntologyRepository { public OlsFacetedResultsPage find( - Pageable pageable, String lang, String search, String searchFields, String boostFields, boolean exactMatch, Map> properties, Collection schemas,Collection classifications,Collection ontologies,boolean exclusive) throws IOException { + Pageable pageable, String lang, String search, String searchFields, String boostFields, boolean exactMatch, Map> properties, Collection schemas,Collection classifications,Collection ontologies,boolean exclusive,boolean composite) throws IOException { Validation.validateLang(lang); @@ -53,7 +47,7 @@ public OlsFacetedResultsPage find( query.setExactMatch(exactMatch); query.addFilter("type", List.of("ontology"), SearchType.WHOLE_FIELD); System.out.println("0"); - Collection filteredOntologies = filterOntologyIDs(schemas,classifications, ontologies, exclusive, lang); + Collection filteredOntologies = filterOntologyIDs(schemas,classifications, ontologies, exclusive, composite, lang); if(filteredOntologies != null){ for (String ontologyId : filteredOntologies) Validation.validateOntologyId(ontologyId); @@ -109,7 +103,7 @@ public Set getOntologies(String lang){ } - public Collection filterOntologyIDs(Collection schemas,Collection classifications, Collection ontologies, boolean exclusiveFilter, String lang){ + public Collection filterOntologyIDs(Collection schemas,Collection classifications, Collection ontologies, boolean exclusiveFilter, boolean composite, String lang){ if (schemas != null) schemas.remove(""); if (classifications != null) @@ -120,7 +114,11 @@ public Collection filterOntologyIDs(Collection schemas,Collectio return null; if ((schemas == null || schemas.size() == 0 ) || (classifications == null || classifications.size() == 0 )) return ontologies; - Set documents = filter(schemas, classifications, exclusiveFilter,lang); + Set documents; + if(composite) + documents = filterComposite(schemas, classifications, exclusiveFilter,lang); + else + documents = filter(schemas, classifications, exclusiveFilter,lang); Set filteredOntologySet = new HashSet(); for (V2Entity document : documents){ filteredOntologySet.add(document.any().get("ontologyId").toString()); @@ -155,7 +153,7 @@ else if (schemas != null) return postFilterOntologySet; } - public Set filter(Collection schemas, Collection classifications, boolean exclusive, String lang){ + public Set filterComposite(Collection schemas, Collection classifications, boolean exclusive, String lang){ Set tempSet = new HashSet(); if(schemas != null && 
classifications != null) if(!exclusive) { @@ -218,6 +216,70 @@ public Set filter(Collection schemas, Collection class return tempSet; } + public Set filter(Collection schemas, Collection classifications, boolean exclusive, String lang){ + if(exclusive) + return exclusiveFilter(schemas,classifications,lang); + else + return inclusiveFilter(schemas,classifications,lang); + } + + public Set inclusiveFilter(Collection schemas, Collection classifications, String lang){ + Set tempSet = new HashSet(); + Set filteredSet = new HashSet(); + tempSet.addAll(getOntologies(lang)); + + for (V2Entity ontology : tempSet){ + for (String key : ontology.any().keySet()){ + if (schemas.contains(key)){ + if(ontology.any().get(key) != null) + if (ontology.any().get(key) instanceof Collection) { + for (String ontologyClassification : (Collection) ontology.any().get(key)){ + if(classifications.contains(ontologyClassification)) + filteredSet.add(ontology); + } + } else if (ontology.any().get(key) instanceof String) { + if(ontology.any().get(key) != null) + if(classifications.contains(ontology.any().get(key))) + filteredSet.add(ontology); + } + } + } + } + return filteredSet; + } + + public Set exclusiveFilter(Collection schemas, Collection classifications, String lang){ + Set tempSet = new HashSet(); + Set filteredSet = new HashSet(); + tempSet.addAll(getOntologies(lang)); + + for (V2Entity ontology : tempSet){ + Set fieldSet =ontology.any().keySet(); + if (fieldSet.containsAll(schemas)){ + Set tempClassifications = new HashSet(); + for (String key : ontology.any().keySet()){ + if (ontology.any().get(key) instanceof Collection){ + if(ontology.any().get(key) != null) + for (String classification : classifications){ + if(((Collection) ontology.any().get(key)).contains(classification)) + tempClassifications.add(classification); + } + + } + else if (ontology.any().get(key) instanceof String) { + if(ontology.any().get(key) != null) + if(classifications.contains((String) ontology.any().get(key))) + tempClassifications.add( (String) ontology.any().get(key)); + } + + } + if(tempClassifications.containsAll(classifications)) + filteredSet.add(ontology); + } + } + return filteredSet; + } + public Set getSchemaKeys(String lang){ Set tempSet = new HashSet(); tempSet.addAll(getOntologies(lang)); @@ -250,7 +312,6 @@ public Set getSchemaValues(Collection schemas,String lang){ } return values; } - } From ebbeeca2a6041bd16a71f7020441c803fd531957 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Tue, 16 Apr 2024 16:47:47 +0200 Subject: [PATCH 039/146] added filter option for licenses in #3 --- .../api/v2/V2OntologyController.java | 5 +- .../api/v2/V2StatisticsController.java | 9 ++- .../ac/ebi/spot/ols/model/FilterOption.java | 12 ++++ .../ebi/spot/ols/model/{v1 => }/License.java | 2 +- .../spot/ols/model/v1/V1OntologyConfig.java | 1 + .../v1/mappers/V1OntologyMapper.java | 2 +- .../repository/v2/V2OntologyRepository.java | 58 +++++++++++++++---- 7 files changed, 71 insertions(+), 18 deletions(-) create mode 100644 backend/src/main/java/uk/ac/ebi/spot/ols/model/FilterOption.java rename backend/src/main/java/uk/ac/ebi/spot/ols/model/{v1 => }/License.java (96%) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologyController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologyController.java index 903279e51..34eb4e4e7 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologyController.java +++ 
b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologyController.java @@ -22,6 +22,7 @@ import org.springframework.web.bind.annotation.*; import uk.ac.ebi.spot.ols.controller.api.v2.helpers.DynamicQueryHelper; import uk.ac.ebi.spot.ols.controller.api.v2.responses.V2PagedAndFacetedResponse; +import uk.ac.ebi.spot.ols.model.FilterOption; import uk.ac.ebi.spot.ols.model.v1.V1Ontology; import uk.ac.ebi.spot.ols.model.v2.V2Entity; import uk.ac.ebi.spot.ols.repository.solr.OlsFacetedResultsPage; @@ -57,7 +58,7 @@ public HttpEntity> getOntologies( @RequestParam(value = "ontology", required = false) List ontologies, @Parameter(description = "Set to true (default setting is false) for intersection (default behavior is union) of classifications.") @RequestParam(value = "exclusive", required = false, defaultValue = "false") boolean exclusive, - @RequestParam(value = "composite", required = false, defaultValue = "false") boolean filterComposite + @RequestParam(value = "option", required = false, defaultValue = "LINEAR") FilterOption filterOption ) throws ResourceNotFoundException, IOException { Map> properties = new HashMap<>(); if(!includeObsoleteEntities) @@ -66,7 +67,7 @@ public HttpEntity> getOntologies( return new ResponseEntity<>( new V2PagedAndFacetedResponse<>( - ontologyRepository.find(pageable, lang, search, searchFields, boostFields, exactMatch, DynamicQueryHelper.filterProperties(properties),schemas,classifications,ontologies,exclusive,filterComposite) + ontologyRepository.find(pageable, lang, search, searchFields, boostFields, exactMatch, DynamicQueryHelper.filterProperties(properties),schemas,classifications,ontologies,exclusive,filterOption) ), HttpStatus.OK); } diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java index 57a706e40..229ef2a14 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java @@ -14,6 +14,7 @@ import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.*; +import uk.ac.ebi.spot.ols.model.FilterOption; import uk.ac.ebi.spot.ols.model.v2.V2Statistics; import uk.ac.ebi.spot.ols.repository.solr.OlsSolrClient; import uk.ac.ebi.spot.ols.repository.v2.V2OntologyRepository; @@ -45,10 +46,10 @@ public HttpEntity getStatistics( @Parameter(description = "Set to true (default setting is false) for intersection (default behavior is union) of classifications.") @RequestParam(value = "ontologyIds", required = false) Collection ontologyIds, @RequestParam(value = "exclusive", required = false, defaultValue = "false") boolean exclusive, - @RequestParam(value = "composite", required = false, defaultValue = "true") boolean filterComposite, + @RequestParam(value = "option", required = false, defaultValue = "LINEAR") FilterOption filterOption, @RequestParam(value = "lang", defaultValue = "en") String lang) throws ResourceNotFoundException, IOException{ - ontologyIds = ontologyRepository.filterOntologyIDs(schemas,classifications,ontologyIds,exclusive,filterComposite,lang); + ontologyIds = ontologyRepository.filterOntologyIDs(schemas,classifications,ontologyIds,exclusive,filterOption,lang); StringBuilder sb = new StringBuilder(); String queryString = "none"; if(ontologyIds != null){ @@ -63,6 +64,8 @@ public HttpEntity 
getStatistics( @RequestMapping(path = "/allstatsbyschema", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) HttpEntity getStatisticsBySchema( @RequestParam(value = "schema", required = false) Collection schemas, + @RequestParam(value = "exclusive", required = false, defaultValue = "false") boolean exclusive, + @RequestParam(value = "option", required = false, defaultValue = "LINEAR") FilterOption filterOption, @RequestParam(value = "lang", defaultValue = "en") String lang ) throws IOException { @@ -74,7 +77,7 @@ HttpEntity getStatisticsBySchema( Set values = ontologyRepository.getSchemaValues(Collections.singleton(key),lang); for (String value : values) { - summaries.put(key,value, getStatistics(Collections.singleton(key),Collections.singleton(value), Collections.emptySet(),false,true,lang)); + summaries.put(key,value, getStatistics(Collections.singleton(key),Collections.singleton(value), Collections.emptySet(),exclusive,filterOption,lang)); } } diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/model/FilterOption.java b/backend/src/main/java/uk/ac/ebi/spot/ols/model/FilterOption.java new file mode 100644 index 000000000..857a41128 --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/model/FilterOption.java @@ -0,0 +1,12 @@ +package uk.ac.ebi.spot.ols.model; + +/** + * @author Erhun Giray TUNCAY + * @email giray.tuncay@tib.eu + * TIB-Leibniz Information Center for Science and Technology + */ +public enum FilterOption { + COMPOSITE, + LINEAR, + LICENSE; +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/License.java b/backend/src/main/java/uk/ac/ebi/spot/ols/model/License.java similarity index 96% rename from backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/License.java rename to backend/src/main/java/uk/ac/ebi/spot/ols/model/License.java index 82cdee9b0..e9072eb5e 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/License.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/model/License.java @@ -1,4 +1,4 @@ -package uk.ac.ebi.spot.ols.model.v1; +package uk.ac.ebi.spot.ols.model; import io.swagger.v3.oas.annotations.media.Schema; diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/V1OntologyConfig.java b/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/V1OntologyConfig.java index 9e0b3458f..253d6a97a 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/V1OntologyConfig.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/model/v1/V1OntologyConfig.java @@ -1,6 +1,7 @@ package uk.ac.ebi.spot.ols.model.v1; import com.google.gson.annotations.SerializedName; +import uk.ac.ebi.spot.ols.model.License; import java.util.Collection; import java.util.HashSet; diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java index 4d772ab85..56f9283b8 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java @@ -3,7 +3,7 @@ import com.google.gson.Gson; import com.google.gson.JsonElement; import com.google.gson.JsonObject; -import uk.ac.ebi.spot.ols.model.v1.License; +import uk.ac.ebi.spot.ols.model.License; import uk.ac.ebi.spot.ols.model.v1.V1Ontology; import uk.ac.ebi.spot.ols.model.v1.V1OntologyConfig; import uk.ac.ebi.spot.ols.repository.transforms.LocalizationTransform; diff --git 
a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2OntologyRepository.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2OntologyRepository.java index dbf8b02ce..ff7e478b9 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2OntologyRepository.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2OntologyRepository.java @@ -3,10 +3,12 @@ import com.google.common.collect.Sets; import com.google.gson.JsonElement; +import com.google.gson.internal.LinkedTreeMap; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Pageable; import org.springframework.data.rest.webmvc.ResourceNotFoundException; import org.springframework.stereotype.Component; +import uk.ac.ebi.spot.ols.model.FilterOption; import uk.ac.ebi.spot.ols.model.v2.V2Entity; import uk.ac.ebi.spot.ols.repository.neo4j.OlsNeo4jClient; import uk.ac.ebi.spot.ols.repository.solr.SearchType; @@ -33,7 +35,7 @@ public class V2OntologyRepository { public OlsFacetedResultsPage find( - Pageable pageable, String lang, String search, String searchFields, String boostFields, boolean exactMatch, Map> properties, Collection schemas,Collection classifications,Collection ontologies,boolean exclusive,boolean composite) throws IOException { + Pageable pageable, String lang, String search, String searchFields, String boostFields, boolean exactMatch, Map> properties, Collection schemas,Collection classifications,Collection ontologies,boolean exclusive,FilterOption filterOption) throws IOException { Validation.validateLang(lang); @@ -47,7 +49,7 @@ public OlsFacetedResultsPage find( query.setExactMatch(exactMatch); query.addFilter("type", List.of("ontology"), SearchType.WHOLE_FIELD); System.out.println("0"); - Collection filteredOntologies = filterOntologyIDs(schemas,classifications, ontologies, exclusive, composite, lang); + Collection filteredOntologies = filterOntologyIDs(schemas,classifications, ontologies, exclusive, filterOption, lang); if(filteredOntologies != null){ for (String ontologyId : filteredOntologies) Validation.validateOntologyId(ontologyId); @@ -98,12 +100,10 @@ public Set getOntologies(String lang){ lang ) )); - return entities; - } - public Collection filterOntologyIDs(Collection schemas,Collection classifications, Collection ontologies, boolean exclusiveFilter, boolean composite, String lang){ + public Collection filterOntologyIDs(Collection schemas, Collection classifications, Collection ontologies, boolean exclusiveFilter, FilterOption filterOption, String lang){ if (schemas != null) schemas.remove(""); if (classifications != null) @@ -115,10 +115,12 @@ public Collection filterOntologyIDs(Collection schemas,Collectio if ((schemas == null || schemas.size() == 0 ) || (classifications == null || classifications.size() == 0 )) return ontologies; Set documents; - if(composite) + if(FilterOption.COMPOSITE == filterOption) documents = filterComposite(schemas, classifications, exclusiveFilter,lang); - else + else if (FilterOption.LINEAR == filterOption) documents = filter(schemas, classifications, exclusiveFilter,lang); + else + documents = filterLicense(schemas, classifications, exclusiveFilter,lang); Set filteredOntologySet = new HashSet(); for (V2Entity document : documents){ filteredOntologySet.add(document.any().get("ontologyId").toString()); @@ -264,14 +266,11 @@ public Set exclusiveFilter(Collection schemas, Collection) ontology.any().get(key)).contains(classification)) tempClassifications.add(classification); } - - } - else if 
(ontology.any().get(key) instanceof String) { + } else if (ontology.any().get(key) instanceof String) { if(ontology.any().get(key) != null) if(classifications.contains((String) ontology.any().get(key))) tempClassifications.add( (String) ontology.any().get(key)); } - } if(tempClassifications.containsAll(classifications)) filteredSet.add(ontology); @@ -280,6 +279,43 @@ else if (ontology.any().get(key) instanceof String) { return filteredSet; } + public Set filterLicense(Collection schemas, Collection classifications, boolean exclusive, String lang){ + Set tempSet = new HashSet(); + Set filteredSet = new HashSet(); + tempSet.addAll(getOntologies(lang)); + + for (V2Entity ontology : tempSet){ + if (ontology.any().keySet().contains("license")){ + LinkedTreeMap license = (LinkedTreeMap) ontology.any().get("license"); + String label = license.get("label") != null ? (String) license.get("label") : ""; + String logo = license.get("logo") != null ? (String) license.get("logo") : ""; + String url = license.get("url") != null ? (String) license.get("url") : ""; + if (exclusive){ + Set tempClassifications = new HashSet(); + if (schemas.contains("license.label") && label.length() > 0 && classifications.contains(label)) + tempClassifications.add("license.label"); + if (schemas.contains("license.logo") && logo.length() > 0 && classifications.contains(logo)) + tempClassifications.add("license.logo"); + if (schemas.contains("license.url") && url.length() > 0 && classifications.contains(url)) + tempClassifications.add("license.url"); + + if(tempClassifications.containsAll(classifications)) + filteredSet.add(ontology); + + } else { + if (schemas.contains("license.label") && label.length() > 0 && classifications.contains(label)) + filteredSet.add(ontology); + if (schemas.contains("license.logo") && logo.length() > 0 && classifications.contains(logo)) + filteredSet.add(ontology); + if (schemas.contains("license.url") && url.length() > 0 && classifications.contains(url)) + filteredSet.add(ontology); + } + } + } + + return filteredSet; + } + public Set getSchemaKeys(String lang){ Set tempSet = new HashSet(); tempSet.addAll(getOntologies(lang)); From 8e6be2d1bed4f20888197de935059e3a0d356772 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Tue, 16 Apr 2024 19:43:03 +0200 Subject: [PATCH 040/146] added composite and license type filtering options to v1 api for #3 --- .../api/v1/V1OntologyController.java | 10 +- .../controller/api/v1/V1SearchController.java | 6 +- .../controller/api/v1/V1SelectController.java | 6 +- .../api/v1/V1SuggestController.java | 6 +- .../repository/v1/V1OntologyRepository.java | 113 +++++++++++++++++- dataload/configs/testfilter.json | 1 + 6 files changed, 133 insertions(+), 9 deletions(-) create mode 100644 dataload/configs/testfilter.json diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java index 7543dde03..ce5dd61b1 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java @@ -23,6 +23,7 @@ import org.springframework.web.bind.annotation.*; import io.swagger.v3.oas.annotations.Parameter; +import uk.ac.ebi.spot.ols.model.FilterOption; import uk.ac.ebi.spot.ols.model.v1.V1Ontology; import uk.ac.ebi.spot.ols.repository.v1.V1OntologyRepository; import java.lang.reflect.*; @@ -89,11 
+90,18 @@ HttpEntity> getOntologiesByMetadata( @RequestParam(value = "classification", required = true) Collection classifications, @Parameter(description = "Set to true (default setting is false) for intersection (default behavior is union) of classifications.") @RequestParam(value = "exclusive", required = false, defaultValue = "false") boolean exclusive, + @RequestParam(value = "option", required = false, defaultValue = "LINEAR") FilterOption filterOption, @PageableDefault(size = 100, page = 0) Pageable pageable, @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, PagedResourcesAssembler assembler ) throws ResourceNotFoundException { - Set tempSet = ontologyRepository.filter(schemas,classifications,exclusive,lang); + Set tempSet = new HashSet(); + if (filterOption == FilterOption.LINEAR) + tempSet = ontologyRepository.filter(schemas,classifications,exclusive, lang); + else if (filterOption == FilterOption.COMPOSITE) + tempSet = ontologyRepository.filterComposite(schemas,classifications,exclusive, lang); + else if (filterOption == FilterOption.LICENSE) + tempSet = ontologyRepository.filterLicense(schemas,classifications,exclusive,lang); List tempList = new ArrayList(); tempList.addAll(tempSet); final int start = (int)pageable.getOffset(); diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java index cf4b495ab..47d25c2b6 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java @@ -15,6 +15,7 @@ import org.springframework.http.MediaType; import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.*; +import uk.ac.ebi.spot.ols.model.FilterOption; import uk.ac.ebi.spot.ols.repository.Validation; import uk.ac.ebi.spot.ols.repository.solr.OlsSolrClient; import uk.ac.ebi.spot.ols.repository.transforms.LocalizationTransform; @@ -53,9 +54,10 @@ public void search( @RequestParam("q") String query, @RequestParam(value = "schema", required = false) Collection schemas, @RequestParam(value = "classification", required = false) Collection classifications, + @RequestParam(value = "ontology", required = false) Collection ontologies, @Parameter(description = "Set to true (default setting is false) for intersection (default behavior is union) of classifications.") @RequestParam(value = "exclusive", required = false, defaultValue = "false") boolean exclusive, - @RequestParam(value = "ontology", required = false) Collection ontologies, + @RequestParam(value = "option", required = false, defaultValue = "LINEAR") FilterOption filterOption, @RequestParam(value = "type", required = false) Collection types, @RequestParam(value = "slim", required = false) Collection slims, @RequestParam(value = "fieldList", required = false) Collection fieldList, @@ -75,7 +77,7 @@ public void search( HttpServletResponse response ) throws IOException, SolrServerException { - ontologies = ontologyRepository.filterOntologyIDs(schemas,classifications,ontologies,exclusive,lang); + ontologies = ontologyRepository.filterOntologyIDs(schemas,classifications,ontologies,exclusive,filterOption,lang); final SolrQuery solrQuery = new SolrQuery(); // 1 diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java 
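The select and suggest endpoints below apply the same pre-filtering step before building their Solr queries. The contract of filterOntologyIDs is not stated in the patch itself; reconstructed from the repository code in this series, it is roughly:

    // filterOntologyIDs(schemas, classifications, ontologies, exclusive, filterOption, lang)
    //  - strips empty strings from schemas and classifications
    //  - with no filter input at all, returns null, i.e. no ontology restriction
    //  - with schemas or classifications null/empty, returns the caller-supplied ontology IDs unchanged
    //  - otherwise dispatches on FilterOption (COMPOSITE -> filterComposite, LINEAR -> filter,
    //    any other value, i.e. LICENSE -> filterLicense) and returns the ontologyId of each match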
index 4c994cf89..a6446af28 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java @@ -15,6 +15,7 @@ import org.springframework.http.MediaType; import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.*; +import uk.ac.ebi.spot.ols.model.FilterOption; import uk.ac.ebi.spot.ols.repository.Validation; import uk.ac.ebi.spot.ols.repository.solr.OlsSolrClient; import uk.ac.ebi.spot.ols.repository.transforms.LocalizationTransform; @@ -45,9 +46,10 @@ public void select( @RequestParam("q") String query, @RequestParam(value = "schema", required = false) Collection schemas, @RequestParam(value = "classification", required = false) Collection classifications, + @RequestParam(value = "ontology", required = false) Collection ontologies, @Parameter(description = "Set to true (default setting is false) for intersection (default behavior is union) of classifications.") @RequestParam(value = "exclusive", required = false, defaultValue = "false") boolean exclusive, - @RequestParam(value = "ontology", required = false) Collection ontologies, + @RequestParam(value = "option", required = false, defaultValue = "LINEAR") FilterOption filterOption, @RequestParam(value = "type", required = false) Collection types, @RequestParam(value = "slim", required = false) Collection slims, @RequestParam(value = "fieldList", required = false) Collection fieldList, @@ -61,7 +63,7 @@ public void select( HttpServletResponse response ) throws IOException, SolrServerException { - ontologies = ontologyRepository.filterOntologyIDs(schemas,classifications,ontologies,exclusive,lang); + ontologies = ontologyRepository.filterOntologyIDs(schemas,classifications,ontologies,exclusive,filterOption,lang); final SolrQuery solrQuery = new SolrQuery(); // 1 diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SuggestController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SuggestController.java index 03afc176b..cd4fdb760 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SuggestController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SuggestController.java @@ -14,6 +14,7 @@ import org.springframework.http.MediaType; import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.*; +import uk.ac.ebi.spot.ols.model.FilterOption; import uk.ac.ebi.spot.ols.repository.Validation; import uk.ac.ebi.spot.ols.repository.solr.OlsSolrClient; import uk.ac.ebi.spot.ols.repository.v1.V1OntologyRepository; @@ -39,16 +40,17 @@ public void suggest( @RequestParam("q") String query, @RequestParam(value = "schema", required = false) Collection schemas, @RequestParam(value = "classification", required = false) Collection classifications, + @RequestParam(value = "ontology", required = false) Collection ontologies, @Parameter(description = "Set to true (default setting is false) for intersection (default behavior is union) of classifications.") @RequestParam(value = "exclusive", required = false, defaultValue = "false") boolean exclusive, - @RequestParam(value = "ontology", required = false) Collection ontologies, + @RequestParam(value = "option", required = false, defaultValue = "LINEAR") FilterOption filterOption, @RequestParam(value = "rows", defaultValue = "10") Integer rows, @RequestParam(value = "start", defaultValue = "0") Integer start, @RequestParam(value = "lang", 
defaultValue = "en") String lang, HttpServletResponse response ) throws IOException, SolrServerException { - ontologies = ontologyRepository.filterOntologyIDs(schemas,classifications,ontologies,exclusive,lang); + ontologies = ontologyRepository.filterOntologyIDs(schemas,classifications,ontologies,exclusive,filterOption,lang); final SolrQuery solrQuery = new SolrQuery(); diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1OntologyRepository.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1OntologyRepository.java index f50b27e8b..301997805 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1OntologyRepository.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1OntologyRepository.java @@ -7,7 +7,9 @@ import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.stereotype.Component; +import uk.ac.ebi.spot.ols.model.FilterOption; import uk.ac.ebi.spot.ols.model.v1.V1Ontology; +import uk.ac.ebi.spot.ols.model.License; import uk.ac.ebi.spot.ols.repository.solr.SearchType; import uk.ac.ebi.spot.ols.repository.solr.OlsSolrQuery; import uk.ac.ebi.spot.ols.repository.solr.OlsSolrClient; @@ -58,7 +60,7 @@ public Page getAll(String lang, Pageable pageable) { .map(result -> V1OntologyMapper.mapOntology(result, lang)); } - public Collection filterOntologyIDs(Collection schemas,Collection classifications, Collection ontologies, boolean exclusiveFilter, String lang){ + public Collection filterOntologyIDs(Collection schemas, Collection classifications, Collection ontologies, boolean exclusiveFilter, FilterOption filterOption, String lang){ if (schemas != null) schemas.remove(""); if (classifications != null) @@ -69,7 +71,13 @@ public Collection filterOntologyIDs(Collection schemas,Collectio return null; if ((schemas == null || schemas.size() == 0 ) || (classifications == null || classifications.size() == 0 )) return ontologies; - Set documents = filter(schemas, classifications, exclusiveFilter,lang); + Set documents; + if(FilterOption.COMPOSITE == filterOption) + documents = filterComposite(schemas, classifications, exclusiveFilter,lang); + else if (FilterOption.LINEAR == filterOption) + documents = filter(schemas, classifications, exclusiveFilter,lang); + else + documents = filterLicense(schemas, classifications, exclusiveFilter,lang); Set filteredOntologySet = new HashSet(); for (V1Ontology document : documents){ filteredOntologySet.add(document.ontologyId); @@ -182,6 +190,107 @@ else if (String.class.isAssignableFrom(field.getType())) { return filteredSet; } + public Set filterComposite(Collection schemas, Collection classifications, boolean exclusive, String lang){ + Set tempSet = new HashSet(); + if(schemas != null && classifications != null) + if(!exclusive) { + for (V1Ontology ontologyDocument : getAll(lang)) { + for(Map> classificationSchema : (Collection>>) ontologyDocument.config.classifications) { + for (String schema: schemas) + if(classificationSchema.containsKey(schema)) + for (String classification: classifications) { + if (classificationSchema.get(schema) != null) + if (!classificationSchema.get(schema).isEmpty()) + if (classificationSchema.get(schema).contains(classification)) { + tempSet.add(ontologyDocument); + } + } + + } + } + } else if (exclusive && schemas != null && schemas.size() == 1 && classifications != null && classifications.size() == 1) { + String schema = schemas.iterator().next(); + String classification = classifications.iterator().next(); + 
System.out.println("schema: "+schema); + System.out.println("classification: "+classification); + for (V1Ontology ontologyDocument : getAll(lang)){ + for(Map> classificationSchema : (Collection>>) ontologyDocument.config.classifications){ + if(classificationSchema.containsKey(schema)) + if (classificationSchema.get(schema) != null) + if (!classificationSchema.get(schema).isEmpty()){ + for (String s :classificationSchema.get(schema)) + System.out.println(s); + if(classificationSchema.get(schema).contains(classification)) + tempSet.add(ontologyDocument); + } + + } + } + } else { + for (V1Ontology ontologyDocument : getAll(lang)) { + Set tempClassifications = new HashSet(); + if(ontologyDocument.config.classifications != null) + if (!((Collection>>) ontologyDocument.config.classifications).isEmpty()) { + for (Map> classificationSchema : (Collection>>) ontologyDocument.config.classifications) { + for (String schema : schemas) + if (classificationSchema.containsKey(schema)) { + for (String classification : classifications) { + if (classificationSchema.get(schema) != null) { + if (!classificationSchema.get(schema).isEmpty()) { + if (classificationSchema.get(schema).contains(classification)) { + tempClassifications.add(classification); + } + } + } + } + } + } + if (tempClassifications.containsAll(classifications)) + tempSet.add(ontologyDocument); + } + } + } + return tempSet; + } + + + public Set filterLicense(Collection schemas, Collection classifications, boolean exclusive, String lang){ + Set tempSet = new HashSet(); + Set filteredSet = new HashSet(); + tempSet.addAll(getAll(lang)); + + for (V1Ontology ontology : tempSet){ + if (ontology.config.license != null){ + License license = ontology.config.license; + String label = license.getLabel() != null ? (String) license.getLabel() : ""; + String logo = license.getLogo() != null ? (String) license.getLogo() : ""; + String url = license.getUrl() != null ? 
(String) license.getUrl() : ""; + if (exclusive){ + Set tempClassifications = new HashSet(); + if (schemas.contains("license.label") && label.length() > 0 && classifications.contains(label)) + tempClassifications.add("license.label"); + if (schemas.contains("license.logo") && logo.length() > 0 && classifications.contains(logo)) + tempClassifications.add("license.logo"); + if (schemas.contains("license.url") && url.length() > 0 && classifications.contains(url)) + tempClassifications.add("license.url"); + + if(tempClassifications.containsAll(classifications)) + filteredSet.add(ontology); + + } else { + if (schemas.contains("license.label") && label.length() > 0 && classifications.contains(label)) + filteredSet.add(ontology); + if (schemas.contains("license.logo") && logo.length() > 0 && classifications.contains(logo)) + filteredSet.add(ontology); + if (schemas.contains("license.url") && url.length() > 0 && classifications.contains(url)) + filteredSet.add(ontology); + } + } + } + + return filteredSet; + } + public Set getSchemaKeys(String lang){ Set tempSet = new HashSet(); tempSet.addAll(getAll(lang)); diff --git a/dataload/configs/testfilter.json b/dataload/configs/testfilter.json new file mode 100644 index 000000000..674e5598d --- /dev/null +++ b/dataload/configs/testfilter.json @@ -0,0 +1 @@ +{"name": "OBO Foundry", "title": "The OBO Foundry", "markdown": "kramdown", "highlighter": "rouge", "baseurl": "/", "imgurl": "/images", "repo": "https://github.com/OBOFoundry/OBOFoundry.github.io/", "repo_src": "https://github.com/OBOFoundry/OBOFoundry.github.io/blob/master/", "author": {"name": "OBO Technical WG"}, "ontologies": [{"ontology_purl": "http://purl.obolibrary.org/obo/iao.owl", "id": "iao", "description": "An ontology of information entities.", "homepage": "https://github.com/information-artifact-ontology/IAO/", "license": {"url": "http://creativecommons.org/licenses/by/4.0/", "label": "CC-BY"}, "title": "Information Artifact Ontology", "tracker": "https://github.com/information-artifact-ontology/IAO/issues", "definition_property": ["http://purl.obolibrary.org/obo/IAO_0000115"], "creator": ["Adam Goldstein", "Alan Ruttenberg", "Albert Goldfain", "Barry Smith", "Bjoern Peters", "Carlo Torniai", "Chris Mungall", "Chris Stoeckert", "Christian A. Boelling", "Darren Natale", "David Osumi-Sutherland", "Gwen Frishkoff", "Holger Stenzhorn", "James A. 
Overton", "James Malone", "Jennifer Fostel", "Jie Zheng", "Jonathan Rees", "Larisa Soldatova", "Lawrence Hunter", "Mathias Brochhausen", "Matt Brush", "Melanie Courtot", "Michel Dumontier", "Paolo Ciccarese", "Pat Hayes", "Philippe Rocca-Serra", "Randy Dipert", "Ron Rudnicki", "Satya Sahoo", "Sivaram Arabandi", "Werner Ceusters", "William Duncan", "William Hogan", "Yongqun (Oliver) He"], "classifications": [{"collection": ["NFDI4ING", "NFDI4CHEM", "FAIR Data Spaces"]}, {"subject": ["General"]}]}, {"ontology_purl": "https://raw.githubusercontent.com/micheldumontier/semanticscience/master/ontology/sio/release/sio-release.owl", "title": "Semanticscience Integrated Ontology (SIO)", "id": "sio", "preferredPrefix": "sio", "license": {"label": "CC BY 4.0", "url": "http://creativecommons.org/licenses/by/4.0/"}, "description": "The Semanticscience Integrated Ontology (SIO) provides a simple, integrated ontology of types and relations for rich description of objects, processes and their attributes.", "homepage": "https://github.com/MaastrichtU-IDS/semanticscience", "creator": ["Michel Dumontier"], "is_foundary": false, "base_uri": ["http://semanticscience.org/resource/"], "classifications": [{"collection": ["NFDI4ING", "NFDI4CHEM", "ESS"]}, {"subject": ["General"]}], "repo_url": "https://github.com/MaastrichtU-IDS/semanticscience"}, {"ontology_purl": "http://purl.obolibrary.org/obo/bfo.owl", "description": "The upper level ontology upon which OBO Foundry ontologies are built.", "id": "bfo", "license": {"label": "CC-BY", "logo": "http://mirrors.creativecommons.org/presskit/buttons/80x15/png/by.png", "url": "http://creativecommons.org/licenses/by/4.0/"}, "mailing_list": "https://groups.google.com/forum/#!forum/bfo-discuss", "title": "Basic Formal Ontology", "homepage": "http://ifomis.org/bfo/", "tracker": "https://github.com/BFO-ontology/BFO/issues", "definition_property": ["http://purl.obolibrary.org/obo/IAO_0000115"], "creator": ["Alan Ruttenberg", "Albert Goldfain", "Barry Smith", "Bill Duncan", "Bjoern Peters", "Chris Mungall", "David Osumi-Sutherland", "Fabian Neuhaus", "Holger Stenzhorn", "James A. Overton", "Janna Hastings", "Jie Zheng", "Jonathan Bona", "Larry Hunter", "Leonard Jacuzzo", "Ludger Jansen", "Mark Ressler", "Mathias Brochhausen", "Mauricio Almeida", "Melanie Courtot", "Pierre Grenon", "Randall Dipert", "Ron Rudnicki", "Selja Sepp\u00e4l\u00e4", "Stefan Schulz", "Thomas Bittner", "Werner Ceusters", "Yongqun He"], "is_foundary": true, "preferredPrefix": "BFO", "classifications": [{"collection": ["NFDI4CHEM", "NFDI4ING", "NFDI4CAT", "Foundational Ontologies"]}, {"subject": ["General"]}], "repo_url": "https://github.com/BFO-ontology/BFO"}]} From 976c2d164902529c9a463c1f40c59dd47ee3f016 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Wed, 17 Apr 2024 10:22:46 +0200 Subject: [PATCH 041/146] documented the filteroption parameter in all endpoints. Removed unnecessary parameters from allstatsbyschema endpoint. 
--- .../ols/controller/api/v1/V1OntologyController.java | 3 +++ .../ols/controller/api/v1/V1SearchController.java | 3 +++ .../ols/controller/api/v1/V1SelectController.java | 3 +++ .../ols/controller/api/v1/V1SuggestController.java | 3 +++ .../ols/controller/api/v2/V2OntologyController.java | 3 +++ .../controller/api/v2/V2StatisticsController.java | 12 +++++------- 6 files changed, 20 insertions(+), 7 deletions(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java index ce5dd61b1..24c525843 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java @@ -90,6 +90,9 @@ HttpEntity> getOntologiesByMetadata( @RequestParam(value = "classification", required = true) Collection classifications, @Parameter(description = "Set to true (default setting is false) for intersection (default behavior is union) of classifications.") @RequestParam(value = "exclusive", required = false, defaultValue = "false") boolean exclusive, + @Parameter(description = "Use License option to filter based on license.label, license.logo and license.url variables. " + + "Use Composite Option to filter based on the objects (i.e. collection, subject) within the classifications variable. " + + "Use Linear option to filter based on String and Collection based variables.") @RequestParam(value = "option", required = false, defaultValue = "LINEAR") FilterOption filterOption, @PageableDefault(size = 100, page = 0) Pageable pageable, @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java index 47d25c2b6..87c5f00b2 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java @@ -57,6 +57,9 @@ public void search( @RequestParam(value = "ontology", required = false) Collection ontologies, @Parameter(description = "Set to true (default setting is false) for intersection (default behavior is union) of classifications.") @RequestParam(value = "exclusive", required = false, defaultValue = "false") boolean exclusive, + @Parameter(description = "Use License option to filter based on license.label, license.logo and license.url variables. " + + "Use Composite Option to filter based on the objects (i.e. collection, subject) within the classifications variable. 
" + + "Use Linear option to filter based on String and Collection based variables.") @RequestParam(value = "option", required = false, defaultValue = "LINEAR") FilterOption filterOption, @RequestParam(value = "type", required = false) Collection types, @RequestParam(value = "slim", required = false) Collection slims, diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java index a6446af28..7ff41aa1c 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java @@ -49,6 +49,9 @@ public void select( @RequestParam(value = "ontology", required = false) Collection ontologies, @Parameter(description = "Set to true (default setting is false) for intersection (default behavior is union) of classifications.") @RequestParam(value = "exclusive", required = false, defaultValue = "false") boolean exclusive, + @Parameter(description = "Use License option to filter based on license.label, license.logo and license.url variables. " + + "Use Composite Option to filter based on the objects (i.e. collection, subject) within the classifications variable. " + + "Use Linear option to filter based on String and Collection based variables.") @RequestParam(value = "option", required = false, defaultValue = "LINEAR") FilterOption filterOption, @RequestParam(value = "type", required = false) Collection types, @RequestParam(value = "slim", required = false) Collection slims, diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SuggestController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SuggestController.java index cd4fdb760..49cc77b65 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SuggestController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SuggestController.java @@ -43,6 +43,9 @@ public void suggest( @RequestParam(value = "ontology", required = false) Collection ontologies, @Parameter(description = "Set to true (default setting is false) for intersection (default behavior is union) of classifications.") @RequestParam(value = "exclusive", required = false, defaultValue = "false") boolean exclusive, + @Parameter(description = "Use License option to filter based on license.label, license.logo and license.url variables. " + + "Use Composite Option to filter based on the objects (i.e. collection, subject) within the classifications variable. 
" + + "Use Linear option to filter based on String and Collection based variables.") @RequestParam(value = "option", required = false, defaultValue = "LINEAR") FilterOption filterOption, @RequestParam(value = "rows", defaultValue = "10") Integer rows, @RequestParam(value = "start", defaultValue = "0") Integer start, diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologyController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologyController.java index 34eb4e4e7..b0bb16613 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologyController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologyController.java @@ -58,6 +58,9 @@ public HttpEntity> getOntologies( @RequestParam(value = "ontology", required = false) List ontologies, @Parameter(description = "Set to true (default setting is false) for intersection (default behavior is union) of classifications.") @RequestParam(value = "exclusive", required = false, defaultValue = "false") boolean exclusive, + @Parameter(description = "Use License option to filter based on license.label, license.logo and license.url variables. " + + "Use Composite Option to filter based on the objects (i.e. collection, subject) within the classifications variable. " + + "Use Linear option to filter based on String and Collection based variables.") @RequestParam(value = "option", required = false, defaultValue = "LINEAR") FilterOption filterOption ) throws ResourceNotFoundException, IOException { Map> properties = new HashMap<>(); diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java index 229ef2a14..af2ca9709 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java @@ -46,6 +46,9 @@ public HttpEntity getStatistics( @Parameter(description = "Set to true (default setting is false) for intersection (default behavior is union) of classifications.") @RequestParam(value = "ontologyIds", required = false) Collection ontologyIds, @RequestParam(value = "exclusive", required = false, defaultValue = "false") boolean exclusive, + @Parameter(description = "Use License option to filter based on license.label, license.logo and license.url variables. " + + "Use Composite Option to filter based on the objects (i.e. collection, subject) within the classifications variable. " + + "Use Linear option to filter based on String and Collection based variables.") @RequestParam(value = "option", required = false, defaultValue = "LINEAR") FilterOption filterOption, @RequestParam(value = "lang", defaultValue = "en") String lang) throws ResourceNotFoundException, IOException{ @@ -60,24 +63,19 @@ public HttpEntity getStatistics( } return new ResponseEntity<>( computeStats(queryString), HttpStatus.OK); } - @Operation(description = "Get Schema based Statistics. All schemas with their respective classifications can be computed if a schema is not specified.") + @Operation(description = "Get Composite Schema based Statistics. 
All schemas with their respective classifications under the classifications variable will be computed.") @RequestMapping(path = "/allstatsbyschema", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) HttpEntity getStatisticsBySchema( @RequestParam(value = "schema", required = false) Collection schemas, - @RequestParam(value = "exclusive", required = false, defaultValue = "false") boolean exclusive, - @RequestParam(value = "option", required = false, defaultValue = "LINEAR") FilterOption filterOption, @RequestParam(value = "lang", defaultValue = "en") String lang ) throws IOException { MultiKeyMap summaries = new MultiKeyMap(); - Collection keys = ontologyRepository.getSchemaKeys(lang); - for (String key : keys) { Set values = ontologyRepository.getSchemaValues(Collections.singleton(key),lang); - for (String value : values) { - summaries.put(key,value, getStatistics(Collections.singleton(key),Collections.singleton(value), Collections.emptySet(),exclusive,filterOption,lang)); + summaries.put(key,value, getStatistics(Collections.singleton(key),Collections.singleton(value), Collections.emptySet(),false,FilterOption.LINEAR,lang)); } } From af1d757603d80a115c193f9a416932e307469d17 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Mon, 22 Apr 2024 12:18:20 +0200 Subject: [PATCH 042/146] updated some docker files and scripts for #8 --- backend/Dockerfile | 4 ++-- dataload/Dockerfile | 8 ++++---- dataload/load_into_neo4j.sh | 2 +- docker-compose.yml | 6 +++--- 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/backend/Dockerfile b/backend/Dockerfile index 451925b87..4c0a7d6d6 100644 --- a/backend/Dockerfile +++ b/backend/Dockerfile @@ -1,8 +1,8 @@ -FROM maven:3.6.3-adoptopenjdk-14 +FROM maven:3.9.6-eclipse-temurin-17 RUN mkdir /opt/ols -COPY . /opt/ols/ +COPY . /opt/ols/ RUN cd /opt/ols && ls && mvn clean package -DskipTests EXPOSE 8080 diff --git a/dataload/Dockerfile b/dataload/Dockerfile index 6c7b6c75f..edecb0062 100644 --- a/dataload/Dockerfile +++ b/dataload/Dockerfile @@ -3,18 +3,18 @@ # This image, once built, should allow any arbitrary config to be loaded and output neo4j/solr datafiles -FROM maven:3.8-jdk-11 +FROM maven:3.9.6-eclipse-temurin-17 # Extract Neo4j and Solr vanilla installs to /opt/neo4j and /opt/solr # -# We use these only as temporary servers for the dataload. The "real" Neo4j +# We use these only as temporary servers for the dataload. The "real" Neo4j # and Solr servers are the standard images specified in docker-compose.yml # RUN mkdir /opt/neo4j && \ - curl https://dist.neo4j.org/neo4j-community-4.4.9-unix.tar.gz | tar -xz --strip-components=1 -C /opt/neo4j + curl https://dist.neo4j.org/neo4j-community-5.19.0-unix.tar.gz | tar -xz --strip-components=1 -C /opt/neo4j RUN mkdir /opt/solr && \ curl https://archive.apache.org/dist/solr/solr/9.0.0/solr-9.0.0.tgz | tar -xz --strip-components=1 -C /opt/solr @@ -25,7 +25,7 @@ RUN echo "dbms.jvm.additional=-Dorg.neo4j.kernel.impl.index.schema.GenericNative # Copy all the code for dataload into /opt/dataload and build the JARs # RUN mkdir /opt/dataload -COPY . /opt/dataload/ +COPY . 
/opt/dataload/ RUN cd /opt/dataload && mvn clean package # Copy the OLS4 Solr config into our temporary Solr server diff --git a/dataload/load_into_neo4j.sh b/dataload/load_into_neo4j.sh index 21bc5b286..59e002633 100755 --- a/dataload/load_into_neo4j.sh +++ b/dataload/load_into_neo4j.sh @@ -8,7 +8,7 @@ fi rm -rf $1/data/databases/neo4j rm -rf $1/data/transactions/neo4j -$1/bin/neo4j-admin import \ +$1/bin/neo4j-admin database import full \ --ignore-empty-strings=true \ --legacy-style-quoting=false \ --multiline-fields=true \ diff --git a/docker-compose.yml b/docker-compose.yml index 978f97fee..baea0f2d7 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -22,7 +22,7 @@ services: ols4-dataload: condition: service_completed_successfully ols4-neo4j: - image: neo4j:4.4.9-community + image: neo4j:5.19.0-community ports: - 7474:7474 - 7687:7687 @@ -42,10 +42,10 @@ services: - OLS_NEO4J_HOST=bolt://ols4-neo4j:7687 depends_on: - ols4-solr - - ols4-neo4j + - ols4-neo4j links: - ols4-solr - - ols4-neo4j + - ols4-neo4j ols4-frontend: build: context: ./frontend From 78cb7a0065c3b401f2037bf0a94e6f24d2594aef Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Thu, 25 Apr 2024 17:32:46 +0200 Subject: [PATCH 043/146] updated pom file and neo4j command script for #8 --- backend/pom.xml | 8 ++++---- dataload/load_into_neo4j.sh | 6 ++++-- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/backend/pom.xml b/backend/pom.xml index 935a5a0ed..a45e1e8ff 100644 --- a/backend/pom.xml +++ b/backend/pom.xml @@ -15,9 +15,9 @@ - 11 - 11 - 11 + 17 + 17 + 17 @@ -31,7 +31,7 @@ org.neo4j.driver neo4j-java-driver - 4.4.1 + 5.19.0 diff --git a/dataload/load_into_neo4j.sh b/dataload/load_into_neo4j.sh index 59e002633..0c5305687 100755 --- a/dataload/load_into_neo4j.sh +++ b/dataload/load_into_neo4j.sh @@ -14,8 +14,10 @@ $1/bin/neo4j-admin database import full \ --multiline-fields=true \ --read-buffer-size=16777216 \ --array-delimiter="|" \ - --database=neo4j \ - --processors=16 \ + --threads=16 \ $(./make_csv_import_cmd.sh $2) +$1/bin/neo4j-admin database info neo4j + + From 8528555ba954104c61ef0a3aa4c7de9a3e75d052 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Fri, 24 May 2024 16:20:26 +0200 Subject: [PATCH 044/146] initialized a neo4j import module for #8 --- dataload/csv2neo/dependency-reduced-pom.xml | 53 +++++ dataload/csv2neo/pom.xml | 83 +++++++ .../uk/ac/ebi/spot/csv2neo/ImportCSV.java | 212 ++++++++++++++++++ 3 files changed, 348 insertions(+) create mode 100644 dataload/csv2neo/dependency-reduced-pom.xml create mode 100644 dataload/csv2neo/pom.xml create mode 100644 dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java diff --git a/dataload/csv2neo/dependency-reduced-pom.xml b/dataload/csv2neo/dependency-reduced-pom.xml new file mode 100644 index 000000000..d8516fc87 --- /dev/null +++ b/dataload/csv2neo/dependency-reduced-pom.xml @@ -0,0 +1,53 @@ + + + 4.0.0 + uk.ac.ebi.spot + csv2neo + csv2neo + 1.0-SNAPSHOT + + + + maven-shade-plugin + + + package + + shade + + + + + false + + + uk.ac.ebi.spot.csv2neo.ImportCSV + + true + + + + + + false + + + + + *:* + + META-INF/*.SF + META-INF/*.DSA + META-INF/*.RSA + + + + + + + + + 17 + 17 + + diff --git a/dataload/csv2neo/pom.xml b/dataload/csv2neo/pom.xml new file mode 100644 index 000000000..803f87766 --- /dev/null +++ b/dataload/csv2neo/pom.xml @@ -0,0 +1,83 @@ + + + 4.0.0 + + uk.ac.ebi.spot + csv2neo + 1.0-SNAPSHOT + csv2neo + jar + + + 
17 + 17 + + + + + org.neo4j.driver + neo4j-java-driver + 5.19.0 + + + + + + + + org.apache.maven.plugins + maven-shade-plugin + + false + + + + + + uk.ac.ebi.spot.csv2neo.ImportCSV + + + true + + + + + + false + + + + + + *:* + + + META-INF/*.SF + META-INF/*.DSA + META-INF/*.RSA + + + + + + + package + + + shade + + + + + + + diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java new file mode 100644 index 000000000..52bb5138c --- /dev/null +++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java @@ -0,0 +1,212 @@ +package uk.ac.ebi.spot.csv2neo; + +import org.neo4j.driver.AuthTokens; +import org.neo4j.driver.GraphDatabase; +import org.neo4j.driver.SessionConfig; +import org.neo4j.driver.Transaction; + +import java.io.*; +import java.util.ArrayList; +import java.util.List; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +public class ImportCSV { + + static FileReader fr; + static BufferedReader br; + + + public static List showFiles(File[] files) throws IOException { + List fileList = new ArrayList(); + for (File file : files) { + if (file.isDirectory()) { + System.out.println("Directory: " + file.getAbsolutePath()); + fileList.addAll(showFiles(file.listFiles())); + } else { + System.out.println("File: " + file.getAbsolutePath()); + fileList.add(file); + } + } + + return fileList; + } + + public static String generateOntologyCreationQuery(String[] titles, String[] values){ + + StringBuilder sb = new StringBuilder(); + + if (titles.length == values.length) { + + sb.append("CREATE (") + .append(":") + .append("`"+values[1].substring(1, values[1].length() - 1).replace("|","`:`")+"`") + .append(" {"); + sb.append("id: ").append("\'"+values[0].substring(1, values[0].length() - 1)+"\'"); + + for (int i = 2; i < values.length; i++) { + String text = values[i].substring(1, values[i].length() - 1).replaceAll("\"\"","\"").replaceAll("\\\\", "\\\\\\\\").replaceAll("\'","\\\\'"); + sb.append(", ") + .append("`"+titles[i].substring(1, titles[i].length() - 1).split(":")[0].replaceAll("\"\"","\"")+"`") + .append(": ") + .append(convertToJSONArray("\'"+text+"\'")); + } + + sb.append("}") + .append(")") + .append(" "); + } + return sb.toString(); + } + + public static String generateClassCreationQuery(String[] titles, String[] values){ + + StringBuilder sb = new StringBuilder(); + + + if (titles.length == values.length) { + + sb.append("CREATE (") + // .append(values[0].substring(1, values[0].length() - 1)) + .append(":") + .append(values[1].substring(1, values[1].length() - 1).replace('|',':')) + .append(" {"); + sb.append("id: ").append("\'"+values[0].substring(1, values[0].length() - 1)+"\'"); + + for (int i = 2; i < values.length; i++) { + sb.append(", "); + + if (titles[i].substring(1, titles[i].length() - 1).split(":")[0].replaceAll("\"\"","\"").replace("\\","__").length() > 30) + sb.append("`"+titles[i].substring(1, titles[i].length() - 1).split(":")[0].replaceAll("\"\"","\"").replace("\\","__")+"`"); + else + sb.append(titles[i].substring(1, titles[i].length() - 1).split(":")[0].replaceAll("\"\"","\"").replace("\\","__")); + sb.append(": ").append(convertToJSONArray("\'"+values[i].substring(1, values[i].length() - 1).replaceAll("\"\"","\"").replace("\\","__")+"\'")); + + } + + sb.append("}") + .append(")") + .append(" "); + } + return sb.toString(); + } + + public static String generateClassSetQuery(String[] titles, String[] values){ + + StringBuilder sb = new 
StringBuilder(); + + if (titles.length == values.length){ + sb.append("MATCH (n) where n.id = ").append("\'"+values[0].substring(1, values[0].length() - 1)+"\'").append(" SET "); + + boolean first = true; + + for (int i = 2; i < values.length; i++){ + + if (titles[i].substring(1, titles[i].length() - 1).split(":")[0].replaceAll("\"\"","\"").replace("\\","__").length() <= 30) + continue; + + if(!first) + sb.append(" AND "); + first = false; + + sb.append("n.").append(titles[i].substring(1, titles[i].length() - 1).split(":")[0].replaceAll("\"\"","\"").replace("\\","__")) + .append(" = ").append(convertToJSONArray("\'"+values[i].substring(1, values[i].length() - 1).replaceAll("\"\"","\"").replace("\\","__")+"\'")); + + } + + } + + return sb.toString(); + } + + public static String convertToJSONArray(String input){ + if (input.contains("|")){ + input = input.substring(1,input.length()-1); + String[] sarray = input.split("\\|"); + StringBuilder sb = new StringBuilder(); + sb.append("["); + for (int i = 0;i files = showFiles(dir.listFiles()); + + try (var driver = GraphDatabase.driver(dbUri, AuthTokens.basic(dbUser, dbPassword))) { + driver.verifyConnectivity(); + + // import org.neo4j.driver.SessionConfig + + try (var session = driver.session(SessionConfig.builder().withDatabase("neo4j").build())) { + // session usage + + for (File file : files){ + if(file.getName().contains("_edges") || !file.getName().endsWith(".csv")) + continue; + // classes doesnt work ontologies work. {_json: '{ + fr = new FileReader(file.getAbsolutePath()); + br = new BufferedReader(fr); + String line = br.readLine(); + String[] titles = {}; + if (line != null) + titles = line.split(",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)"); + + while((line = br.readLine())!=null){ + String[] pieces = line.split(",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)"); + + System.out.println("file: "+file.getName()); + + String query = generateOntologyCreationQuery(titles,pieces); + String query2 = generateClassSetQuery(titles,pieces); + System.out.println("query: "+query); + //System.out.println("query2: "+query2); + + try (Transaction tx = session.beginTransaction()) { + // "CREATE (o:Organization {id: randomuuid(), createdDate: datetime()})" + tx.run(query); + //tx.run(query2); + tx.commit(); + // use tx.run() to run queries + // tx.commit() to commit the transaction + // tx.rollback() to rollback the transaction + } /*catch(Exception e){ + e.printStackTrace(); + }*/ + } + } + } + } + } +} From 6f4431d70be5addca8c448abb9e152dbf367a3e4 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Mon, 27 May 2024 18:19:18 +0200 Subject: [PATCH 045/146] added a split function to extract json in csv and a routine for generating and executing relationship importer queries in #8 --- .../uk/ac/ebi/spot/csv2neo/ImportCSV.java | 151 ++++++++++++------ dataload/pom.xml | 1 + 2 files changed, 99 insertions(+), 53 deletions(-) diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java index 52bb5138c..cdb345aed 100644 --- a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java +++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java @@ -32,7 +32,7 @@ public static List showFiles(File[] files) throws IOException { return fileList; } - public static String generateOntologyCreationQuery(String[] titles, String[] values){ + public static String generateNodeCreationQuery(String[] titles, String[] values){ 
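+        // Emits one Cypher CREATE statement per CSV row, of the shape
+        //   CREATE (:`Label1`:`Label2` {id: '...', `column`: value, ...})
+        // values[0] carries the node id and values[1] the '|'-separated node labels;
+        // each remaining title is trimmed to the column name before its ':' type
+        // suffix, and convertToJSONArray turns '|'-separated cells into list literals.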
StringBuilder sb = new StringBuilder(); @@ -55,43 +55,16 @@ public static String generateOntologyCreationQuery(String[] titles, String[] val sb.append("}") .append(")") .append(" "); + } else { + System.out.println("titles and values are not equal"); + System.out.println("titles: "+titles.length + " - values: " +values.length); + for (String title : titles) + System.out.println("title: "+title); } return sb.toString(); } - public static String generateClassCreationQuery(String[] titles, String[] values){ - - StringBuilder sb = new StringBuilder(); - - - if (titles.length == values.length) { - - sb.append("CREATE (") - // .append(values[0].substring(1, values[0].length() - 1)) - .append(":") - .append(values[1].substring(1, values[1].length() - 1).replace('|',':')) - .append(" {"); - sb.append("id: ").append("\'"+values[0].substring(1, values[0].length() - 1)+"\'"); - - for (int i = 2; i < values.length; i++) { - sb.append(", "); - - if (titles[i].substring(1, titles[i].length() - 1).split(":")[0].replaceAll("\"\"","\"").replace("\\","__").length() > 30) - sb.append("`"+titles[i].substring(1, titles[i].length() - 1).split(":")[0].replaceAll("\"\"","\"").replace("\\","__")+"`"); - else - sb.append(titles[i].substring(1, titles[i].length() - 1).split(":")[0].replaceAll("\"\"","\"").replace("\\","__")); - sb.append(": ").append(convertToJSONArray("\'"+values[i].substring(1, values[i].length() - 1).replaceAll("\"\"","\"").replace("\\","__")+"\'")); - - } - - sb.append("}") - .append(")") - .append(" "); - } - return sb.toString(); - } - - public static String generateClassSetQuery(String[] titles, String[] values){ + public static String generateNodeSetQuery(String[] titles, String[] values){ StringBuilder sb = new StringBuilder(); @@ -101,19 +74,28 @@ public static String generateClassSetQuery(String[] titles, String[] values){ boolean first = true; for (int i = 2; i < values.length; i++){ - - if (titles[i].substring(1, titles[i].length() - 1).split(":")[0].replaceAll("\"\"","\"").replace("\\","__").length() <= 30) - continue; - if(!first) sb.append(" AND "); first = false; + String text = values[i].substring(1, values[i].length() - 1).replaceAll("\"\"","\"").replaceAll("\\\\", "\\\\\\\\").replaceAll("\'","\\\\'"); + sb.append("n.").append("`"+titles[i].substring(1, titles[i].length() - 1).split(":")[0].replaceAll("\"\"","\"")+"`") + .append(" = ").append(convertToJSONArray("\'"+text+"\'")); + } - sb.append("n.").append(titles[i].substring(1, titles[i].length() - 1).split(":")[0].replaceAll("\"\"","\"").replace("\\","__")) - .append(" = ").append(convertToJSONArray("\'"+values[i].substring(1, values[i].length() - 1).replaceAll("\"\"","\"").replace("\\","__")+"\'")); + } - } + return sb.toString(); + } + + public static String generateRelationCreationQuery(String[] titles, String[] values){ + StringBuilder sb = new StringBuilder(); + if (titles.length == values.length){ + sb.append("MATCH (n {id: "+"\'"+values[0].substring(1, values[0].length() - 1)+"\'"+"}),") + .append("(m {id: "+"\'"+values[2].substring(1, values[2].length() - 1)+"\'"+"}) ") + .append("CREATE (n)-[:") + .append("`"+values[1].substring(1, values[1].length() - 1).replace("|","`:`")+"`") + .append("]->(m)"); } return sb.toString(); @@ -154,14 +136,68 @@ public static String decode(String input) { return decodedString.toString(); } + public static String[] split(String input){ + String[] tokens = {}; + char c = '{'; + char d = '\"'; + char e = '}'; + String left = String.valueOf(d) + c; + String right = String.valueOf(e) + d; + 
int countLeftCurly = countOccurrences(input, left); + int countRightCurly = countOccurrences(input, right); + + if(countLeftCurly == 0 && countRightCurly == 0){ + tokens = input.split(",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)"); + } else if(countLeftCurly == countRightCurly && countLeftCurly == 1){ + String[] content = input.split("\"\\{"); + String before = ""; + String after = ""; + String json = ""; + before = content[0]; + if (before.endsWith(",")) + before = before.substring(0,before.length()-1); + String[] content2 = content[1].split("\\}\""); + json = String.valueOf(d)+String.valueOf(c)+content2[0]+String.valueOf(e)+String.valueOf(d); + after = content2[1]; + if(after.startsWith(",")) + after = after.substring(1,after.length()); + String[] beforeArray = before.split(",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)"); + String[] afterArray = after.split(",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)"); + int length = beforeArray.length + 1 + afterArray.length; + tokens = new String[length]; + for (int i =0;i files = showFiles(dir.listFiles()); try (var driver = GraphDatabase.driver(dbUri, AuthTokens.basic(dbUser, dbPassword))) { @@ -171,42 +207,51 @@ public static void main(String... args) throws IOException { try (var session = driver.session(SessionConfig.builder().withDatabase("neo4j").build())) { // session usage + try{ + session.run("CREATE CONSTRAINT FOR (n:Ontology) REQUIRE n.id IS UNIQUE"); + session.run("CREATE CONSTRAINT FOR (n:OntologyEntity) REQUIRE n.id IS UNIQUE"); + session.run("CREATE CONSTRAINT FOR (n:OntologyClass) REQUIRE n.id IS UNIQUE"); + } catch(Exception e){ + e.printStackTrace(); + } for (File file : files){ - if(file.getName().contains("_edges") || !file.getName().endsWith(".csv")) + if((!file.getName().contains("_edges")) || !file.getName().endsWith(".csv")) continue; - // classes doesnt work ontologies work. 
{_json: '{ fr = new FileReader(file.getAbsolutePath()); br = new BufferedReader(fr); String line = br.readLine(); String[] titles = {}; if (line != null) titles = line.split(",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)"); - + String[] pieces = null; while((line = br.readLine())!=null){ - String[] pieces = line.split(",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)"); + System.out.println(line); + pieces = line.split(",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)"); + System.out.println("2"); System.out.println("file: "+file.getName()); - String query = generateOntologyCreationQuery(titles,pieces); - String query2 = generateClassSetQuery(titles,pieces); + String query = generateRelationCreationQuery(titles,pieces); + //String query2 = generateSetQuery(titles,pieces); System.out.println("query: "+query); //System.out.println("query2: "+query2); try (Transaction tx = session.beginTransaction()) { - // "CREATE (o:Organization {id: randomuuid(), createdDate: datetime()})" tx.run(query); - //tx.run(query2); tx.commit(); + tx.close(); // use tx.run() to run queries // tx.commit() to commit the transaction // tx.rollback() to rollback the transaction - } /*catch(Exception e){ + } catch(Exception e){ e.printStackTrace(); - }*/ + } + System.gc(); } } } } + System.out.println("kamil"); } } diff --git a/dataload/pom.xml b/dataload/pom.xml index 8c7727682..e6e2378f4 100644 --- a/dataload/pom.xml +++ b/dataload/pom.xml @@ -13,6 +13,7 @@ linker json2solr json2neo + csv2neo extras From a6dde7205044a48536f56b0de5d27a755b85fd5a Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Tue, 28 May 2024 14:03:04 +0200 Subject: [PATCH 046/146] added ontology remove, ingest and authentication menu options to CSV Importer in #8 --- .../uk/ac/ebi/spot/csv2neo/ImportCSV.java | 252 +++++++----------- .../ac/ebi/spot/csv2neo/QueryGeneration.java | 121 +++++++++ 2 files changed, 213 insertions(+), 160 deletions(-) create mode 100644 dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java index cdb345aed..399d8af50 100644 --- a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java +++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java @@ -1,22 +1,26 @@ package uk.ac.ebi.spot.csv2neo; -import org.neo4j.driver.AuthTokens; -import org.neo4j.driver.GraphDatabase; -import org.neo4j.driver.SessionConfig; -import org.neo4j.driver.Transaction; - +import org.neo4j.driver.*; import java.io.*; import java.util.ArrayList; import java.util.List; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - +import org.apache.commons.cli.CommandLine; +import org.apache.commons.cli.CommandLineParser; +import org.apache.commons.cli.DefaultParser; +import org.apache.commons.cli.Options; +import org.apache.commons.cli.ParseException; +import static uk.ac.ebi.spot.csv2neo.QueryGeneration.*; + +/** + * @author Erhun Giray TUNCAY + * @email giray.tuncay@tib.eu + * TIB-Leibniz Information Center for Science and Technology + */ public class ImportCSV { static FileReader fr; static BufferedReader br; - public static List showFiles(File[] files) throws IOException { List fileList = new ArrayList(); for (File file : files) { @@ -32,111 +36,57 @@ public static List showFiles(File[] files) throws IOException { return fileList; } - public static String generateNodeCreationQuery(String[] titles, String[] values){ - - StringBuilder 
sb = new StringBuilder(); - - if (titles.length == values.length) { - - sb.append("CREATE (") - .append(":") - .append("`"+values[1].substring(1, values[1].length() - 1).replace("|","`:`")+"`") - .append(" {"); - sb.append("id: ").append("\'"+values[0].substring(1, values[0].length() - 1)+"\'"); - - for (int i = 2; i < values.length; i++) { - String text = values[i].substring(1, values[i].length() - 1).replaceAll("\"\"","\"").replaceAll("\\\\", "\\\\\\\\").replaceAll("\'","\\\\'"); - sb.append(", ") - .append("`"+titles[i].substring(1, titles[i].length() - 1).split(":")[0].replaceAll("\"\"","\"")+"`") - .append(": ") - .append(convertToJSONArray("\'"+text+"\'")); - } - - sb.append("}") - .append(")") - .append(" "); - } else { - System.out.println("titles and values are not equal"); - System.out.println("titles: "+titles.length + " - values: " +values.length); - for (String title : titles) - System.out.println("title: "+title); - } - return sb.toString(); - } - - public static String generateNodeSetQuery(String[] titles, String[] values){ - - StringBuilder sb = new StringBuilder(); - - if (titles.length == values.length){ - sb.append("MATCH (n) where n.id = ").append("\'"+values[0].substring(1, values[0].length() - 1)+"\'").append(" SET "); - - boolean first = true; - - for (int i = 2; i < values.length; i++){ - if(!first) - sb.append(" AND "); - first = false; - String text = values[i].substring(1, values[i].length() - 1).replaceAll("\"\"","\"").replaceAll("\\\\", "\\\\\\\\").replaceAll("\'","\\\\'"); - sb.append("n.").append("`"+titles[i].substring(1, titles[i].length() - 1).split(":")[0].replaceAll("\"\"","\"")+"`") - .append(" = ").append(convertToJSONArray("\'"+text+"\'")); + public static void generateNEO(List files, Session session) throws IOException { + for (File file : files){ + if((file.getName().contains("_edges")) || !file.getName().endsWith(".csv")) + continue; + fr = new FileReader(file.getAbsolutePath()); + br = new BufferedReader(fr); + String line = br.readLine(); + String[] titles = {}; + if (line != null) + titles = line.split(",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)"); + String[] pieces = null; + while((line = br.readLine())!=null){ + System.out.println(line); + pieces = split(line,",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)"); + String query = generateNodeCreationQuery(titles,pieces); + System.out.println("query: "+query); + try (Transaction tx = session.beginTransaction()) { + tx.run(query); + tx.commit(); + } catch(Exception e){ + e.printStackTrace(); + } } - - } - - return sb.toString(); - } - - public static String generateRelationCreationQuery(String[] titles, String[] values){ - StringBuilder sb = new StringBuilder(); - - if (titles.length == values.length){ - sb.append("MATCH (n {id: "+"\'"+values[0].substring(1, values[0].length() - 1)+"\'"+"}),") - .append("(m {id: "+"\'"+values[2].substring(1, values[2].length() - 1)+"\'"+"}) ") - .append("CREATE (n)-[:") - .append("`"+values[1].substring(1, values[1].length() - 1).replace("|","`:`")+"`") - .append("]->(m)"); } - return sb.toString(); - } - - public static String convertToJSONArray(String input){ - if (input.contains("|")){ - input = input.substring(1,input.length()-1); - String[] sarray = input.split("\\|"); - StringBuilder sb = new StringBuilder(); - sb.append("["); - for (int i = 0;i files = showFiles(dir.listFiles()); - try (var driver = GraphDatabase.driver(dbUri, AuthTokens.basic(dbUser, dbPassword))) { + try (var driver = cmd.hasOption("a") ? 
GraphDatabase.driver(dbUri, AuthTokens.basic(dbUser, dbPassword)) : GraphDatabase.driver(dbUri)) { driver.verifyConnectivity(); - - // import org.neo4j.driver.SessionConfig - - try (var session = driver.session(SessionConfig.builder().withDatabase("neo4j").build())) { - // session usage + try (var session = driver.session(SessionConfig.builder().withDatabase(db).build())) { try{ session.run("CREATE CONSTRAINT FOR (n:Ontology) REQUIRE n.id IS UNIQUE"); session.run("CREATE CONSTRAINT FOR (n:OntologyEntity) REQUIRE n.id IS UNIQUE"); @@ -214,44 +177,13 @@ public static void main(String... args) throws IOException { } catch(Exception e){ e.printStackTrace(); } - - for (File file : files){ - if((!file.getName().contains("_edges")) || !file.getName().endsWith(".csv")) - continue; - fr = new FileReader(file.getAbsolutePath()); - br = new BufferedReader(fr); - String line = br.readLine(); - String[] titles = {}; - if (line != null) - titles = line.split(",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)"); - String[] pieces = null; - while((line = br.readLine())!=null){ - System.out.println(line); - pieces = line.split(",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)"); - System.out.println("2"); - - System.out.println("file: "+file.getName()); - - String query = generateRelationCreationQuery(titles,pieces); - //String query2 = generateSetQuery(titles,pieces); - System.out.println("query: "+query); - //System.out.println("query2: "+query2); - - try (Transaction tx = session.beginTransaction()) { - tx.run(query); - tx.commit(); - tx.close(); - // use tx.run() to run queries - // tx.commit() to commit the transaction - // tx.rollback() to rollback the transaction - } catch(Exception e){ - e.printStackTrace(); - } - System.gc(); - } - } + System.out.println("kamil"); + if(cmd.hasOption("i")) + generateNEO(files,session); + else + for(String ontology : ontologiesToBeRemoved.split(",")) + session.run(generateOntologyDeleteQuery(ontology)); } } - System.out.println("kamil"); } } diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java new file mode 100644 index 000000000..5cdb7c801 --- /dev/null +++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java @@ -0,0 +1,121 @@ +package uk.ac.ebi.spot.csv2neo; + +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * @author Erhun Giray TUNCAY + * @email giray.tuncay@tib.eu + * TIB-Leibniz Information Center for Science and Technology + */ +public class QueryGeneration { + + public static String generateNodeCreationQuery(String[] titles, String[] values){ + + StringBuilder sb = new StringBuilder(); + + if (titles.length == values.length) { + + sb.append("CREATE (") + .append(":") + .append("`"+values[1].substring(1, values[1].length() - 1).replace("|","`:`")+"`") + .append(" {"); + sb.append("id: ").append("\'"+values[0].substring(1, values[0].length() - 1)+"\'"); + + for (int i = 2; i < values.length; i++) { + String text = values[i].substring(1, values[i].length() - 1).replaceAll("\"\"","\"").replaceAll("\\\\", "\\\\\\\\").replaceAll("\'","\\\\'"); + sb.append(", ") + .append("`"+titles[i].substring(1, titles[i].length() - 1).split(":")[0].replaceAll("\"\"","\"")+"`") + .append(": ") + .append(convertToJSONArray("\'"+text+"\'")); + } + + sb.append("}") + .append(")") + .append(" "); + } else { + System.out.println("titles and values are not equal"); + System.out.println("titles: "+titles.length + " - values: " +values.length); + for (String 
title : titles) + System.out.println("title: "+title); + } + return sb.toString(); + } + + public static String generateNodeSetQuery(String[] titles, String[] values){ + + StringBuilder sb = new StringBuilder(); + + if (titles.length == values.length){ + sb.append("MATCH (n) where n.id = ").append("\'"+values[0].substring(1, values[0].length() - 1)+"\'").append(" SET "); + + boolean first = true; + + for (int i = 2; i < values.length; i++){ + if(!first) + sb.append(" AND "); + first = false; + String text = values[i].substring(1, values[i].length() - 1).replaceAll("\"\"","\"").replaceAll("\\\\", "\\\\\\\\").replaceAll("\'","\\\\'"); + sb.append("n.").append("`"+titles[i].substring(1, titles[i].length() - 1).split(":")[0].replaceAll("\"\"","\"")+"`") + .append(" = ").append(convertToJSONArray("\'"+text+"\'")); + } + + } + + return sb.toString(); + } + + public static String generateRelationCreationQuery(String[] titles, String[] values){ + StringBuilder sb = new StringBuilder(); + + if (titles.length == values.length){ + sb.append("MATCH (n {id: "+"\'"+values[0].substring(1, values[0].length() - 1)+"\'"+"}),") + .append("(m {id: "+"\'"+values[2].substring(1, values[2].length() - 1)+"\'"+"}) ") + .append("CREATE (n)-[:") + .append("`"+values[1].substring(1, values[1].length() - 1).replace("|","`:`")+"`") + .append("]->(m)"); + } + + return sb.toString(); + } + + public static String generateOntologyDeleteQuery(String ontologyPrefix){ + return "MATCH (n) where n.id STARTS WITH '"+ontologyPrefix+"' DETACH DELETE n"; + } + + public static String convertToJSONArray(String input){ + if (input.contains("|")){ + input = input.substring(1,input.length()-1); + String[] sarray = input.split("\\|"); + StringBuilder sb = new StringBuilder(); + sb.append("["); + for (int i = 0;i Date: Tue, 28 May 2024 14:26:41 +0200 Subject: [PATCH 047/146] added commons cli dependency --- dataload/csv2neo/pom.xml | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/dataload/csv2neo/pom.xml b/dataload/csv2neo/pom.xml index 803f87766..b6bd05a38 100644 --- a/dataload/csv2neo/pom.xml +++ b/dataload/csv2neo/pom.xml @@ -21,8 +21,13 @@ neo4j-java-driver 5.19.0 - - + + commons-cli + commons-cli + 1.5.0 + compile + + From fef2e408be489162e20420fd9744a50d9d88afb2 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Tue, 28 May 2024 16:49:16 +0200 Subject: [PATCH 048/146] changed condition for input files in #8 --- .../src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java index 399d8af50..8c675d629 100644 --- a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java +++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java @@ -38,7 +38,8 @@ public static List showFiles(File[] files) throws IOException { public static void generateNEO(List files, Session session) throws IOException { for (File file : files){ - if((file.getName().contains("_edges")) || !file.getName().endsWith(".csv")) + if(!(file.getName().contains("_ontologies") || file.getName().contains("_properties") + || file.getName().contains("_individuals") || file.getName().contains("_classes")) || !file.getName().endsWith(".csv")) continue; fr = new FileReader(file.getAbsolutePath()); br = new BufferedReader(fr); @@ -177,7 +178,7 @@ public static void main(String... 
args) throws IOException, ParseException { } catch(Exception e){ e.printStackTrace(); } - System.out.println("kamil"); + System.out.println("Start Neo4J Modification..."); if(cmd.hasOption("i")) generateNEO(files,session); else From f0bbf594190ae4c6f06acaed68c57df6227fa042 Mon Sep 17 00:00:00 2001 From: deepananbu Date: Wed, 29 May 2024 12:26:48 +0200 Subject: [PATCH 049/146] Added null check while parsing anootators --- .../annotators/DisjointWithAnnotator.java | 65 ++++++++++--------- .../rdf2json/annotators/RelatedAnnotator.java | 2 +- 2 files changed, 37 insertions(+), 30 deletions(-) diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/DisjointWithAnnotator.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/DisjointWithAnnotator.java index eb6a761a6..6476954dd 100644 --- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/DisjointWithAnnotator.java +++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/DisjointWithAnnotator.java @@ -1,4 +1,5 @@ package uk.ac.ebi.rdf2json.annotators; + import java.util.*; import java.util.stream.Collectors; @@ -16,56 +17,62 @@ public static void annotateDisjointWith(OntologyGraph graph) { long startTime3 = System.nanoTime(); - for(String id : graph.nodes.keySet()) { + for (String id : graph.nodes.keySet()) { OntologyNode c = graph.nodes.get(id); if (c.types.contains(OntologyNode.NodeType.ALL_DISJOINT_CLASSES)) { - PropertyValue membersList = c.properties.getPropertyValue("http://www.w3.org/2002/07/owl#members"); - List members = RdfListEvaluator.evaluateRdfList(graph.getNodeForPropertyValue(membersList), graph); + PropertyValue membersList = c.properties.getPropertyValue("http://www.w3.org/2002/07/owl#members"); + List members = RdfListEvaluator + .evaluateRdfList(graph.getNodeForPropertyValue(membersList), graph); - List classNodes = members.stream().map(val -> graph.getNodeForPropertyValue(val)).collect(Collectors.toList()); + List classNodes = members.stream().map(val -> graph.getNodeForPropertyValue(val)) + .collect(Collectors.toList()); - for(OntologyNode classNodeA : classNodes) { - for(OntologyNode classNodeB : classNodes) { - if(classNodeB.uri != classNodeA.uri) { + for (OntologyNode classNodeA : classNodes) { + for (OntologyNode classNodeB : classNodes) { + if (classNodeA != null && classNodeB != null && classNodeB.uri != classNodeA.uri) { classNodeA.properties.addProperty("http://www.w3.org/2002/07/owl#disjointWith", - PropertyValueURI.fromUri(classNodeB.uri)); + PropertyValueURI.fromUri(classNodeB.uri)); } } } } else if (c.types.contains(OntologyNode.NodeType.ALL_DISJOINT_PROPERTIES)) { - PropertyValue membersList = c.properties.getPropertyValue("http://www.w3.org/2002/07/owl#members"); - List members = RdfListEvaluator.evaluateRdfList(graph.getNodeForPropertyValue(membersList), graph); + PropertyValue membersList = c.properties.getPropertyValue("http://www.w3.org/2002/07/owl#members"); + List members = RdfListEvaluator + .evaluateRdfList(graph.getNodeForPropertyValue(membersList), graph); - List propertyNodes = members.stream().map(val -> graph.getNodeForPropertyValue(val)).collect(Collectors.toList()); + List propertyNodes = members.stream().map(val -> graph.getNodeForPropertyValue(val)) + .collect(Collectors.toList()); - for(OntologyNode propertyNodeA : propertyNodes) { - for(OntologyNode propertyNodeB : propertyNodes) { - if(propertyNodeB.uri != propertyNodeA.uri) { + for (OntologyNode propertyNodeA : propertyNodes) { + for (OntologyNode propertyNodeB : propertyNodes) { + 
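Reviewer note (not part of the patch): this commit adds classNodeA != null && classNodeB != null to the ALL_DISJOINT_CLASSES branch above and, further down, a membersList null check to the ALL_DIFFERENT branch, but the ALL_DISJOINT_PROPERTIES loop continuing below still dereferences its nodes unguarded, so a members entry that graph.getNodeForPropertyValue() cannot resolve would presumably still throw here. The analogous guard would be:

    if (propertyNodeA != null && propertyNodeB != null
            && propertyNodeB.uri != propertyNodeA.uri) {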
if (propertyNodeB.uri != propertyNodeA.uri) { propertyNodeA.properties.addProperty("http://www.w3.org/2002/07/owl#propertyDisjointWith", - PropertyValueURI.fromUri(propertyNodeB.uri)); + PropertyValueURI.fromUri(propertyNodeB.uri)); } } } - - } else if (c.types.contains(OntologyNode.NodeType.ALL_DIFFERENT)) { - PropertyValue membersList = c.properties.getPropertyValue("http://www.w3.org/2002/07/owl#distinctMembers"); - List members = RdfListEvaluator.evaluateRdfList(graph.getNodeForPropertyValue(membersList), graph); - - List individualNodes = members.stream() - .map(val -> graph.getNodeForPropertyValue(val)) - .filter(val -> val != null) - .collect(Collectors.toList()); + } else if (c.types.contains(OntologyNode.NodeType.ALL_DIFFERENT)) { - for(OntologyNode individualNodeA : individualNodes) { - for(OntologyNode individualNodeB : individualNodes) { - if(individualNodeB.uri != individualNodeA.uri) { - individualNodeA.properties.addProperty("http://www.w3.org/2002/07/owl#differentFrom", - PropertyValueURI.fromUri(individualNodeB.uri)); + PropertyValue membersList = c.properties + .getPropertyValue("http://www.w3.org/2002/07/owl#distinctMembers"); + if (membersList != null) { + List members = RdfListEvaluator + .evaluateRdfList(graph.getNodeForPropertyValue(membersList), graph); + + List individualNodes = members.stream().map(val -> graph.getNodeForPropertyValue(val)) + .filter(val -> val != null).collect(Collectors.toList()); + + for (OntologyNode individualNodeA : individualNodes) { + for (OntologyNode individualNodeB : individualNodes) { + if (individualNodeB.uri != individualNodeA.uri) { + individualNodeA.properties.addProperty("http://www.w3.org/2002/07/owl#differentFrom", + PropertyValueURI.fromUri(individualNodeB.uri)); + } } } } diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/RelatedAnnotator.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/RelatedAnnotator.java index 6df7eb8bd..63ce33abf 100644 --- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/RelatedAnnotator.java +++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/RelatedAnnotator.java @@ -258,7 +258,7 @@ private static void annotateRelated_Class_subClassOf_Restriction_hasValue(Ontolo OntologyNode fillerNode = graph.nodes.get( ((PropertyValueURI) filler).getUri() ); - if(fillerNode.types.contains(OntologyNode.NodeType.INDIVIDUAL)) { + if(fillerNode != null && fillerNode.types.contains(OntologyNode.NodeType.INDIVIDUAL)) { // fillerNode is an individual fillerNode.properties.addProperty("relatedTo", new PropertyValueRelated(fillerRestriction, propertyUri, classNode)); classNode.properties.addProperty("relatedFrom", new PropertyValueRelated(fillerRestriction, propertyUri, fillerNode)); From 594c388e54b028764a06467cccf9c9e9f8016a1b Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Mon, 3 Jun 2024 19:19:28 +0200 Subject: [PATCH 050/146] handled exceptions and made transaction safe mode optional --- .../uk/ac/ebi/spot/csv2neo/ImportCSV.java | 63 +++++++++++++------ 1 file changed, 43 insertions(+), 20 deletions(-) diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java index 8c675d629..630d1649e 100644 --- a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java +++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java @@ -36,7 +36,7 @@ public static List showFiles(File[] files) 
throws IOException { return fileList; } - public static void generateNEO(List files, Session session) throws IOException { + public static void generateCreationQueries(List files, Session session, boolean safe) throws IOException { for (File file : files){ if(!(file.getName().contains("_ontologies") || file.getName().contains("_properties") || file.getName().contains("_individuals") || file.getName().contains("_classes")) || !file.getName().endsWith(".csv")) @@ -53,12 +53,19 @@ public static void generateNEO(List files, Session session) throws IOExcep pieces = split(line,",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)"); String query = generateNodeCreationQuery(titles,pieces); System.out.println("query: "+query); - try (Transaction tx = session.beginTransaction()) { - tx.run(query); - tx.commit(); - } catch(Exception e){ - e.printStackTrace(); - } + if(safe){ + try (Transaction tx = session.beginTransaction()) { + tx.run(query); + tx.commit(); + } catch(Exception e){ + e.printStackTrace(); + } + } else + try{ + session.run(query); + } catch (Exception e){ + e.printStackTrace(); + } } } @@ -77,12 +84,20 @@ public static void generateNEO(List files, Session session) throws IOExcep pieces = line.split(",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)"); String query = generateRelationCreationQuery(titles,pieces); System.out.println("query: "+query); - try (Transaction tx = session.beginTransaction()) { - tx.run(query); - tx.commit(); - } catch(Exception e){ - e.printStackTrace(); - } + if(safe){ + try (Transaction tx = session.beginTransaction()) { + tx.run(query); + tx.commit(); + } catch(Exception e){ + e.printStackTrace(); + } + } else + try{ + session.run(query); + } catch (Exception e){ + e.printStackTrace(); + } + } } } @@ -151,6 +166,7 @@ private static Options getOptions() { options.addOption("uri", "databaseuri",true, "neo4j database uri"); options.addOption("db", "database",true, "neo4j database name"); options.addOption("d", "directory",true, "neo4j csv import directory"); + options.addOption("s", "safe",false, "execute each neo4j query in transactions or the session"); return options; } @@ -165,9 +181,6 @@ public static void main(String... args) throws IOException, ParseException { final String directory = cmd.hasOption("d") ? cmd.getOptionValue("d") : "/tmp/out"; final String ontologiesToBeRemoved = cmd.hasOption("rm") ? cmd.getOptionValue("rm") : ""; - File dir = new File(directory); - List files = showFiles(dir.listFiles()); - try (var driver = cmd.hasOption("a") ? GraphDatabase.driver(dbUri, AuthTokens.basic(dbUser, dbPassword)) : GraphDatabase.driver(dbUri)) { driver.verifyConnectivity(); try (var session = driver.session(SessionConfig.builder().withDatabase(db).build())) { @@ -179,11 +192,21 @@ public static void main(String... 
args) throws IOException, ParseException { e.printStackTrace(); } System.out.println("Start Neo4J Modification..."); - if(cmd.hasOption("i")) - generateNEO(files,session); - else + if(cmd.hasOption("i")){ + File dir = new File(directory); + List files = showFiles(dir.listFiles()); + if(cmd.hasOption("s")) + generateCreationQueries(files,session,true); + else + generateCreationQueries(files,session,false); + } else for(String ontology : ontologiesToBeRemoved.split(",")) - session.run(generateOntologyDeleteQuery(ontology)); + try { + session.run(generateOntologyDeleteQuery(ontology)); + } catch (Exception e){ + e.printStackTrace(); + } + } } } From 80ddb9dfc36e0f764ba2d82b74edb49f53b17485 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Fri, 7 Jun 2024 12:57:23 +0200 Subject: [PATCH 051/146] narrowed down relationship creation query for #14 --- .../ac/ebi/spot/csv2neo/QueryGeneration.java | 32 ++++++++++++++++--- 1 file changed, 28 insertions(+), 4 deletions(-) diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java index 5cdb7c801..fb078979c 100644 --- a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java +++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java @@ -36,8 +36,8 @@ public static String generateNodeCreationQuery(String[] titles, String[] values) } else { System.out.println("titles and values are not equal"); System.out.println("titles: "+titles.length + " - values: " +values.length); - for (String title : titles) - System.out.println("title: "+title); + for (String value : values) + System.out.println("value: "+value); } return sb.toString(); } @@ -69,8 +69,9 @@ public static String generateRelationCreationQuery(String[] titles, String[] val StringBuilder sb = new StringBuilder(); if (titles.length == values.length){ - sb.append("MATCH (n {id: "+"\'"+values[0].substring(1, values[0].length() - 1)+"\'"+"}),") - .append("(m {id: "+"\'"+values[2].substring(1, values[2].length() - 1)+"\'"+"}) ") + sb.append("MATCH (n"+idToLabel(values[0])+" {id: "+"\'"+values[0].substring(1, values[0].length() - 1)+"\'"+"}),") + .append("(m"+idToLabel(values[2])+" {id: "+"\'"+values[2].substring(1, values[2].length() - 1)+"\'"+"}) ") + .append("WHERE n.id STARTS WITH '"+values[0].split("\\+")[0]+"' AND m.id STARTS WITH '"+values[2].split("\\+")[0]+"' ") .append("CREATE (n)-[:") .append("`"+values[1].substring(1, values[1].length() - 1).replace("|","`:`")+"`") .append("]->(m)"); @@ -79,6 +80,17 @@ public static String generateRelationCreationQuery(String[] titles, String[] val return sb.toString(); } + public static String generateRelationCreationQuery2(String[] titles, String[] values){ + StringBuilder sb = new StringBuilder(); + if (titles.length == values.length){ + sb.append("MATCH (n {id: "+"\'"+values[0].substring(1, values[0].length() - 1)+"\'"+"})-[:") + .append("`"+values[1].substring(1, values[1].length() - 1).replace("|","`:`")+"`") + .append("]->(m {id: "+"\'"+values[2].substring(1, values[2].length() - 1)+"\'"+"})"); + } + + return sb.toString(); + } + public static String generateOntologyDeleteQuery(String ontologyPrefix){ return "MATCH (n) where n.id STARTS WITH '"+ontologyPrefix+"' DETACH DELETE n"; } @@ -118,4 +130,16 @@ public static String decode(String input) { return decodedString.toString(); } + public static String idToLabel(String id){ + String label = switch 
(id.split("\\+")[1]) { + case "class" -> ":OntologyClass"; + case "entity" -> ":OntologyEntity"; + case "ontology" -> ":Ontology"; + case "property" -> ":OntologyProperty"; + case "individual" -> ":OntologyIndividual"; + default -> ""; + }; + return label; + } + } From 9124d458d17247771b876f5c07913c1b30d17ac6 Mon Sep 17 00:00:00 2001 From: deepananbu Date: Wed, 12 Jun 2024 10:19:52 +0200 Subject: [PATCH 052/146] added logic to convert ontologies to RDF/XML format before parsing --- dataload/rdf2json/pom.xml | 212 ++- .../uk/ac/ebi/rdf2json/OntologyGraph.java | 1627 +++++++++-------- 2 files changed, 997 insertions(+), 842 deletions(-) diff --git a/dataload/rdf2json/pom.xml b/dataload/rdf2json/pom.xml index 9d30b8a3c..49b16b1f0 100644 --- a/dataload/rdf2json/pom.xml +++ b/dataload/rdf2json/pom.xml @@ -1,117 +1,129 @@ - 4.0.0 + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + 4.0.0 - uk.ac.ebi.spot - rdf2json - 1.0-SNAPSHOT - jar + uk.ac.ebi.spot + rdf2json + 1.0-SNAPSHOT + jar - - - org.apache.jena - apache-jena-libs - 3.17.0 - pom - - - com.google.code.gson - gson - 2.7 - - - com.google.guava - guava - 31.1-jre - - + + + org.apache.jena + apache-jena-libs + 3.17.0 + pom + + + com.google.code.gson + gson + 2.7 + + + com.google.guava + guava + 31.1-jre + + + org.obolibrary.robot + robot-core + 1.9.6 + + - - 11 - 11 - + + 11 + 11 + - - + + - - org.apache.maven.plugins - maven-shade-plugin - - false + + org.apache.maven.plugins + maven-shade-plugin + + false - - - - uk.ac.ebi.rdf2json.RDF2JSON - - - true - - - - - - false - - + + + uk.ac.ebi.rdf2json.RDF2JSON + + + true + + + + + + false + + - - - *:* - - - META-INF/*.SF - META-INF/*.DSA - META-INF/*.RSA - - - - - - - package - - - shade - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + META-INF/*.SF + META-INF/*.DSA + META-INF/*.RSA + + + + + + + package + + + shade + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java index 9963f1a9c..e06b95815 100644 --- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java +++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java @@ -1,853 +1,996 @@ package uk.ac.ebi.rdf2json; -import com.google.gson.stream.JsonWriter; +import static uk.ac.ebi.rdf2json.OntologyNode.NodeType.ANNOTATION_PROPERTY; +import static uk.ac.ebi.rdf2json.OntologyNode.NodeType.CLASS; +import static uk.ac.ebi.rdf2json.OntologyNode.NodeType.DATA_PROPERTY; +import static uk.ac.ebi.rdf2json.OntologyNode.NodeType.ENTITY; +import static uk.ac.ebi.rdf2json.OntologyNode.NodeType.INDIVIDUAL; +import static uk.ac.ebi.rdf2json.OntologyNode.NodeType.OBJECT_PROPERTY; +import static uk.ac.ebi.rdf2json.OntologyNode.NodeType.PROPERTY; -import org.apache.jena.riot.RDFLanguages; -import uk.ac.ebi.rdf2json.annotators.*; -import uk.ac.ebi.rdf2json.helpers.RdfListEvaluator; -import uk.ac.ebi.rdf2json.properties.*; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.net.HttpURLConnection; +import java.net.MalformedURLException; +import java.net.URL; +import java.net.URLConnection; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.Collection; 
+import java.util.Date; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.TreeMap; +import java.util.TreeSet; +import java.util.stream.Collectors; + +import javax.net.ssl.HttpsURLConnection; -import org.apache.jena.riot.Lang; import org.apache.jena.graph.Node; import org.apache.jena.graph.Triple; +import org.apache.jena.riot.Lang; +import org.apache.jena.riot.RDFLanguages; import org.apache.jena.riot.RDFParser; import org.apache.jena.riot.RDFParserBuilder; import org.apache.jena.riot.system.StreamRDF; import org.apache.jena.sparql.core.Quad; +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.formats.OWLXMLDocumentFormat; +import org.semanticweb.owlapi.model.OWLDocumentFormat; +import org.semanticweb.owlapi.model.OWLOntology; +import org.semanticweb.owlapi.model.OWLOntologyCreationException; +import org.semanticweb.owlapi.model.OWLOntologyManager; +import org.semanticweb.owlapi.model.OWLOntologyStorageException; -import java.io.IOException; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.nio.file.Files; -import java.nio.file.Paths; -import java.util.*; -import java.util.stream.Collectors; -import static uk.ac.ebi.rdf2json.OntologyNode.NodeType.*; +import com.google.gson.stream.JsonWriter; + +import uk.ac.ebi.rdf2json.annotators.AncestorsAnnotator; +import uk.ac.ebi.rdf2json.annotators.ConfigurablePropertyAnnotator; +import uk.ac.ebi.rdf2json.annotators.DefinitionAnnotator; +import uk.ac.ebi.rdf2json.annotators.DirectParentsAnnotator; +import uk.ac.ebi.rdf2json.annotators.DisjointWithAnnotator; +import uk.ac.ebi.rdf2json.annotators.EquivalenceAnnotator; +import uk.ac.ebi.rdf2json.annotators.HasIndividualsAnnotator; +import uk.ac.ebi.rdf2json.annotators.HierarchicalParentsAnnotator; +import uk.ac.ebi.rdf2json.annotators.HierarchyFlagsAnnotator; +import uk.ac.ebi.rdf2json.annotators.HierarchyMetricsAnnotator; +import uk.ac.ebi.rdf2json.annotators.InverseOfAnnotator; +import uk.ac.ebi.rdf2json.annotators.IsObsoleteAnnotator; +import uk.ac.ebi.rdf2json.annotators.LabelAnnotator; +import uk.ac.ebi.rdf2json.annotators.NegativePropertyAssertionAnnotator; +import uk.ac.ebi.rdf2json.annotators.OboSynonymTypeNameAnnotator; +import uk.ac.ebi.rdf2json.annotators.OntologyMetadataAnnotator; +import uk.ac.ebi.rdf2json.annotators.PreferredRootsAnnotator; +import uk.ac.ebi.rdf2json.annotators.ReifiedPropertyAnnotator; +import uk.ac.ebi.rdf2json.annotators.RelatedAnnotator; +import uk.ac.ebi.rdf2json.annotators.SearchableAnnotationValuesAnnotator; +import uk.ac.ebi.rdf2json.annotators.ShortFormAnnotator; +import uk.ac.ebi.rdf2json.annotators.SynonymAnnotator; +import uk.ac.ebi.rdf2json.helpers.RdfListEvaluator; +import uk.ac.ebi.rdf2json.properties.PropertySet; +import uk.ac.ebi.rdf2json.properties.PropertyValue; +import uk.ac.ebi.rdf2json.properties.PropertyValueAncestors; +import uk.ac.ebi.rdf2json.properties.PropertyValueBNode; +import uk.ac.ebi.rdf2json.properties.PropertyValueLiteral; +import uk.ac.ebi.rdf2json.properties.PropertyValueRelated; +import uk.ac.ebi.rdf2json.properties.PropertyValueURI; public class OntologyGraph implements StreamRDF { - public Map config; - public List importUrls = new ArrayList<>(); - public Set languages = new TreeSet<>(); - public long sourceFileTimestamp; - - public int numberOfClasses = 0; - public int numberOfProperties = 0; - public int numberOfIndividuals = 0; - - private RDFParserBuilder createParser(Lang lang) { - - if(lang != null) { - 
return RDFParser.create() - .forceLang(lang) - .strict(false) - .checking(false); - } else { - return RDFParser.create() - .strict(false) - .checking(false); - } - } - - private void parseRDF(String url) { - - try { - if (loadLocalFiles && !url.contains("://")) { - System.out.println("Using local file for " + url); - sourceFileTimestamp = new File(url).lastModified(); - createParser(RDFLanguages.filenameToLang(url, Lang.RDFXML)) - .source(new FileInputStream(url)).parse(this); - } else { - if (downloadedPath != null) { - String existingDownload = downloadedPath + "/" + urlToFilename(url); - try { - FileInputStream is = new FileInputStream(existingDownload); - System.out.println("Using predownloaded file for " + url); - sourceFileTimestamp = new File(existingDownload).lastModified(); - Lang lang = null; - try { - String existingDownloadMimeType = Files.readString(Paths.get(existingDownload + ".mimetype")); - lang = RDFLanguages.contentTypeToLang(existingDownloadMimeType); - } catch(IOException ignored) { - } - if(lang == null) { - lang = Lang.RDFXML; - } - createParser(lang).source(is).parse(this); - } catch (Exception e) { - System.out.println("Downloading (not predownloaded) " + url); - sourceFileTimestamp = System.currentTimeMillis(); - createParser(null).source(url).parse(this); - } - } else { - System.out.println("Downloading (no predownload path provided) " + url); - sourceFileTimestamp = System.currentTimeMillis(); - createParser(null).source(url).parse(this); - } - } - } catch (FileNotFoundException e) { - throw new RuntimeException(e); - } - } - - private String urlToFilename(String url) { - return url.replaceAll("[^a-zA-Z0-9\\.\\-]", "_"); - } - - - private boolean loadLocalFiles; - - String downloadedPath; - - - OntologyGraph(Map config, boolean loadLocalFiles, boolean noDates, String downloadedPath) { - - this.loadLocalFiles = loadLocalFiles; - this.downloadedPath = downloadedPath; - - long startTime = System.nanoTime(); - - this.config = config; - - languages.add("en"); - - String url = (String) config.get("ontology_purl"); - - if(url == null) { - - Collection> products = - (Collection>) config.get("products"); - - if(products != null) { - for(Map product : products) { - - String purl = (String) product.get("ontology_purl"); - - if(purl != null && purl.endsWith(".owl")) { - url = purl; - break; - } - - } - } + public Map config; + public List importUrls = new ArrayList<>(); + public Set languages = new TreeSet<>(); + public long sourceFileTimestamp; + + public int numberOfClasses = 0; + public int numberOfProperties = 0; + public int numberOfIndividuals = 0; - } - - if(url == null) { - System.out.println("Could not determine URL for ontology " + (String)config.get("id")); - return; - } - - System.out.println("load ontology from: " + url); - parseRDF(url); - - // Before we evaluate imports, mark all the nodes so far as not imported - for(String id : nodes.keySet()) { - OntologyNode c = nodes.get(id); - if(c.uri != null) { - c.properties.addProperty("imported", PropertyValueLiteral.fromString("false")); - } - } + private RDFParserBuilder createParser(Lang lang) { + if (lang != null) { + return RDFParser.create().forceLang(lang).strict(false).checking(false); + } else { + return RDFParser.create().strict(false).checking(false); + } + } + + private void parseRDF(String url) { + + try { + if (loadLocalFiles && !url.contains("://")) { + System.out.println("Using local file for " + url); + sourceFileTimestamp = new File(url).lastModified(); + 
createParser(RDFLanguages.filenameToLang(url, Lang.RDFXML)).source(new FileInputStream(url)) + .parse(this); + } else { + if (downloadedPath != null) { + String existingDownload = downloadedPath + "/" + urlToFilename(url); + try { + FileInputStream is = new FileInputStream(existingDownload); + System.out.println("Using predownloaded file for " + url); + sourceFileTimestamp = new File(existingDownload).lastModified(); + Lang lang = null; + try { + String existingDownloadMimeType = Files + .readString(Paths.get(existingDownload + ".mimetype")); + lang = RDFLanguages.contentTypeToLang(existingDownloadMimeType); + } catch (IOException ignored) { + } + if (lang == null) { + lang = Lang.RDFXML; + } + createParser(lang).source(is).parse(this); + } catch (Exception e) { + System.out.println("Downloading (not predownloaded) " + url); + sourceFileTimestamp = System.currentTimeMillis(); + createParser(null).source(url).parse(this); + } + } else { + System.out.println("Downloading (no predownload path provided) " + url); + + + + /* + * String outputFile = "/home/anbalagand/rdftoJSON/testRobot/result"; + * OWLOntology ont = downloadToLocal(url, outputFile); OWLDocumentFormat odf = + * ont.getOWLOntologyManager().getOntologyFormat(ont); String lang1 = + * odf.getKey(); String ext = ".owl"; if(lang1.contains("Turtle")) ext = ".ttl"; + * url = outputFile+ext; + */ + + sourceFileTimestamp = System.currentTimeMillis(); + + + createParser(null).source(url).parse(this); + + } + } + } catch (FileNotFoundException e) { + throw new RuntimeException(e); + } catch (IOException e1) { + // TODO Auto-generated catch block + e1.printStackTrace(); + } + } + + private OWLOntology downloadToLocal(String url, String outputFile) throws IOException { + OWLOntologyManager ontManager = OWLManager.createOWLOntologyManager(); + FileOutputStream fos = null; + OWLOntology ont = null; + InputStream is = null; + URLConnection con = null; + boolean isParserException = false; + try { + boolean isRDF = true; + boolean isDefaultURLFailed = false; + + try { + URL tempURL = new URL(url); + con = tempURL.openConnection(); + is = tempURL.openStream(); + } catch (IOException e) { + isDefaultURLFailed = true; + if (con instanceof HttpsURLConnection) { + url = url.replace("https:", "http:"); + } else if (con instanceof HttpURLConnection) { + url = url.replace("http:", "https:"); + } + + } + if (isDefaultURLFailed) { + try { + is = new URL(url).openStream(); + } catch (IOException e) { + e.printStackTrace(); + } + } + try { + ont = ontManager.loadOntologyFromOntologyDocument(is); + } catch (Exception e) { + isParserException = true; + if (con instanceof HttpsURLConnection) { + url = url.replace("https:", "http:"); + } else if (con instanceof HttpURLConnection) { + url = url.replace("http:", "https:"); + } + } + + if (isParserException) { + + try { + is = new URL(url).openStream(); + } catch (IOException e) { + e.printStackTrace(); + } + ont = ontManager.loadOntologyFromOntologyDocument(is); + } + + OWLDocumentFormat odf = ontManager.getOntologyFormat(ont); + + String lang1 = odf.getKey(); + String ext = ".owl"; + if (lang1.contains("Turtle")) + ext = ".ttl"; + else if (!lang1.contains("RDF")) { + isRDF = false; + OWLDocumentFormat odf1 = new OWLXMLDocumentFormat(); + fos = new FileOutputStream(outputFile + ext); + ont.saveOntology(odf1, fos); + } + if (isRDF) { + fos = new FileOutputStream(outputFile + ext); + ont.saveOntology(fos); + } + + } catch (OWLOntologyCreationException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + 
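Reviewer note (not part of the patch): when the detected format key contains neither "Turtle" nor "RDF" (e.g. OBO or OWL functional syntax), downloadToLocal() below saves the ontology as OWL/XML via OWLXMLDocumentFormat, but OWL/XML is not an RDF serialisation and Jena cannot parse it, so RDFXMLDocumentFormat is presumably what the commit subject ("convert ontologies to RDF/XML format before parsing") intends. A minimal sketch of that conversion, with a hypothetical output path:

    OWLOntologyManager man = OWLManager.createOWLOntologyManager();
    OWLOntology ont = man.loadOntologyFromOntologyDocument(IRI.create(url));
    man.saveOntology(ont, new RDFXMLDocumentFormat(),
            IRI.create(new File("/tmp/converted.owl").toURI()));

Note also that the only call site of downloadToLocal() appears to be the block commented out in parseRDF(), so this code path is not exercised by this commit.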
} catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (OWLOntologyStorageException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } finally { + if (fos != null) + fos.close(); + if (is != null) + is.close(); + + } + return ont; + } - while(importUrls.size() > 0) { - String importUrl = importUrls.get(0); - importUrls.remove(0); + private String urlToFilename(String url) { + return url.replaceAll("[^a-zA-Z0-9\\.\\-]", "_"); + } - System.out.println("import: " + importUrl); - parseRDF(importUrl); - } + private boolean loadLocalFiles; - // Now the imports are done, mark everything else as imported - for(String id : nodes.keySet()) { - OntologyNode c = nodes.get(id); - if(c.uri != null) { - if(!c.properties.hasProperty("imported")) { - c.properties.addProperty("imported", PropertyValueLiteral.fromString("true")); - } - } - } + String downloadedPath; - if(this.ontologyNode == null) { + OntologyGraph(Map config, boolean loadLocalFiles, boolean noDates, String downloadedPath) { - //// - //// There was no owl:Ontology. - //// Could be an RDFS "ontology", or schema.org, or just some garbage file that didn't have any ontology in it - //// + this.loadLocalFiles = loadLocalFiles; + this.downloadedPath = downloadedPath; - // Fallback 1: look for a single node without an rdf:type (fixes loading dcterms and dc elements rdf files) + long startTime = System.nanoTime(); - List nodesWithoutTypes = this.nodes.values().stream().filter( - node -> node.uri != null && !node.properties.hasProperty("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")) - .collect(Collectors.toList()); + this.config = config; - if(nodesWithoutTypes.size() == 1) { - this.ontologyNode = nodesWithoutTypes.get(0); - } + languages.add("en"); - if(this.ontologyNode == null) { + String url = (String) config.get("ontology_purl"); - // Fallback 2: fabricate an ontology node using the base_uri (fixes loading Schema.org rdf) + if (url == null) { - List baseUris = (List) this.config.get("base_uri"); + Collection> products = (Collection>) config.get("products"); - if(baseUris != null) { - this.ontologyNode = new OntologyNode(); - this.ontologyNode.uri = baseUris.get(0); - this.ontologyNode.types.add(OntologyNode.NodeType.ONTOLOGY); - this.nodes.put(baseUris.get(0), this.ontologyNode); - } + if (products != null) { + for (Map product : products) { - if(this.ontologyNode == null) { + String purl = (String) product.get("ontology_purl"); - // Fallback 3: fabricate an ontology node using the purl + if (purl != null && purl.endsWith(".owl")) { + url = purl; + break; + } - String purl = (String)this.config.get("ontology_purl"); + } + } - if(purl != null) { - this.ontologyNode = new OntologyNode(); - this.ontologyNode.uri = purl; - this.ontologyNode.types.add(OntologyNode.NodeType.ONTOLOGY); - this.nodes.put(purl, this.ontologyNode); - } - } - } - } + } - ontologyNode.properties.addProperty( - "numberOfEntities", PropertyValueLiteral.fromString(Integer.toString( - numberOfClasses + numberOfProperties + numberOfIndividuals))); + if (url == null) { + System.out.println("Could not determine URL for ontology " + (String) config.get("id")); + return; + } - ontologyNode.properties.addProperty( - "numberOfClasses", PropertyValueLiteral.fromString(Integer.toString(numberOfClasses))); + System.out.println("load ontology from: " + url); + parseRDF(url); - ontologyNode.properties.addProperty( - "numberOfProperties", PropertyValueLiteral.fromString(Integer.toString(numberOfProperties))); + // Before we 
evaluate imports, mark all the nodes so far as not imported + for (String id : nodes.keySet()) { + OntologyNode c = nodes.get(id); + if (c.uri != null) { + c.properties.addProperty("imported", PropertyValueLiteral.fromString("false")); + } + } - ontologyNode.properties.addProperty( - "numberOfIndividuals", PropertyValueLiteral.fromString(Integer.toString(numberOfIndividuals))); + while (importUrls.size() > 0) { + String importUrl = importUrls.get(0); + importUrls.remove(0); + System.out.println("import: " + importUrl); + parseRDF(importUrl); + } - if(!noDates) { - String now = java.time.LocalDateTime.now().toString(); + // Now the imports are done, mark everything else as imported + for (String id : nodes.keySet()) { + OntologyNode c = nodes.get(id); + if (c.uri != null) { + if (!c.properties.hasProperty("imported")) { + c.properties.addProperty("imported", PropertyValueLiteral.fromString("true")); + } + } + } - ontologyNode.properties.addProperty( - "loaded", PropertyValueLiteral.fromString(now)); - - ontologyNode.properties.addProperty( - "sourceFileTimestamp", PropertyValueLiteral.fromString(new Date(sourceFileTimestamp).toString())); - } + if (this.ontologyNode == null) { - for(String language : languages) { - ontologyNode.properties.addProperty("language", PropertyValueLiteral.fromString(language)); - } + //// + //// There was no owl:Ontology. + //// Could be an RDFS "ontology", or schema.org, or just some garbage file that + //// didn't have any ontology in it + //// + // Fallback 1: look for a single node without an rdf:type (fixes loading dcterms + // and dc elements rdf files) - long endTime = System.nanoTime(); - System.out.println("load ontology: " + ((endTime - startTime) / 1000 / 1000 / 1000)); + List nodesWithoutTypes = this.nodes.values().stream() + .filter(node -> node.uri != null + && !node.properties.hasProperty("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")) + .collect(Collectors.toList()); - SearchableAnnotationValuesAnnotator.annotateSearchableAnnotationValues(this); - InverseOfAnnotator.annotateInverseOf(this); - NegativePropertyAssertionAnnotator.annotateNegativePropertyAssertions(this); - OboSynonymTypeNameAnnotator.annotateOboSynonymTypeNames(this); // n.b. 
this one labels axioms so must run before the ReifiedPropertyAnnotator - DirectParentsAnnotator.annotateDirectParents(this); - RelatedAnnotator.annotateRelated(this); - HierarchicalParentsAnnotator.annotateHierarchicalParents(this); // must run after RelatedAnnotator - AncestorsAnnotator.annotateAncestors(this); - HierarchyMetricsAnnotator.annotateHierarchyMetrics(this); // must run after HierarchicalParentsAnnotator - ShortFormAnnotator.annotateShortForms(this); - DefinitionAnnotator.annotateDefinitions(this); - SynonymAnnotator.annotateSynonyms(this); - ReifiedPropertyAnnotator.annotateReifiedProperties(this); - OntologyMetadataAnnotator.annotateOntologyMetadata(this); - HierarchyFlagsAnnotator.annotateHierarchyFlags(this); // must run after DirectParentsAnnotator and HierarchicalParentsAnnotator - IsObsoleteAnnotator.annotateIsObsolete(this); - LabelAnnotator.annotateLabels(this); // must run after ShortFormAnnotator - ConfigurablePropertyAnnotator.annotateConfigurableProperties(this); - PreferredRootsAnnotator.annotatePreferredRoots(this); - DisjointWithAnnotator.annotateDisjointWith(this); - HasIndividualsAnnotator.annotateHasIndividuals(this); - EquivalenceAnnotator.annotateEquivalance(this); - - } - - - static final Set classTypes = new TreeSet<>(Set.of(ENTITY, CLASS)); - static final Set dataPropertyTypes = new TreeSet<>(Set.of(ENTITY, PROPERTY, DATA_PROPERTY)); - - static final Set objectPropertyTypes = new TreeSet<>(Set.of(ENTITY, PROPERTY, OBJECT_PROPERTY)); - static final Set annotationPropertyTypes = new TreeSet<>(Set.of(ENTITY, PROPERTY, ANNOTATION_PROPERTY)); - - static final Set propertyTypes = new TreeSet<>(Set.of(ENTITY, PROPERTY)); - static final Set individualTypes = new TreeSet<>(Set.of(ENTITY, INDIVIDUAL)); + if (nodesWithoutTypes.size() == 1) { + this.ontologyNode = nodesWithoutTypes.get(0); + } - public void write(JsonWriter writer) throws IOException { + if (this.ontologyNode == null) { - writer.beginObject(); + // Fallback 2: fabricate an ontology node using the base_uri (fixes loading + // Schema.org rdf) - writer.name("ontologyId"); - writer.value(((String) config.get("id")).toLowerCase()); + List baseUris = (List) this.config.get("base_uri"); - writer.name("iri"); - writer.value(ontologyNode.uri); + if (baseUris != null) { + this.ontologyNode = new OntologyNode(); + this.ontologyNode.uri = baseUris.get(0); + this.ontologyNode.types.add(OntologyNode.NodeType.ONTOLOGY); + this.nodes.put(baseUris.get(0), this.ontologyNode); + } - for(String configKey : config.keySet()) { - Object configVal = config.get(configKey); + if (this.ontologyNode == null) { - // we include this (lowercased) as "ontologyId" rather than "id", - // so that the name "id" doesn't clash with downstream id fields in neo4j/solr - // - if(configKey.equals("id")) - continue; + // Fallback 3: fabricate an ontology node using the purl - // already included explicitly above - if(configKey.equals("ontologyId")) - continue; - - // don't print the iri from the config, we already printed the one from the OWL - // TODO: which one to keep, or should we keep both? 
- if(configKey.equals("iri")) - continue; + String purl = (String) this.config.get("ontology_purl"); - // annotated as hasPreferredRoot by PreferredRootsAnnotator, no need to duplicate - if(configKey.equals("preferred_root_term")) - continue; + if (purl != null) { + this.ontologyNode = new OntologyNode(); + this.ontologyNode.uri = purl; + this.ontologyNode.types.add(OntologyNode.NodeType.ONTOLOGY); + this.nodes.put(purl, this.ontologyNode); + } + } + } + } - // everything else from the config is stored as a normal property - writer.name(configKey); - writeGenericValue(writer, configVal); - } + ontologyNode.properties.addProperty("numberOfEntities", PropertyValueLiteral + .fromString(Integer.toString(numberOfClasses + numberOfProperties + numberOfIndividuals))); - writeProperties(writer, ontologyNode.properties, Set.of("ontology")); + ontologyNode.properties.addProperty("numberOfClasses", + PropertyValueLiteral.fromString(Integer.toString(numberOfClasses))); - writer.name("classes"); - writer.beginArray(); + ontologyNode.properties.addProperty("numberOfProperties", + PropertyValueLiteral.fromString(Integer.toString(numberOfProperties))); - for(String id : nodes.keySet()) { - OntologyNode c = nodes.get(id); - if (c.uri == null) { - // don't print bnodes at top level - continue; - } - if (c.types.contains(OntologyNode.NodeType.CLASS)) { - writeNode(writer, c, OntologyNode.NodeType.toString(classTypes)); - } - } + ontologyNode.properties.addProperty("numberOfIndividuals", + PropertyValueLiteral.fromString(Integer.toString(numberOfIndividuals))); - writer.endArray(); - - - writer.name("properties"); - writer.beginArray(); - - for(String id : nodes.keySet()) { - OntologyNode c = nodes.get(id); - if (c.uri == null) { - // don't print bnodes at top level - continue; - } - if (c.types.contains(OntologyNode.NodeType.OBJECT_PROPERTY)) { - writeNode(writer, c, OntologyNode.NodeType.toString(objectPropertyTypes)); - } else if (c.types.contains(OntologyNode.NodeType.ANNOTATION_PROPERTY)) { - writeNode(writer, c, OntologyNode.NodeType.toString(annotationPropertyTypes)); - } else if (c.types.contains(OntologyNode.NodeType.DATA_PROPERTY)) { - writeNode(writer, c, OntologyNode.NodeType.toString(dataPropertyTypes)); - } else if (c.types.contains(OntologyNode.NodeType.PROPERTY)) { - writeNode(writer, c, OntologyNode.NodeType.toString(propertyTypes)); - } - } + if (!noDates) { + String now = java.time.LocalDateTime.now().toString(); - writer.endArray(); + ontologyNode.properties.addProperty("loaded", PropertyValueLiteral.fromString(now)); + ontologyNode.properties.addProperty("sourceFileTimestamp", + PropertyValueLiteral.fromString(new Date(sourceFileTimestamp).toString())); + } - writer.name("individuals"); - writer.beginArray(); + for (String language : languages) { + ontologyNode.properties.addProperty("language", PropertyValueLiteral.fromString(language)); + } - for(String id : nodes.keySet()) { - OntologyNode c = nodes.get(id); - if (c.uri == null) { - // don't print bnodes at top level - continue; - } - if (c.types.contains(OntologyNode.NodeType.INDIVIDUAL)) { - writeNode(writer, c, OntologyNode.NodeType.toString(individualTypes)); - } - } + long endTime = System.nanoTime(); + System.out.println("load ontology: " + ((endTime - startTime) / 1000 / 1000 / 1000)); + + SearchableAnnotationValuesAnnotator.annotateSearchableAnnotationValues(this); + InverseOfAnnotator.annotateInverseOf(this); + NegativePropertyAssertionAnnotator.annotateNegativePropertyAssertions(this); + 
OboSynonymTypeNameAnnotator.annotateOboSynonymTypeNames(this); // n.b. this one labels axioms so must run before + // the ReifiedPropertyAnnotator + DirectParentsAnnotator.annotateDirectParents(this); + RelatedAnnotator.annotateRelated(this); + HierarchicalParentsAnnotator.annotateHierarchicalParents(this); // must run after RelatedAnnotator + AncestorsAnnotator.annotateAncestors(this); + HierarchyMetricsAnnotator.annotateHierarchyMetrics(this); // must run after HierarchicalParentsAnnotator + ShortFormAnnotator.annotateShortForms(this); + DefinitionAnnotator.annotateDefinitions(this); + SynonymAnnotator.annotateSynonyms(this); + ReifiedPropertyAnnotator.annotateReifiedProperties(this); + OntologyMetadataAnnotator.annotateOntologyMetadata(this); + HierarchyFlagsAnnotator.annotateHierarchyFlags(this); // must run after DirectParentsAnnotator and + // HierarchicalParentsAnnotator + IsObsoleteAnnotator.annotateIsObsolete(this); + LabelAnnotator.annotateLabels(this); // must run after ShortFormAnnotator + ConfigurablePropertyAnnotator.annotateConfigurableProperties(this); + PreferredRootsAnnotator.annotatePreferredRoots(this); + DisjointWithAnnotator.annotateDisjointWith(this); + HasIndividualsAnnotator.annotateHasIndividuals(this); + EquivalenceAnnotator.annotateEquivalance(this); - writer.endArray(); + } + static final Set classTypes = new TreeSet<>(Set.of(ENTITY, CLASS)); + static final Set dataPropertyTypes = new TreeSet<>(Set.of(ENTITY, PROPERTY, DATA_PROPERTY)); - writer.endObject(); + static final Set objectPropertyTypes = new TreeSet<>( + Set.of(ENTITY, PROPERTY, OBJECT_PROPERTY)); + static final Set annotationPropertyTypes = new TreeSet<>( + Set.of(ENTITY, PROPERTY, ANNOTATION_PROPERTY)); - } + static final Set propertyTypes = new TreeSet<>(Set.of(ENTITY, PROPERTY)); + static final Set individualTypes = new TreeSet<>(Set.of(ENTITY, INDIVIDUAL)); + public void write(JsonWriter writer) throws IOException { - private void writeNode(JsonWriter writer, OntologyNode c, Set types) throws IOException { + writer.beginObject(); - if(c.types.contains(OntologyNode.NodeType.RDF_LIST)) { + writer.name("ontologyId"); + writer.value(((String) config.get("id")).toLowerCase()); - writer.beginArray(); + writer.name("iri"); + writer.value(ontologyNode.uri); - for(PropertyValue listEntry : RdfListEvaluator.evaluateRdfList(c, this)) { - writePropertyValue(writer, listEntry, null); - } - - writer.endArray(); + for (String configKey : config.keySet()) { + Object configVal = config.get(configKey); - } else { - - writer.beginObject(); + // we include this (lowercased) as "ontologyId" rather than "id", + // so that the name "id" doesn't clash with downstream id fields in neo4j/solr + // + if (configKey.equals("id")) + continue; + + // already included explicitly above + if (configKey.equals("ontologyId")) + continue; + + // don't print the iri from the config, we already printed the one from the OWL + // TODO: which one to keep, or should we keep both? 
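Reviewer note (not part of the patch), for orientation in this heavily re-indented method: the streamed output has roughly the following shape (abridged; "efo" is a hypothetical ontology id):

    { "ontologyId": "efo", "iri": "...",
      ...remaining config keys..., "type": ["ontology"],
      ...ontology node properties...,
      "classes": [...], "properties": [...], "individuals": [...] }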
+ if (configKey.equals("iri")) + continue; + + // annotated as hasPreferredRoot by PreferredRootsAnnotator, no need to + // duplicate + if (configKey.equals("preferred_root_term")) + continue; + + // everything else from the config is stored as a normal property + writer.name(configKey); + writeGenericValue(writer, configVal); + } - if (c.uri != null) { - writer.name("iri"); - writer.value(c.uri); - } + writeProperties(writer, ontologyNode.properties, Set.of("ontology")); - writeProperties(writer, c.properties, types); - writer.endObject(); - } - } + writer.name("classes"); + writer.beginArray(); - private void writeProperties(JsonWriter writer, PropertySet properties, Set types) throws IOException { + for (String id : nodes.keySet()) { + OntologyNode c = nodes.get(id); + if (c.uri == null) { + // don't print bnodes at top level + continue; + } + if (c.types.contains(OntologyNode.NodeType.CLASS)) { + writeNode(writer, c, OntologyNode.NodeType.toString(classTypes)); + } + } - if(types != null) { - writer.name("type"); - writer.beginArray(); - for(String type : types) { - writer.value(type); - } - writer.endArray(); - } + writer.endArray(); + + writer.name("properties"); + writer.beginArray(); + + for (String id : nodes.keySet()) { + OntologyNode c = nodes.get(id); + if (c.uri == null) { + // don't print bnodes at top level + continue; + } + if (c.types.contains(OntologyNode.NodeType.OBJECT_PROPERTY)) { + writeNode(writer, c, OntologyNode.NodeType.toString(objectPropertyTypes)); + } else if (c.types.contains(OntologyNode.NodeType.ANNOTATION_PROPERTY)) { + writeNode(writer, c, OntologyNode.NodeType.toString(annotationPropertyTypes)); + } else if (c.types.contains(OntologyNode.NodeType.DATA_PROPERTY)) { + writeNode(writer, c, OntologyNode.NodeType.toString(dataPropertyTypes)); + } else if (c.types.contains(OntologyNode.NodeType.PROPERTY)) { + writeNode(writer, c, OntologyNode.NodeType.toString(propertyTypes)); + } + } - // TODO: sort keys, rdf:type should be first ideally - for (String predicate : properties.getPropertyPredicates()) { + writer.endArray(); - if(types != null && types.contains("ontology") && predicate.equals("ontologyId")) { - // hack to workaround a punning issue. - // if the Ontology is also a Class it will have an ontologyId added by - // the OntologyMetadataAnnotator, but there is already an ontologyId field - // printed as part of the ontology object, so skip this one... 
- // TODO: fix this as part of the punning refactoring - // - continue; - } - - List values = properties.getPropertyValues(predicate); - - writer.name(predicate); - - if(values.size() == 1) { - writePropertyValue(writer, values.get(0), null); - } else { - writer.beginArray(); - for (PropertyValue value : values) { - writePropertyValue(writer, value, null); - } - writer.endArray(); - } - } - } - - - public void writePropertyValue(JsonWriter writer, PropertyValue value, Set types) throws IOException { - if (value.axioms.size() > 0) { - // reified - writer.beginObject(); - writer.name("type"); - writer.beginArray(); - writer.value("reification"); - writer.endArray(); - writer.name("value"); - writeValue(writer, value); - writer.name("axioms"); - writer.beginArray(); - for(PropertySet axiom : value.axioms) { - writer.beginObject(); - writeProperties(writer, axiom, null); - writer.endObject(); - } - writer.endArray(); - writer.endObject(); - } else { - // not reified - writeValue(writer, value); - } - - } - - private boolean isXMLBuiltinDatatype(String uri) { - return uri.startsWith("http://www.w3.org/2001/XMLSchema#"); - } - public void writeValue(JsonWriter writer, PropertyValue value) throws IOException { - assert (value.axioms == null); - - switch(value.getType()) { - case BNODE: - OntologyNode c = nodes.get(((PropertyValueBNode) value).getId()); - if (c == null) { - // empty bnode values present in some ontologies, see issue #116 - writer.value(""); - } else { - writeNode(writer, c, null); - } - break; - case ID: - break; - case LITERAL: - PropertyValueLiteral literal = (PropertyValueLiteral) value; - writer.beginObject(); - writer.name("type"); - writer.beginArray(); - writer.value("literal"); - writer.endArray(); - if(!literal.getDatatype().equals("http://www.w3.org/2001/XMLSchema#string")) { - writer.name("datatype"); - writer.value(literal.getDatatype()); - } - writer.name("value"); - writer.value(literal.getValue()); - if(!literal.getLang().equals("")) { - writer.name("lang"); - writer.value(literal.getLang()); - } - writer.endObject(); - break; - case URI: - String uri = ((PropertyValueURI) value).getUri(); - OntologyNode uriNode = nodes.get(uri); - if(uriNode != null && !isXMLBuiltinDatatype(uri) && uriNode.types.contains(OntologyNode.NodeType.DATATYPE)) { - // special case for rdfs:Datatype; nest it as with a bnode instead of referencing - writeNode(writer, uriNode, Set.of("datatype")); - } else { - writer.value(uri); - } - break; - case RELATED: - writer.beginObject(); - writer.name("property"); - writer.value(((PropertyValueRelated) value).getProperty()); - writer.name("value"); - writer.value(((PropertyValueRelated) value).getFiller().uri); - writeProperties(writer, ((PropertyValueRelated) value).getClassExpression().properties, Set.of("related")); - writer.endObject(); - break; - case ANCESTORS: - PropertyValueAncestors ancestors = (PropertyValueAncestors) value; - Set ancestorIris = ancestors.getAncestors(this); - if(ancestorIris.size() == 1) { - writer.value(ancestorIris.iterator().next()); - } else { - writer.beginArray(); - for(String ancestorIri : ancestorIris) { - writer.value(ancestorIri); - } - writer.endArray(); - } - break; - default: - writer.value("?"); - break; - } - } - - - - - - - public Map nodes = new TreeMap<>(); - public OntologyNode ontologyNode = null; - - private OntologyNode getOrCreateNode(Node node) { - String id = nodeIdFromJenaNode(node); - OntologyNode entity = nodes.get(id); - if (entity != null) { - return entity; - } - - entity = new 
OntologyNode(); - - if(!node.isBlank()) - entity.uri = id; - - nodes.put(id, entity); - return entity; - } - - @Override - public void start() { - - } - - @Override - public void triple(Triple triple) { - - if(triple.getObject().isLiteral()) { - handleLiteralTriple(triple); - } else { - handleNamedNodeTriple(triple); - } - - // TODO: BNodes? - - } - - - public void handleLiteralTriple(Triple triple) { - - String subjId = nodeIdFromJenaNode(triple.getSubject()); - OntologyNode subjNode = getOrCreateNode(triple.getSubject()); - - String lang = triple.getObject().getLiteralLanguage(); - if(lang != null && !lang.equals("")) { - languages.add(lang); - } - - subjNode.properties.addProperty(triple.getPredicate().getURI(), PropertyValue.fromJenaNode(triple.getObject())); - - } - - public void handleNamedNodeTriple(Triple triple) { - - OntologyNode subjNode = getOrCreateNode(triple.getSubject()); - - switch (triple.getPredicate().getURI()) { - case "http://www.w3.org/1999/02/22-rdf-syntax-ns#type": - handleType(subjNode, triple.getObject()); - break; - case "http://www.w3.org/1999/02/22-rdf-syntax-ns#rest": - case "http://www.w3.org/1999/02/22-rdf-syntax-ns#first": - subjNode.types.add(OntologyNode.NodeType.RDF_LIST); - break; + writer.name("individuals"); + writer.beginArray(); - case "http://www.w3.org/2002/07/owl#imports": - importUrls.add(triple.getObject().getURI()); - break; - } + for (String id : nodes.keySet()) { + OntologyNode c = nodes.get(id); + if (c.uri == null) { + // don't print bnodes at top level + continue; + } + if (c.types.contains(OntologyNode.NodeType.INDIVIDUAL)) { + writeNode(writer, c, OntologyNode.NodeType.toString(individualTypes)); + } + } - subjNode.properties.addProperty(triple.getPredicate().getURI(), PropertyValue.fromJenaNode(triple.getObject())); + writer.endArray(); + writer.endObject(); - } + } - public void handleType(OntologyNode subjNode, Node type) { + private void writeNode(JsonWriter writer, OntologyNode c, Set types) throws IOException { - if(!type.isURI()) - return; + if (c.types.contains(OntologyNode.NodeType.RDF_LIST)) { - switch (type.getURI()) { + writer.beginArray(); - case "http://www.w3.org/2002/07/owl#Ontology": + for (PropertyValue listEntry : RdfListEvaluator.evaluateRdfList(c, this)) { + writePropertyValue(writer, listEntry, null); + } - subjNode.types.add(OntologyNode.NodeType.ONTOLOGY); + writer.endArray(); - if(ontologyNode == null) { - ontologyNode = subjNode; - } + } else { - break; + writer.beginObject(); - case "http://www.w3.org/2002/07/owl#Class": - case "http://www.w3.org/2000/01/rdf-schema#Class": - case "http://www.w3.org/2004/02/skos/core#Concept": - case "http://www.w3.org/2004/02/skos/core#ConceptScheme": - subjNode.types.add(OntologyNode.NodeType.CLASS); - if(subjNode.uri != null) { - ++ numberOfClasses; - } + if (c.uri != null) { + writer.name("iri"); + writer.value(c.uri); + } - break; + writeProperties(writer, c.properties, types); + writer.endObject(); + } + } - case "http://www.w3.org/2002/07/owl#AnnotationProperty": - subjNode.types.add(OntologyNode.NodeType.ANNOTATION_PROPERTY); - addAddAndCountProperties(subjNode); - break; + private void writeProperties(JsonWriter writer, PropertySet properties, Set types) throws IOException { - case "http://www.w3.org/2002/07/owl#ObjectProperty": - subjNode.types.add(OntologyNode.NodeType.OBJECT_PROPERTY); - addAddAndCountProperties(subjNode); - break; - case "http://www.w3.org/2002/07/owl#DatatypeProperty": - subjNode.types.add(OntologyNode.NodeType.DATA_PROPERTY); - 
addAddAndCountProperties(subjNode); - break; - case "http://www.w3.org/1999/02/22-rdf-syntax-ns#Property": - addAddAndCountProperties(subjNode); - break; + if (types != null) { + writer.name("type"); + writer.beginArray(); + for (String type : types) { + writer.value(type); + } + writer.endArray(); + } - case "http://www.w3.org/2002/07/owl#NamedIndividual": - subjNode.types.add(OntologyNode.NodeType.INDIVIDUAL); + // TODO: sort keys, rdf:type should be first ideally + for (String predicate : properties.getPropertyPredicates()) { + + if (types != null && types.contains("ontology") && predicate.equals("ontologyId")) { + // hack to workaround a punning issue. + // if the Ontology is also a Class it will have an ontologyId added by + // the OntologyMetadataAnnotator, but there is already an ontologyId field + // printed as part of the ontology object, so skip this one... + // TODO: fix this as part of the punning refactoring + // + continue; + } + + List values = properties.getPropertyValues(predicate); + + writer.name(predicate); + + if (values.size() == 1) { + writePropertyValue(writer, values.get(0), null); + } else { + writer.beginArray(); + for (PropertyValue value : values) { + writePropertyValue(writer, value, null); + } + writer.endArray(); + } + } + } - if(subjNode.uri != null) { - ++ numberOfIndividuals; - } + public void writePropertyValue(JsonWriter writer, PropertyValue value, Set types) throws IOException { + if (value.axioms.size() > 0) { + // reified + writer.beginObject(); + writer.name("type"); + writer.beginArray(); + writer.value("reification"); + writer.endArray(); + writer.name("value"); + writeValue(writer, value); + writer.name("axioms"); + writer.beginArray(); + for (PropertySet axiom : value.axioms) { + writer.beginObject(); + writeProperties(writer, axiom, null); + writer.endObject(); + } + writer.endArray(); + writer.endObject(); + } else { + // not reified + writeValue(writer, value); + } - break; + } - case "http://www.w3.org/2002/07/owl#Axiom": - subjNode.types.add(OntologyNode.NodeType.AXIOM); - break; + private boolean isXMLBuiltinDatatype(String uri) { + return uri.startsWith("http://www.w3.org/2001/XMLSchema#"); + } - case "http://www.w3.org/2002/07/owl#Restriction": - subjNode.types.add(OntologyNode.NodeType.RESTRICTION); - break; - - case "http://www.w3.org/2002/07/owl#AllDisjointClasses": - subjNode.types.add(OntologyNode.NodeType.ALL_DISJOINT_CLASSES); - break; - case "http://www.w3.org/2002/07/owl#AllDisjointProperties": - subjNode.types.add(OntologyNode.NodeType.ALL_DISJOINT_PROPERTIES); - break; - case "http://www.w3.org/2002/07/owl#AllDifferent": - subjNode.types.add(OntologyNode.NodeType.ALL_DIFFERENT); - break; - case "http://www.w3.org/2002/07/owl#NegativePropertyAssertion": - subjNode.types.add(OntologyNode.NodeType.NEGATIVE_PROPERTY_ASSERTION); - break; - - case "http://www.w3.org/2000/01/rdf-schema#Datatype": - subjNode.types.add(OntologyNode.NodeType.DATATYPE); - break; - } - } - - private void addAddAndCountProperties(OntologyNode subjNode) { - subjNode.types.add(OntologyNode.NodeType.PROPERTY); - - if (subjNode.uri != null) { - ++numberOfProperties; - } - } - - @Override - public void quad(Quad quad) { - - } - - @Override - public void base(String s) { - - } - - @Override - public void prefix(String s, String s1) { - - } - - @Override - public void finish() { - - } - - - public String nodeIdFromJenaNode(Node node) { - if(node.isURI()) { - return node.getURI(); - } - if(node.isBlank()) { - return node.getBlankNodeId().toString(); - } - throw 
new RuntimeException("unknown node type"); - } - - public String nodeIdFromPropertyValue(PropertyValue node) { - if(node.getType() == PropertyValue.Type.URI) { - return ((PropertyValueURI) node).getUri(); - } - if(node.getType() == PropertyValue.Type.BNODE) { - return ((PropertyValueBNode) node).getId(); - } - throw new RuntimeException("unknown node type"); - } - - - - private static void writeGenericValue(JsonWriter writer, Object val) throws IOException { - - if(val instanceof Collection) { - writer.beginArray(); - for(Object entry : ((Collection) val)) { - writeGenericValue(writer, entry); - } - writer.endArray(); - } else if(val instanceof Map) { - Map map = new TreeMap ( (Map) val ); - writer.beginObject(); - for(String k : map.keySet()) { - writer.name(k); - writeGenericValue(writer, map.get(k)); - } - writer.endObject(); - } else if(val instanceof String) { - writer.value((String) val); - } else if(val instanceof Integer) { - writer.value((Integer) val); - } else if(val instanceof Double) { - writer.value((Double) val); - } else if(val instanceof Long) { - writer.value((Long) val); - } else if(val instanceof Boolean) { - writer.value((Boolean) val); - } else if(val == null) { - writer.nullValue(); - } else { - throw new RuntimeException("Unknown value type"); - } - - } - - - public boolean areSubgraphsIsomorphic(PropertyValue rootNodeA, PropertyValue rootNodeB) { - - OntologyNode a = nodes.get(nodeIdFromPropertyValue(rootNodeA)); - OntologyNode b = nodes.get(nodeIdFromPropertyValue(rootNodeB)); - - if(! a.properties.getPropertyPredicates().equals( b.properties.getPropertyPredicates() )) { - return false; + public void writeValue(JsonWriter writer, PropertyValue value) throws IOException { + assert (value.axioms == null); + + switch (value.getType()) { + case BNODE: + OntologyNode c = nodes.get(((PropertyValueBNode) value).getId()); + if (c == null) { + // empty bnode values present in some ontologies, see issue #116 + writer.value(""); + } else { + writeNode(writer, c, null); + } + break; + case ID: + break; + case LITERAL: + PropertyValueLiteral literal = (PropertyValueLiteral) value; + writer.beginObject(); + writer.name("type"); + writer.beginArray(); + writer.value("literal"); + writer.endArray(); + if (!literal.getDatatype().equals("http://www.w3.org/2001/XMLSchema#string")) { + writer.name("datatype"); + writer.value(literal.getDatatype()); + } + writer.name("value"); + writer.value(literal.getValue()); + if (!literal.getLang().equals("")) { + writer.name("lang"); + writer.value(literal.getLang()); + } + writer.endObject(); + break; + case URI: + String uri = ((PropertyValueURI) value).getUri(); + OntologyNode uriNode = nodes.get(uri); + if (uriNode != null && !isXMLBuiltinDatatype(uri) + && uriNode.types.contains(OntologyNode.NodeType.DATATYPE)) { + // special case for rdfs:Datatype; nest it as with a bnode instead of + // referencing + writeNode(writer, uriNode, Set.of("datatype")); + } else { + writer.value(uri); + } + break; + case RELATED: + writer.beginObject(); + writer.name("property"); + writer.value(((PropertyValueRelated) value).getProperty()); + writer.name("value"); + writer.value(((PropertyValueRelated) value).getFiller().uri); + writeProperties(writer, ((PropertyValueRelated) value).getClassExpression().properties, Set.of("related")); + writer.endObject(); + break; + case ANCESTORS: + PropertyValueAncestors ancestors = (PropertyValueAncestors) value; + Set ancestorIris = ancestors.getAncestors(this); + if (ancestorIris.size() == 1) { + 
writer.value(ancestorIris.iterator().next()); + } else { + writer.beginArray(); + for (String ancestorIri : ancestorIris) { + writer.value(ancestorIri); + } + writer.endArray(); + } + break; + default: + writer.value("?"); + break; + } } - for(String predicate : a.properties.getPropertyPredicates()) { - List valuesA = a.properties.getPropertyValues(predicate); - List valuesB = b.properties.getPropertyValues(predicate); + public Map nodes = new TreeMap<>(); + public OntologyNode ontologyNode = null; - if(valuesA.size() != valuesB.size()) - return false; + private OntologyNode getOrCreateNode(Node node) { + String id = nodeIdFromJenaNode(node); + OntologyNode entity = nodes.get(id); + if (entity != null) { + return entity; + } - for(int n = 0; n < valuesA.size(); ++ n) { - PropertyValue valueA = valuesA.get(n); - PropertyValue valueB = valuesB.get(n); + entity = new OntologyNode(); - if(valueA.getType() != PropertyValue.Type.BNODE) { - // non bnode value, simple case - if(!valueA.equals(valueB)) { - return false; - } - } + if (!node.isBlank()) + entity.uri = id; - // bnode value + nodes.put(id, entity); + return entity; + } - if(valueB.getType() != PropertyValue.Type.BNODE) - return false; + @Override + public void start() { - if(!areSubgraphsIsomorphic(valueA, valueB)) - return false; + } + + @Override + public void triple(Triple triple) { + + if (triple.getObject().isLiteral()) { + handleLiteralTriple(triple); + } else { + handleNamedNodeTriple(triple); + } + + // TODO: BNodes? + + } + + public void handleLiteralTriple(Triple triple) { + + String subjId = nodeIdFromJenaNode(triple.getSubject()); + OntologyNode subjNode = getOrCreateNode(triple.getSubject()); + + String lang = triple.getObject().getLiteralLanguage(); + if (lang != null && !lang.equals("")) { + languages.add(lang); + } + + subjNode.properties.addProperty(triple.getPredicate().getURI(), PropertyValue.fromJenaNode(triple.getObject())); + + } + + public void handleNamedNodeTriple(Triple triple) { + + OntologyNode subjNode = getOrCreateNode(triple.getSubject()); + + switch (triple.getPredicate().getURI()) { + case "http://www.w3.org/1999/02/22-rdf-syntax-ns#type": + handleType(subjNode, triple.getObject()); + break; + case "http://www.w3.org/1999/02/22-rdf-syntax-ns#rest": + case "http://www.w3.org/1999/02/22-rdf-syntax-ns#first": + subjNode.types.add(OntologyNode.NodeType.RDF_LIST); + break; + + case "http://www.w3.org/2002/07/owl#imports": + importUrls.add(triple.getObject().getURI()); + break; + } + + subjNode.properties.addProperty(triple.getPredicate().getURI(), PropertyValue.fromJenaNode(triple.getObject())); + + } + + public void handleType(OntologyNode subjNode, Node type) { + + if (!type.isURI()) + return; + + switch (type.getURI()) { + + case "http://www.w3.org/2002/07/owl#Ontology": + + subjNode.types.add(OntologyNode.NodeType.ONTOLOGY); + + if (ontologyNode == null) { + ontologyNode = subjNode; + } + + break; + + case "http://www.w3.org/2002/07/owl#Class": + case "http://www.w3.org/2000/01/rdf-schema#Class": + case "http://www.w3.org/2004/02/skos/core#Concept": + case "http://www.w3.org/2004/02/skos/core#ConceptScheme": + subjNode.types.add(OntologyNode.NodeType.CLASS); + if (subjNode.uri != null) { + ++numberOfClasses; + } + + break; + + case "http://www.w3.org/2002/07/owl#AnnotationProperty": + subjNode.types.add(OntologyNode.NodeType.ANNOTATION_PROPERTY); + addAddAndCountProperties(subjNode); + break; + + case "http://www.w3.org/2002/07/owl#ObjectProperty": + 
subjNode.types.add(OntologyNode.NodeType.OBJECT_PROPERTY); + addAddAndCountProperties(subjNode); + break; + case "http://www.w3.org/2002/07/owl#DatatypeProperty": + subjNode.types.add(OntologyNode.NodeType.DATA_PROPERTY); + addAddAndCountProperties(subjNode); + break; + case "http://www.w3.org/1999/02/22-rdf-syntax-ns#Property": + addAddAndCountProperties(subjNode); + break; + + case "http://www.w3.org/2002/07/owl#NamedIndividual": + subjNode.types.add(OntologyNode.NodeType.INDIVIDUAL); + + if (subjNode.uri != null) { + ++numberOfIndividuals; + } + + break; + + case "http://www.w3.org/2002/07/owl#Axiom": + subjNode.types.add(OntologyNode.NodeType.AXIOM); + break; + + case "http://www.w3.org/2002/07/owl#Restriction": + subjNode.types.add(OntologyNode.NodeType.RESTRICTION); + break; + + case "http://www.w3.org/2002/07/owl#AllDisjointClasses": + subjNode.types.add(OntologyNode.NodeType.ALL_DISJOINT_CLASSES); + break; + case "http://www.w3.org/2002/07/owl#AllDisjointProperties": + subjNode.types.add(OntologyNode.NodeType.ALL_DISJOINT_PROPERTIES); + break; + case "http://www.w3.org/2002/07/owl#AllDifferent": + subjNode.types.add(OntologyNode.NodeType.ALL_DIFFERENT); + break; + case "http://www.w3.org/2002/07/owl#NegativePropertyAssertion": + subjNode.types.add(OntologyNode.NodeType.NEGATIVE_PROPERTY_ASSERTION); + break; + + case "http://www.w3.org/2000/01/rdf-schema#Datatype": + subjNode.types.add(OntologyNode.NodeType.DATATYPE); + break; + } + } + + private void addAddAndCountProperties(OntologyNode subjNode) { + subjNode.types.add(OntologyNode.NodeType.PROPERTY); + + if (subjNode.uri != null) { + ++numberOfProperties; + } + } + + @Override + public void quad(Quad quad) { + + } + + @Override + public void base(String s) { + + } + + @Override + public void prefix(String s, String s1) { + + } + + @Override + public void finish() { + + } + + public String nodeIdFromJenaNode(Node node) { + if (node.isURI()) { + return node.getURI(); + } + if (node.isBlank()) { + return node.getBlankNodeId().toString(); } + throw new RuntimeException("unknown node type"); } - return true; - } + public String nodeIdFromPropertyValue(PropertyValue node) { + if (node.getType() == PropertyValue.Type.URI) { + return ((PropertyValueURI) node).getUri(); + } + if (node.getType() == PropertyValue.Type.BNODE) { + return ((PropertyValueBNode) node).getId(); + } + throw new RuntimeException("unknown node type"); + } + + private static void writeGenericValue(JsonWriter writer, Object val) throws IOException { + + if (val instanceof Collection) { + writer.beginArray(); + for (Object entry : ((Collection) val)) { + writeGenericValue(writer, entry); + } + writer.endArray(); + } else if (val instanceof Map) { + Map map = new TreeMap((Map) val); + writer.beginObject(); + for (String k : map.keySet()) { + writer.name(k); + writeGenericValue(writer, map.get(k)); + } + writer.endObject(); + } else if (val instanceof String) { + writer.value((String) val); + } else if (val instanceof Integer) { + writer.value((Integer) val); + } else if (val instanceof Double) { + writer.value((Double) val); + } else if (val instanceof Long) { + writer.value((Long) val); + } else if (val instanceof Boolean) { + writer.value((Boolean) val); + } else if (val == null) { + writer.nullValue(); + } else { + throw new RuntimeException("Unknown value type"); + } + + } + public boolean areSubgraphsIsomorphic(PropertyValue rootNodeA, PropertyValue rootNodeB) { - public OntologyNode getNodeForPropertyValue(PropertyValue value) { + OntologyNode a = 
nodes.get(nodeIdFromPropertyValue(rootNodeA)); + OntologyNode b = nodes.get(nodeIdFromPropertyValue(rootNodeB)); - switch(value.getType()) { - case URI: - return nodes.get( ((PropertyValueURI) value).getUri() ); - case BNODE: - return nodes.get( ((PropertyValueBNode) value).getId() ); - default: - throw new RuntimeException("not a node"); - } - } + if (!a.properties.getPropertyPredicates().equals(b.properties.getPropertyPredicates())) { + return false; + } + + for (String predicate : a.properties.getPropertyPredicates()) { + List valuesA = a.properties.getPropertyValues(predicate); + List valuesB = b.properties.getPropertyValues(predicate); + + if (valuesA.size() != valuesB.size()) + return false; + + for (int n = 0; n < valuesA.size(); ++n) { + PropertyValue valueA = valuesA.get(n); + PropertyValue valueB = valuesB.get(n); + + if (valueA.getType() != PropertyValue.Type.BNODE) { + // non bnode value, simple case + if (!valueA.equals(valueB)) { + return false; + } + } + + // bnode value + + if (valueB.getType() != PropertyValue.Type.BNODE) + return false; + + if (!areSubgraphsIsomorphic(valueA, valueB)) + return false; + } + } + + return true; + } + + public OntologyNode getNodeForPropertyValue(PropertyValue value) { + + switch (value.getType()) { + case URI: + return nodes.get(((PropertyValueURI) value).getUri()); + case BNODE: + return nodes.get(((PropertyValueBNode) value).getId()); + default: + throw new RuntimeException("not a node"); + } + } } From 2a186af326308f308d92e67fd9db890c127f5cc0 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Fri, 14 Jun 2024 12:49:56 +0200 Subject: [PATCH 053/146] handled several buggy cases in query creation for #14 --- .../uk/ac/ebi/spot/csv2neo/ImportCSV.java | 188 ++++++++++++++++-- .../ac/ebi/spot/csv2neo/QueryGeneration.java | 5 + 2 files changed, 180 insertions(+), 13 deletions(-) diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java index 630d1649e..055487376 100644 --- a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java +++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java @@ -21,14 +21,12 @@ public class ImportCSV { static FileReader fr; static BufferedReader br; - public static List showFiles(File[] files) throws IOException { + public static List listFiles(File[] files) throws IOException { List fileList = new ArrayList(); for (File file : files) { if (file.isDirectory()) { - System.out.println("Directory: " + file.getAbsolutePath()); - fileList.addAll(showFiles(file.listFiles())); + fileList.addAll(listFiles(file.listFiles())); } else { - System.out.println("File: " + file.getAbsolutePath()); fileList.add(file); } } @@ -36,7 +34,7 @@ public static List showFiles(File[] files) throws IOException { return fileList; } - public static void generateCreationQueries(List files, Session session, boolean safe) throws IOException { + public static void generateCreationQueries(List files, Session session, boolean safe) throws IOException, java.text.ParseException { for (File file : files){ if(!(file.getName().contains("_ontologies") || file.getName().contains("_properties") || file.getName().contains("_individuals") || file.getName().contains("_classes")) || !file.getName().endsWith(".csv")) @@ -48,11 +46,60 @@ public static void generateCreationQueries(List files, Session session, bo if (line != null) titles = line.split(",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)"); String[] 
pieces = null; + StringBuilder sb = new StringBuilder(); + boolean started = false; while((line = br.readLine())!=null){ - System.out.println(line); - pieces = split(line,",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)"); + String appendedLine = ""; + + if (line.startsWith("\"") && line.endsWith("\"")){ + if(started){ + if (line.startsWith("\",\"") && !sb.toString().isEmpty()) { + sb.append(line); + appendedLine = sb.toString(); + sb.setLength(0); + started = false; + } + else + throw new IOException("file: "+file+" - line: "+line); + } + else + appendedLine = line; + } else if (line.startsWith("\"") && !line.endsWith("\"")){ + if(started){ + if (line.startsWith("\",\"")) { + sb.append(line); + } + else + throw new IOException("file: "+file+" - line: "+line); + } + else { + sb.append(line); + started = true; + } + } else if (!line.startsWith("\"") && !line.endsWith("\"")){ + if(!started) + throw new IOException("file: "+file+" - line: "+line); + else + sb.append(line); + + } else if (!line.startsWith("\"") && line.endsWith("\"") && !sb.toString().isEmpty()){ + if(!started) + throw new IOException("file: "+file+" - line: "+line); + else { + sb.append(line); + appendedLine = sb.toString(); + sb.setLength(0); + started = false; + } + } + + if (appendedLine.isEmpty()) + continue; + + pieces = split(appendedLine, List.of(titles).indexOf("\"_json\""),titles.length,",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)"); String query = generateNodeCreationQuery(titles,pieces); - System.out.println("query: "+query); + if(query.isEmpty()) + System.out.println("empty query for appended line: "+appendedLine+" in file: "+file); if(safe){ try (Transaction tx = session.beginTransaction()) { tx.run(query); @@ -79,11 +126,58 @@ public static void generateCreationQueries(List files, Session session, bo if (line != null) titles = line.split(",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)"); String[] pieces = null; + StringBuilder sb = new StringBuilder(); + boolean started = false; while((line = br.readLine())!=null){ - System.out.println(line); - pieces = line.split(",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)"); + String appendedLine = ""; + if (line.startsWith("\"") && line.endsWith("\"")){ + if(started){ + if (line.startsWith("\",\"") && !sb.toString().isEmpty()) { + sb.append(line); + appendedLine = sb.toString(); + sb.setLength(0); + started = false; + } + else + throw new IOException("file: "+file+" - line: "+line); + } + else + appendedLine = line; + } else if (line.startsWith("\"") && !line.endsWith("\"")){ + if(started){ + if (line.startsWith("\",\"")) { + sb.append(line); + } + else + throw new IOException("file: "+file+" - line: "+line); + } + else { + sb.append(line); + started = true; + } + } else if (!line.startsWith("\"") && !line.endsWith("\"")){ + if(!started) + throw new IOException("file: "+file+" - line: "+line); + else + sb.append(line); + } else if (!line.startsWith("\"") && line.endsWith("\"") && !sb.toString().isEmpty()){ + if(!started) + throw new IOException("file: "+file+" - line: "+line); + else { + sb.append(line); + appendedLine = sb.toString(); + sb.setLength(0); + started = false; + } + } + + if (appendedLine.isEmpty()) + continue; + + pieces = appendedLine.split(",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)"); String query = generateRelationCreationQuery(titles,pieces); - System.out.println("query: "+query); + if(query.isEmpty()) + System.out.println("empty query for appended line: "+appendedLine+" in file: "+file); if(safe){ try (Transaction tx = session.beginTransaction()) { tx.run(query); @@ -102,7 +196,7 @@ public static void 
generateCreationQueries(List<File> files, Session session, bo
 }
 }
 
-    public static String[] split(String input, String regex){
+    public static String[] split(String input, int jsonIndex, int titlesLength, String regex) throws java.text.ParseException {
         String[] tokens = {};
         char c = '{';
         char d = '\"';
@@ -139,6 +233,72 @@ else if(i==beforeArray.length)
 else
 tokens[i] = afterArray[i-(beforeArray.length+1)];
 }
+        } else if (countLeftCurly >= 1 && countRightCurly >= countLeftCurly){
+            String before = "";
+            String after = "";
+            String json = "";
+            int start = 0;
+            int end = 0;
+
+            int countDoubleQuotes = 0;
+            int countCommas = 0;
+            for (int i = 0; i < input.length(); i++){
+                if (input.charAt(i) == '"'){
+                    countDoubleQuotes++;
+                    if (countDoubleQuotes % 2 == 0)
+                        if(input.charAt(i+1) == ',')
+                            countCommas++;
+                }
+
+                if (countDoubleQuotes >= 2*jsonIndex && countCommas == jsonIndex){
+                    before = input.substring(0,i+1);
+                    start = i+1;
+                    break;
+                }
+
+            }
+
+            countDoubleQuotes = 0;
+            countCommas = 0;
+            for (int j = input.length()-1;j>-1;j--){
+                if (input.charAt(j) == '"'){
+                    countDoubleQuotes++;
+                    if (countDoubleQuotes % 2 == 0)
+                        if(input.charAt(j-1) == ',')
+                            countCommas++;
+                }
+
+                if (countDoubleQuotes >= 2*(titlesLength - jsonIndex -1) && countCommas == titlesLength - jsonIndex -1){
+                    after = input.substring(j);
+                    end = j;
+                    break;
+                }
+            }
+
+            json = input.substring(start,end);
+
+            String[] beforeArray = before.split(regex);
+            String[] afterArray = after.split(regex);
+            int length = beforeArray.length + 1 + afterArray.length;
+
+            if (length == titlesLength){
+                tokens = new String[length];
+                for (int i =0;i files = showFiles(dir.listFiles());
+        List<File> files = listFiles(dir.listFiles());
         if(cmd.hasOption("s"))
             generateCreationQueries(files,session,true);
         else
@@ -207,6 +367,8 @@ public static void main(String... args) throws IOException, ParseException {
                 e.printStackTrace();
             }
 
+        } catch (java.text.ParseException e) {
+            throw new RuntimeException(e);
         }
     }
 }
diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java
index fb078979c..7b3d64066 100644
--- a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java
+++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java
@@ -75,6 +75,11 @@ public static String generateRelationCreationQuery(String[] titles, String[] val
                 .append("CREATE (n)-[:")
                 .append("`"+values[1].substring(1, values[1].length() - 1).replace("|","`:`")+"`")
                 .append("]->(m)");
+        } else {
+            System.out.println("titles and values are not equal");
+            System.out.println("titles: "+titles.length + " - values: " +values.length);
+            for (String value : values)
+                System.out.println("value: "+value);
         }
 
         return sb.toString();

From 061f546cda521a536c7734681d89a70b4a3c1a28 Mon Sep 17 00:00:00 2001
From: deepananbu
Date: Tue, 18 Jun 2024 11:25:24 +0200
Subject: [PATCH 054/146] added logic to convert ontologies to rdf format.
 Added "convertToRDF" as an optional command line argument.
--- .../uk/ac/ebi/rdf2json/OntologyGraph.java | 64 +++++++++---------- .../java/uk/ac/ebi/rdf2json/RDF2JSON.java | 7 +- 2 files changed, 35 insertions(+), 36 deletions(-) diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java index e06b95815..ee5d14276 100644 --- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java +++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java @@ -15,7 +15,6 @@ import java.io.IOException; import java.io.InputStream; import java.net.HttpURLConnection; -import java.net.MalformedURLException; import java.net.URL; import java.net.URLConnection; import java.nio.file.Files; @@ -101,7 +100,7 @@ private RDFParserBuilder createParser(Lang lang) { } } - private void parseRDF(String url) { + private void parseRDF(String url, boolean convertToRDF) { try { if (loadLocalFiles && !url.contains("://")) { @@ -135,21 +134,19 @@ private void parseRDF(String url) { } else { System.out.println("Downloading (no predownload path provided) " + url); - - - /* - * String outputFile = "/home/anbalagand/rdftoJSON/testRobot/result"; - * OWLOntology ont = downloadToLocal(url, outputFile); OWLDocumentFormat odf = - * ont.getOWLOntologyManager().getOntologyFormat(ont); String lang1 = - * odf.getKey(); String ext = ".owl"; if(lang1.contains("Turtle")) ext = ".ttl"; - * url = outputFile+ext; - */ - - sourceFileTimestamp = System.currentTimeMillis(); - - - createParser(null).source(url).parse(this); + if (convertToRDF) { + String outputFile = "./src/main/resources/result"; + OWLOntology ont = convertOntologyToRDF(url, outputFile); + OWLDocumentFormat odf = ont.getOWLOntologyManager().getOntologyFormat(ont); + String lang1 = odf.getKey(); + String ext = ".owl"; + if (lang1.contains("Turtle")) + ext = ".ttl"; + url = outputFile + ext; + } + createParser(null).source(url).parse(this); + sourceFileTimestamp = System.currentTimeMillis(); } } } catch (FileNotFoundException e) { @@ -160,7 +157,7 @@ private void parseRDF(String url) { } } - private OWLOntology downloadToLocal(String url, String outputFile) throws IOException { + private OWLOntology convertOntologyToRDF(String url, String outputFile) throws IOException { OWLOntologyManager ontManager = OWLManager.createOWLOntologyManager(); FileOutputStream fos = null; OWLOntology ont = null; @@ -177,14 +174,9 @@ private OWLOntology downloadToLocal(String url, String outputFile) throws IOExce is = tempURL.openStream(); } catch (IOException e) { isDefaultURLFailed = true; - if (con instanceof HttpsURLConnection) { - url = url.replace("https:", "http:"); - } else if (con instanceof HttpURLConnection) { - url = url.replace("http:", "https:"); - } - } if (isDefaultURLFailed) { + url = replaceURLByProtocol(con, url); try { is = new URL(url).openStream(); } catch (IOException e) { @@ -195,15 +187,10 @@ private OWLOntology downloadToLocal(String url, String outputFile) throws IOExce ont = ontManager.loadOntologyFromOntologyDocument(is); } catch (Exception e) { isParserException = true; - if (con instanceof HttpsURLConnection) { - url = url.replace("https:", "http:"); - } else if (con instanceof HttpURLConnection) { - url = url.replace("http:", "https:"); - } } if (isParserException) { - + url = replaceURLByProtocol(con, url); try { is = new URL(url).openStream(); } catch (IOException e) { @@ -230,13 +217,10 @@ else if (!lang1.contains("RDF")) { } } catch (OWLOntologyCreationException e) { - // TODO Auto-generated catch block 
e.printStackTrace();
 		} catch (IOException e) {
-			// TODO Auto-generated catch block
 			e.printStackTrace();
 		} catch (OWLOntologyStorageException e) {
-			// TODO Auto-generated catch block
 			e.printStackTrace();
 		} finally {
 			if (fos != null)
@@ -247,6 +231,16 @@ else if (!lang1.contains("RDF")) {
 		}
 	}
+	
+	private String replaceURLByProtocol(URLConnection con, String url) {
+		if (con instanceof HttpsURLConnection) {
+			url = url.replace("https:", "http:");
+		} else if (con instanceof HttpURLConnection) {
+			url = url.replace("http:", "https:");
+		}
+		return url;
+		
+	}
 
 	private String urlToFilename(String url) {
 		return url.replaceAll("[^a-zA-Z0-9\\.\\-]", "_");
@@ -256,7 +250,7 @@ private String urlToFilename(String url) {
 
 	String downloadedPath;
 
-	OntologyGraph(Map<String, Object> config, boolean loadLocalFiles, boolean noDates, String downloadedPath) {
+	OntologyGraph(Map<String, Object> config, boolean loadLocalFiles, boolean noDates, String downloadedPath, boolean convertToRDF) {
 
 		this.loadLocalFiles = loadLocalFiles;
 		this.downloadedPath = downloadedPath;
@@ -294,7 +288,7 @@ private String urlToFilename(String url) {
 		}
 
 		System.out.println("load ontology from: " + url);
-		parseRDF(url);
+		parseRDF(url, convertToRDF);
 
 		// Before we evaluate imports, mark all the nodes so far as not imported
 		for (String id : nodes.keySet()) {
@@ -309,7 +303,7 @@ private String urlToFilename(String url) {
 			importUrls.remove(0);
 
 			System.out.println("import: " + importUrl);
-			parseRDF(importUrl);
+			parseRDF(importUrl, convertToRDF);
 		}
 
 		// Now the imports are done, mark everything else as imported
diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/RDF2JSON.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/RDF2JSON.java
index 4d1cc9f23..52cea9dd8 100644
--- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/RDF2JSON.java
+++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/RDF2JSON.java
@@ -47,6 +47,10 @@ public static void main(String[] args) throws IOException {
         Option noDates = new Option(null, "noDates", false, "Set to leave LOADED dates blank (for testing)");
         noDates.setRequired(false);
         options.addOption(noDates);
+
+        Option rdfConvert = new Option(null, "convertToRDF", false, "Whether or not to convert the ontology to RDF/XML format before parsing.");
+        rdfConvert.setRequired(false);
+        options.addOption(rdfConvert);
 
         CommandLineParser parser = new DefaultParser();
         HelpFormatter formatter = new HelpFormatter();
@@ -69,6 +73,7 @@ public static void main(String[] args) throws IOException {
         boolean bLoadLocalFiles = cmd.hasOption("loadLocalFiles");
         boolean bNoDates = cmd.hasOption("noDates");
         String mergeOutputWith = cmd.getOptionValue("mergeOutputWith");
+        boolean convertToRDF = cmd.hasOption("convertToRDF");
 
         System.out.println("Configs: " + configFilePaths);
 
@@ -140,7 +145,7 @@ public static void main(String[] args) throws IOException {
 
             try {
 
-                OntologyGraph graph = new OntologyGraph(ontoConfig, bLoadLocalFiles, bNoDates, downloadedPath);
+                OntologyGraph graph = new OntologyGraph(ontoConfig, bLoadLocalFiles, bNoDates, downloadedPath, convertToRDF);
 
                 if(graph.ontologyNode == null) {
                     System.out.println("No Ontology node found; nothing will be written");

From 20bcaf9a00e34991f539d46ecf0738eede0e758b Mon Sep 17 00:00:00 2001
From: deepananbu
Date: Tue, 18 Jun 2024 11:25:59 +0200
Subject: [PATCH 055/146] owl & ttl files required for the conversion

---
 .../rdf2json/src/main/resources/result.owl |  854 +++++++++
 .../rdf2json/src/main/resources/result.ttl | 1529 +++++++++++++++++
 2 files changed, 2383 insertions(+)
 create mode 
100644 dataload/rdf2json/src/main/resources/result.owl create mode 100644 dataload/rdf2json/src/main/resources/result.ttl diff --git a/dataload/rdf2json/src/main/resources/result.owl b/dataload/rdf2json/src/main/resources/result.owl new file mode 100644 index 000000000..d205a6a80 --- /dev/null +++ b/dataload/rdf2json/src/main/resources/result.owl @@ -0,0 +1,854 @@ + + + + + + + This ontology is designed to represent many of the relations (i.e. object properties) that hold between entities at the level of the mid-level Common Core Ontologies. + Extended Relation Ontology + Version 1.5 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + An Alternative Label that consists of a shortened or abbreviated form of the rdfs:label and is used to denote the entity. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + acronym + + + + + + + + + A term or phrase that may be used in place of the stated rdfs:label to denote the entity in question. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + alternative label + + + + + + + + The name and description of the license under which the .owl file is released. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + code license + + + + + + + + The name and description of the license under which the ideas, concepts and other informational content expressed in the .owl file are released. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + content license + + + + + + + + An assertion of copyright + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + copyright + + + + + + + + A natural language explication of the meaning of the term. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + definition + + + + + + + + A citation of where all or some of the information used to create the term's Definition was acquired from. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + definition source + + + + + + + + A name or other identifier that is used to designate an individual. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + designator annotation + + + + + + + + An Acronym that is used by a Doctrinal Source to denote the entity. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + doctrinal acronym + + + + + + + + + A Definition that is taken directly from a Doctrinal Source. + There is only one definition for any given term in an ontology; however, a Doctrinal Definition may be provided in addition to the asserted Definition if the preservation of this information is important. When both a Definition and a Doctrinal Definition are provided for a term, the Definition takes precedence. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + doctrinal definition + + + + + + + + + An Alternative Label that consists of the preferred term or phrase used by a Doctrinal Source to denote the entity. + When the cco:doctrinal_label is identical to the rdfs:label, the cco:doctrinal_label annotation is superfluous. As a subclass of 'alternative label', 'doctrinal label' is intended to be used to provide additional information about the entity when its preferred doctrinal designator is ambiguous or otherwise inappropriate for use as the rdfs:label. 
+ http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + doctrinal label + + + + + + + + + A Definition Source that consists of a formalized doctrine in which the term is authoritatively defined. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + doctrinal source + + + + + + + + + A clarification or further explanation of a term beyond what is included in the Definition or which is used when the term is primitive such that no non-circular definition can be given for it. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + elucidation + + + + + + + + A phrase, sentence or set of terms intended to convey the conventional usage of the term. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + example of usage + + + + + + + + A relation between an information content entity and a widely used measurement unit of the token used to express it. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + has token unit + + + + + + + + The text of an HTTP request that can be sent to a SPARQL Protocol service. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + http query string + + + + + + + + A interval measurement value of an instance of a quality, realizable or process profile + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + interval measurement annotation + + + + + + + + + An annotation property that links a class, property, or named individual to the URI of the ontology where it is located. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + is curated in ontology + + + + + + + + A relation between an information content entity and a widely used token used to express it. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + is tokenized by + + + + + + + + A measurement value of an instance of a quality, reazlizable or process profile + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + measurement annotation + + + + + + + + A nominal measurement value of an instance of a quality, realizable or process profile + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + nominal measurement annotation + + + + + + + + + An ordinal measurement value of an instance of a quality, realizable or process profile + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + ordinal measurement annotation + + + + + + + + + The text of a query that is associated with a class + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + query text + + + + + + + + A ratio measurement value of an instance of a quality, realizable or process profile + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + ratio measurement annotation + + + + + + + + + The name of the Term Editor who added the term to the ontology. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + term creator + + + + + + + + + The name of a person who contributed to the development or enhancement of the term. 
+ http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + term editor + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + y is_accessory_in x iff x is an instance of Process and y is an instance of Agent, such that y assists another agent in the commission of x, and y was not located at the location of x when x occurred, and y was not an agent_in x. + http://en.wikipedia.org/wiki/Accessory_(legal_term) + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + accessory in + + + + + + + + + + + + An agent a1 is accomplice_in some Processual Entity p1 iff a1 assists in the commission of p1, is located at the location of p1, but is not agent_in p1. + https://en.wikipedia.org/w/index.php?title=Accomplice&oldid=1002047204 + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + accomplice in + + + + + + + + + + + + p affects c iff p is an instance of a Process and c is an instance of a Continuant, such that p influences c in some manner, most often by producing a change in c. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + affects + + + + + + + + + + + + + + + + + + x aggregate_bearer_of y iff x is an instance of Object Aggregate and y is an instance of Specifically Dependent Continuant and z is an instance of Object, such that z bearer of y, and all other members of x are bearers of a unique instance of the same type as y. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + aggregate bearer of + + + + + + + + + + + + x aggregate_has_disposition y iff x is an instance of Object Aggregate and y is an instance of Disposition, such that x aggregate_bearer_of y. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + aggregate has disposition + + + + + + + + + + + x aggregate_has_quality y iff x is an instance of Object Aggregate and y is an instance of Quality, such that x aggregate_bearer_of y. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + aggregate has quality + + + + + + + + + + + x aggregate_has_role y iff x is an instance of Object Aggregate and y is an instance of Role, such that x aggregate_bearer_of y. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + aggregate has role + + + + + + + + + + + x caused_by y iff x and y are instances of occurrents, and x is a consequence of y. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + caused by + + + + + + + + + + + x disposition_of_aggregate y iff y is an instance of Object Aggregate and x is an instance of Disposition, such that x disposition_of_aggregate y. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + disposition of aggregate + + + + + + + + + + + 2022-12-30T21:32:27-05:00 + https://cubrc.org + A relation where one process disrupts another process from occurring as it would have. + A process can disrupt another process from occurring as it would have by 1) preventing a disposition or role from being realized by that process, 2) lowering the grade of the process, or 3) stopping the process from continuing to occur. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/AgentOntology + disrupts + To lower the grade of a process is to lower the quality of a process according to some standard, for example when realizing a capability or a function. 
+ disrupts + + + + + + + + + + + x has_accessory y iff x is an instance of Process and y is an instance of Agent, such that y assists another agent in the commission of x, and y was not located at the location of x when x occurred, and y was not an agent_in x. + http://en.wikipedia.org/wiki/Accessory_(legal_term) + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + has accessory + + + + + + + + + + + A Processual Entity p1 has_accomplice some agent a1 iff a1 assists in the commission of p1, is located at the location of p1, but is not agent_in p1. + https://en.wikipedia.org/w/index.php?title=Accomplice&oldid=1002047204 + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + has accomplice + + + + + + + + + + + + y has_input x iff x is an instance of Continuant and y is an instance of Process, such that the presence of x at the beginning of y is a necessary condition for the start of y. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + has input + + + + + + + + + + An instance of an Object Aggregate 'has member of located in' an instance of some material entity if and only if every member of that Aggregate is located in the same instance of that material entity. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + has member of located in + + + + + + + + + + If p is a process and c is a continuant, then p has object c if and only if the c is part of the projected state that the agent intends to achieve by performing p. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + has object + + + + + + + + + + + + y has_output x iff x is an instance of Continuant and y is an instance of Process, such that the presence of x at the end of y is a necessary condition for the completion of y. + https://en.wikipedia.org/w/index.php?title=IPO_model&oldid=1024398398 + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + has output + + + + + + + + + + + + x has_process_part y iff x and y are instances of Process, such that y occurs during the temporal interval of x, and y either provides an input to x or receives an output of x, or both. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + has process part + + + + + + + + + + + + + + + + + x inheres_in_aggregate y iff x is an instance of Specifically Dependent Continuant and y is an instance of Object Aggregate and z is an instance of Object, such that z bearer_of x, and all other members of y are bearers of a unique instance of the same type as x. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + inheres in aggregate + + + + + + + + + + + c is_affected_by p iff p is an instance of a Process and c is an instance of a Continuant, such that p influences c in some manner, most often by producing a change in c. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + is affected by + + + + + + + + + + x is_cause_of y iff x and y are instances of occurrents, and y is a consequence of x. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + is cause of + + + + + + + + + + 2022-12-30T21:32:27-05:00 + https://cubrc.org + Inverse of disrupts. 
+ http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + is disrupted by + is disrupted by + + + + + + + + + + + x is_input_of y iff x is an instance of Continuant and y is an instance of Process, such that the presence of x at the beginning of y is a necessary condition for the start of y. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + is input of + + + + + + + + + + + + An object o is made of an object m when m is the material that o consists of and that material does not undergo a change of kind during the creation of o + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + is made of + + + + + + + + + + An object m is material of an object o when m is the material of which o consists and that material does not undergo a change of kind during the creation of o + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + is material of + + + + + + + + + If p is a process and c is a continuant, then c is object of p if and only if the c is part of the projected state that the agent intends to achieve by performing p. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + is object of + + + + + + + + + + + x is_output_of y iff x is an instance of Continuant and y is an instance of Process, such that the presence of x at the end of y is a necessary condition for the completion of y. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + is output of + + + + + + + + + + + x is_part_of_process y iff x and y are instances of Process, such that x occurs during the temporal interval of y, and x either provides an input to y or receives an output of y. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + is part of process + + + + + + + + + + + A continuant c1 is a predecessor of some continuant c2 iff there is some process p1 and c1 is an input to p1 and c2 is an output of p1. + More informally, c1 is a predecessor of c2 iff c1 has been followed or replaced by c2. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + is predecessor of + + + + + + + + + + + + x is_site_of y iff x is an instance of Site and y is an instance of Process, such that y occurs in x. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + is site of + + + + + + + + + + A continuant c2 is a successor of some continuant c1 iff there is some process p1 and c1 is an input to p1 and c2 is an output of p1. Inverse of is predecessor. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + is successor of + + + + + + + + + + + + + + + + + x is_temporal_region_of y iff y is an instance of a process or process boundary and x is an instance of a temporal region, such that the duration of x temporally projects on y. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + is temporal region of + Leaving this is in ERO for now since BFO2020 has no inverse of occupies-temporal-region yet. + + + + + + + + + + + x occurs_at y iff x is an instance of Process and y is an instance of Site, such that x occurs in y. 
+ http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + occurs at + + + + + + + + + + x process_started_by y iff x and y are instances of processes, and x is caused_by y, and i is an instance of a temporal instant, and r is an instant of a temporal interval, and x has starting instance i, and y occurs on r, and r interval contains i. + A process x is started by another process y when y causes x while y is still occurring. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + process started by + + + + + + + + + x process_starts y iff x and y are instances of processes, and x is_cause_of y, and i is an instance of a temporal instant, and r is an instant of a temporal interval, and y has starting instance i, and x occurs on r, and r interval contains i. + A process x starts another process y when x causes y while x is still occurring. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + process starts + + + + + + + + + + + x quality_of_aggregate y iff y is an instance of Object Aggregate and x is an instance of Quality, such that x disposition_of_aggregate y. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + quality of aggregate + + + + + + + + + + + x role_of_aggregate y iff y is an instance of Object Aggregate and x is an instance of Role, such that x disposition_of_aggregate y. + http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology + role of aggregate + + + + + + + diff --git a/dataload/rdf2json/src/main/resources/result.ttl b/dataload/rdf2json/src/main/resources/result.ttl new file mode 100644 index 000000000..76c66efb0 --- /dev/null +++ b/dataload/rdf2json/src/main/resources/result.ttl @@ -0,0 +1,1529 @@ +@base . +@prefix : . +@prefix dc: . +@prefix owl: . +@prefix rdf: . +@prefix xml: . +@prefix xsd: . +@prefix dc11: . +@prefix rdfs: . +@prefix skos: . 
+# +# +# ################################################################# +# # +# # Annotation properties +# # +# ################################################################# +# +# +# http://purl.org/dc/elements/1.1/contributor +# +# http://purl.org/dc/elements/1.1/identifier +# +# http://purl.org/dc/elements/1.1/license +# +# http://purl.org/dc/terms/description +# +# http://purl.org/dc/terms/license +# +# http://purl.org/dc/terms/title +# +# http://www.w3.org/2004/02/skos/core#altLabel +# +# http://www.w3.org/2004/02/skos/core#definition +# +# http://www.w3.org/2004/02/skos/core#example +# +# http://www.w3.org/2004/02/skos/core#prefLabel +# +# http://www.w3.org/2004/02/skos/core#scopeNote +# +# +# +# ################################################################# +# # +# # Object Properties +# # +# ################################################################# +# +# +# http://purl.obolibrary.org/obo/BFO_0000054 +# +# http://purl.obolibrary.org/obo/BFO_0000055 +# +# http://purl.obolibrary.org/obo/BFO_0000056 +# +# http://purl.obolibrary.org/obo/BFO_0000057 +# +# http://purl.obolibrary.org/obo/BFO_0000058 +# +# http://purl.obolibrary.org/obo/BFO_0000059 +# +# http://purl.obolibrary.org/obo/BFO_0000062 +# +# http://purl.obolibrary.org/obo/BFO_0000063 +# +# http://purl.obolibrary.org/obo/BFO_0000066 +# +# http://purl.obolibrary.org/obo/BFO_0000084 +# +# http://purl.obolibrary.org/obo/BFO_0000101 +# +# http://purl.obolibrary.org/obo/BFO_0000108 +# +# http://purl.obolibrary.org/obo/BFO_0000115 +# +# http://purl.obolibrary.org/obo/BFO_0000117 +# +# http://purl.obolibrary.org/obo/BFO_0000121 +# +# http://purl.obolibrary.org/obo/BFO_0000124 +# +# http://purl.obolibrary.org/obo/BFO_0000127 +# +# http://purl.obolibrary.org/obo/BFO_0000129 +# +# http://purl.obolibrary.org/obo/BFO_0000132 +# +# http://purl.obolibrary.org/obo/BFO_0000139 +# +# http://purl.obolibrary.org/obo/BFO_0000153 +# +# http://purl.obolibrary.org/obo/BFO_0000171 +# +# http://purl.obolibrary.org/obo/BFO_0000176 +# +# http://purl.obolibrary.org/obo/BFO_0000178 +# +# http://purl.obolibrary.org/obo/BFO_0000183 +# +# http://purl.obolibrary.org/obo/BFO_0000184 +# +# http://purl.obolibrary.org/obo/BFO_0000185 +# +# http://purl.obolibrary.org/obo/BFO_0000194 +# +# http://purl.obolibrary.org/obo/BFO_0000195 +# +# http://purl.obolibrary.org/obo/BFO_0000196 +# +# http://purl.obolibrary.org/obo/BFO_0000197 +# +# http://purl.obolibrary.org/obo/BFO_0000199 +# +# http://purl.obolibrary.org/obo/BFO_0000200 +# +# http://purl.obolibrary.org/obo/BFO_0000210 +# +# http://purl.obolibrary.org/obo/BFO_0000216 +# +# http://purl.obolibrary.org/obo/BFO_0000218 +# +# http://purl.obolibrary.org/obo/BFO_0000221 +# +# http://purl.obolibrary.org/obo/BFO_0000222 +# +# http://purl.obolibrary.org/obo/BFO_0000223 +# +# http://purl.obolibrary.org/obo/BFO_0000224 +# +# +# +# ################################################################# +# # +# # Classes +# # +# ################################################################# +# +# +# http://purl.obolibrary.org/obo/BFO_0000001 +# +# http://purl.obolibrary.org/obo/BFO_0000002 +# +# http://purl.obolibrary.org/obo/BFO_0000003 +# +# http://purl.obolibrary.org/obo/BFO_0000004 +# +# http://purl.obolibrary.org/obo/BFO_0000006 +# +# http://purl.obolibrary.org/obo/BFO_0000008 +# +# http://purl.obolibrary.org/obo/BFO_0000009 +# +# http://purl.obolibrary.org/obo/BFO_0000011 +# +# http://purl.obolibrary.org/obo/BFO_0000015 +# +# http://purl.obolibrary.org/obo/BFO_0000016 +# +# 
http://purl.obolibrary.org/obo/BFO_0000017 +# +# http://purl.obolibrary.org/obo/BFO_0000018 +# +# http://purl.obolibrary.org/obo/BFO_0000019 +# +# http://purl.obolibrary.org/obo/BFO_0000020 +# +# http://purl.obolibrary.org/obo/BFO_0000023 +# +# http://purl.obolibrary.org/obo/BFO_0000024 +# +# http://purl.obolibrary.org/obo/BFO_0000026 +# +# http://purl.obolibrary.org/obo/BFO_0000027 +# +# http://purl.obolibrary.org/obo/BFO_0000028 +# +# http://purl.obolibrary.org/obo/BFO_0000029 +# +# http://purl.obolibrary.org/obo/BFO_0000030 +# +# http://purl.obolibrary.org/obo/BFO_0000031 +# +# http://purl.obolibrary.org/obo/BFO_0000034 +# +# http://purl.obolibrary.org/obo/BFO_0000035 +# +# http://purl.obolibrary.org/obo/BFO_0000038 +# +# http://purl.obolibrary.org/obo/BFO_0000040 +# +# http://purl.obolibrary.org/obo/BFO_0000140 +# +# http://purl.obolibrary.org/obo/BFO_0000141 +# +# http://purl.obolibrary.org/obo/BFO_0000142 +# +# http://purl.obolibrary.org/obo/BFO_0000145 +# +# http://purl.obolibrary.org/obo/BFO_0000146 + + a owl:Ontology; + owl:versionIRI ; + dc11:contributor "Alan Ruttenberg", "Albert Goldfain", "Barry Smith", "Bill Duncan", + "Bjoern Peters", "Chris Mungall", "David Osumi-Sutherland", "Fabian Neuhaus", "James A. Overton", + "Janna Hastings", "Jie Zheng", "John Beverley", "Jonathan Bona", "Larry Hunter", "Leonard Jacuzzo", + "Ludger Jansen", "Mark Jensen", "Mark Ressler", "Mathias Brochhausen", "Mauricio Almeida", + "Melanie Courtot", "Neil Otte", "Pierre Grenon", "Randall Dipert", "Robert Rovetto", + "Ron Rudnicki", "Stefan Schulz", "Thomas Bittner", "Werner Ceusters", "Yongqun \"Oliver\" He"; + dc:description "Basic Formal Ontology implemented in the Web Ontology Language (OWL 2) with direct semantics."@en; + dc:license ; + dc:title "BFO 2020"; + rdfs:comment "The most recent version of this file will always be in the GitHub repository https://github.com/bfo-ontology/bfo-2020" . + +dc11:contributor a owl:AnnotationProperty . + +dc11:identifier a owl:AnnotationProperty . + +dc11:license a owl:AnnotationProperty . + +dc:description a owl:AnnotationProperty . + +dc:license a owl:AnnotationProperty . + +dc:title a owl:AnnotationProperty . + +skos:altLabel a owl:AnnotationProperty . + +skos:definition a owl:AnnotationProperty . + +skos:example a owl:AnnotationProperty . + +skos:prefLabel a owl:AnnotationProperty . + +skos:scopeNote a owl:AnnotationProperty . + + a owl:ObjectProperty; + owl:inverseOf ; + rdfs:domain ; + rdfs:range ; + dc11:identifier "206-BFO"; + rdfs:label "has realization"@en; + skos:altLabel "realized in"@en; + skos:definition "b has realization c =Def c realizes b"@en; + skos:example "As for realizes"@en . + + a owl:ObjectProperty; + rdfs:domain ; + rdfs:range ; + dc11:identifier "059-BFO"; + rdfs:label "realizes"@en; + skos:definition "(Elucidation) realizes is a relation between a process b and realizable entity c such that c inheres in some d & for all t, if b has participant d then c exists & the type instantiated by b is correlated with the type instantiated by c"@en; + skos:example "A balding process realizes a disposition to go bald; a studying process realizes a student role; a process of pumping blood realizes the pumping function of a heart"@en; + skos:scopeNote "Users that require more sophisticated representations of time are encouraged to import a temporal extension of BFO-Core provided by the BFO development team. See documentation for guidance: "@en . 
+ + a owl:ObjectProperty; + owl:inverseOf ; + rdfs:domain _:genid1; + rdfs:range ; + dc11:identifier "250-BFO"; + rdfs:label "participates in"@en; + skos:definition "(Elucidation) participates in holds between some b that is either a specifically dependent continuant or generically dependent continuant or independent continuant that is not a spatial region & some process p such that b participates in p some way"@en; + skos:scopeNote "Users that require more sophisticated representations of time are encouraged to import a temporal extension of BFO-Core provided by the BFO development team. See documentation for guidance: "@en . + +_:genid1 a owl:Class; + owl:unionOf _:genid8 . + +_:genid8 a rdf:List; + rdf:first ; + rdf:rest _:genid7 . + +_:genid7 a rdf:List; + rdf:first ; + rdf:rest _:genid2 . + +_:genid2 a rdf:List; + rdf:first _:genid3; + rdf:rest rdf:nil . + +_:genid3 a owl:Class; + owl:intersectionOf _:genid6 . + +_:genid6 a rdf:List; + rdf:first ; + rdf:rest _:genid4 . + +_:genid4 a rdf:List; + rdf:first _:genid5; + rdf:rest rdf:nil . + +_:genid5 a owl:Class; + owl:complementOf . + + a owl:ObjectProperty; + rdfs:domain ; + rdfs:range _:genid9; + dc11:identifier "248-BFO"; + rdfs:label "has participant"@en; + skos:definition "p has participant c =Def c participates in p"@en; + skos:scopeNote "Users that require more sophisticated representations of time are encouraged to import a temporal extension of BFO-Core provided by the BFO development team. See documentation for guidance: "@en . + +_:genid9 a owl:Class; + owl:unionOf _:genid16 . + +_:genid16 a rdf:List; + rdf:first ; + rdf:rest _:genid15 . + +_:genid15 a rdf:List; + rdf:first ; + rdf:rest _:genid10 . + +_:genid10 a rdf:List; + rdf:first _:genid11; + rdf:rest rdf:nil . + +_:genid11 a owl:Class; + owl:intersectionOf _:genid14 . + +_:genid14 a rdf:List; + rdf:first ; + rdf:rest _:genid12 . + +_:genid12 a rdf:List; + rdf:first _:genid13; + rdf:rest rdf:nil . + +_:genid13 a owl:Class; + owl:complementOf . + + a owl:ObjectProperty; + owl:inverseOf ; + rdfs:domain ; + rdfs:range _:genid17; + dc11:identifier "258-BFO"; + rdfs:label "is concretized by"@en; + skos:definition "c is concretized by b =Def b concretizes c"@en; + skos:scopeNote "Users that require more sophisticated representations of time are encouraged to import a temporal extension of BFO-Core provided by the BFO development team. See documentation for guidance: "@en . + +_:genid17 a owl:Class; + owl:unionOf _:genid19 . + +_:genid19 a rdf:List; + rdf:first ; + rdf:rest _:genid18 . + +_:genid18 a rdf:List; + rdf:first ; + rdf:rest rdf:nil . + + a owl:ObjectProperty; + rdfs:domain _:genid20; + rdfs:range ; + dc11:identifier "256-BFO"; + rdfs:label "concretizes"@en; + skos:definition "b concretizes c =Def b is a process or a specifically dependent continuant & c is a generically dependent continuant & there is some time t such that c is the pattern or content which b shares at t with actual or potential copies"@en; + skos:scopeNote "Users that require more sophisticated representations of time are encouraged to import a temporal extension of BFO-Core provided by the BFO development team. See documentation for guidance: "@en . + +_:genid20 a owl:Class; + owl:unionOf _:genid22 . + +_:genid22 a rdf:List; + rdf:first ; + rdf:rest _:genid21 . + +_:genid21 a rdf:List; + rdf:first ; + rdf:rest rdf:nil . 
+
+ a owl:ObjectProperty, owl:TransitiveProperty;
+  owl:inverseOf ;
+  rdfs:domain ;
+  rdfs:range ;
+  dc11:identifier "213-BFO";
+  rdfs:label "preceded by"@en;
+  skos:definition "b preceded by c =Def c precedes b"@en;
+  skos:example "The temporal region occupied by the second half of the match is preceded by the temporal region occupied by the first half of the match"@en .
+
+ a owl:ObjectProperty, owl:TransitiveProperty;
+  rdfs:domain ;
+  rdfs:range ;
+  dc11:identifier "270-BFO";
+  rdfs:label "precedes"@en;
+  skos:definition "(Elucidation) precedes is a relation between occurrents o, o' such that if t is the temporal extent of o & t' is the temporal extent of o' then either the last instant of o is before the first instant of o' or the last instant of o is the first instant of o' & neither o nor o' are temporal instants"@en;
+  skos:example "The temporal region occupied by Mary's birth precedes the temporal region occupied by Mary's death."@en;
+  skos:scopeNote "Each temporal region is its own temporal extent. The temporal extent of a spatiotemporal region is the temporal region it temporally projects onto. The temporal extent of a process or process boundary that occupies temporal region t is t.",
+    "Precedes defines a strict partial order on occurrents." .
+
+ a owl:ObjectProperty;
+  owl:inverseOf ;
+  rdfs:domain _:genid23;
+  rdfs:range _:genid26;
+  dc11:identifier "143-BFO";
+  rdfs:label "occurs in"@en;
+  skos:definition "b occurs in c =Def b is a process or a process boundary & c is a material entity or site & there exists a spatiotemporal region r & b occupies spatiotemporal region r & for all time t, if b exists at t then c exists at t & there exist spatial regions s and s' where b spatially projects onto s at t & c occupies spatial region s' at t & s is a continuant part of s' at t"@en;
+  skos:example "A process of digestion occurs in the interior of an organism; a process of loading artillery rounds into a tank cannon occurs in the interior of the tank"@en .
+
+_:genid23 a owl:Class;
+  owl:unionOf _:genid25 .
+
+_:genid25 a rdf:List;
+  rdf:first ;
+  rdf:rest _:genid24 .
+
+_:genid24 a rdf:List;
+  rdf:first ;
+  rdf:rest rdf:nil .
+
+_:genid26 a owl:Class;
+  owl:unionOf _:genid28 .
+
+_:genid28 a rdf:List;
+  rdf:first ;
+  rdf:rest _:genid27 .
+
+_:genid27 a rdf:List;
+  rdf:first ;
+  rdf:rest rdf:nil .
+
+ a owl:ObjectProperty;
+  owl:inverseOf ;
+  rdfs:domain ;
+  rdfs:range _:genid29;
+  dc11:identifier "252-BFO";
+  rdfs:label "generically depends on"@en;
+  skos:altLabel "g-depends on"@en;
+  skos:definition "b generically depends on c =Def b is a generically dependent continuant & c is an independent continuant that is not a spatial region & at some time t there inheres in c a specifically dependent continuant which concretizes b at t"@en;
+  skos:scopeNote "Users that require more sophisticated representations of time are encouraged to import a temporal extension of BFO-Core provided by the BFO development team. See documentation for guidance: "@en .
+
+_:genid29 a owl:Class;
+  owl:intersectionOf _:genid32 .
+
+_:genid32 a rdf:List;
+  rdf:first ;
+  rdf:rest _:genid30 .
+
+_:genid30 a rdf:List;
+  rdf:first _:genid31;
+  rdf:rest rdf:nil .
+
+_:genid31 a owl:Class;
+  owl:complementOf .
+ + a owl:ObjectProperty; + rdfs:domain _:genid33; + rdfs:range ; + dc11:identifier "254-BFO"; + rdfs:label "is carrier of"@en; + skos:definition "b is carrier of c =Def there is some time t such that c generically depends on b at t"@en; + skos:scopeNote "Users that require more sophisticated representations of time are encouraged to import a temporal extension of BFO-Core provided by the BFO development team. See documentation for guidance: "@en . + +_:genid33 a owl:Class; + owl:intersectionOf _:genid36 . + +_:genid36 a rdf:List; + rdf:first ; + rdf:rest _:genid34 . + +_:genid34 a rdf:List; + rdf:first _:genid35; + rdf:rest rdf:nil . + +_:genid35 a owl:Class; + owl:complementOf . + + a owl:ObjectProperty; + rdfs:domain ; + rdfs:range ; + dc11:identifier "118-BFO"; + rdfs:label "exists at"@en; + skos:definition "(Elucidation) exists at is a relation between a particular and some temporal region at which the particular exists"@en; + skos:example "First World War exists at 1914-1916; Mexico exists at January 1, 2000"@en . + + a owl:ObjectProperty; + rdfs:subPropertyOf ; + owl:inverseOf ; + rdfs:domain ; + rdfs:range ; + dc11:identifier "230-BFO"; + rdfs:label "has member part"@en; + skos:definition "b has member part c =Def c member part of b"@en; + skos:scopeNote "Users that require more sophisticated representations of time are encouraged to import a temporal extension of BFO-Core provided by the BFO development team. See documentation for guidance: "@en . + + a owl:ObjectProperty, owl:TransitiveProperty; + owl:inverseOf ; + rdfs:domain ; + rdfs:range ; + dc11:identifier "202-BFO"; + rdfs:label "has occurrent part"@en; + skos:definition "b has occurrent part c =Def c occurrent part of b"@en; + skos:example "Mary's life has occurrent part Mary's 5th birthday"@en . + + a owl:ObjectProperty, owl:TransitiveProperty; + rdfs:subPropertyOf ; + owl:inverseOf ; + rdfs:domain ; + rdfs:range ; + dc11:identifier "211-BFO"; + rdfs:label "has temporal part"@en; + skos:definition "b has temporal part c =Def c temporal part of b"@en; + skos:example "Your life has temporal part the first year of your life"@en . + + a owl:ObjectProperty; + owl:inverseOf ; + rdfs:domain _:genid37; + rdfs:range _:genid41; + dc11:identifier "236-BFO"; + rdfs:label "location of"@en; + skos:definition "b location of c =Def c located in b"@en; + skos:scopeNote "Users that require more sophisticated representations of time are encouraged to import a temporal extension of BFO-Core provided by the BFO development team. See documentation for guidance: "@en . + +_:genid37 a owl:Class; + owl:intersectionOf _:genid40 . + +_:genid40 a rdf:List; + rdf:first ; + rdf:rest _:genid38 . + +_:genid38 a rdf:List; + rdf:first _:genid39; + rdf:rest rdf:nil . + +_:genid39 a owl:Class; + owl:complementOf . + +_:genid41 a owl:Class; + owl:intersectionOf _:genid44 . + +_:genid44 a rdf:List; + rdf:first ; + rdf:rest _:genid42 . + +_:genid42 a rdf:List; + rdf:first _:genid43; + rdf:rest rdf:nil . + +_:genid43 a owl:Class; + owl:complementOf . + + a owl:ObjectProperty; + owl:inverseOf ; + rdfs:domain ; + rdfs:range ; + dc11:identifier "244-BFO"; + rdfs:label "material basis of"@en; + skos:definition "b material basis of c =Def c has material basis b"@en; + skos:scopeNote "Users that require more sophisticated representations of time are encouraged to import a temporal extension of BFO-Core provided by the BFO development team. See documentation for guidance: "@en . 
+
+ a owl:ObjectProperty;
+  rdfs:subPropertyOf ;
+  rdfs:domain ;
+  rdfs:range ;
+  dc11:identifier "228-BFO";
+  rdfs:label "member part of"@en;
+  skos:definition "b member part of c =Def b is an object & c is a material entity & there is some time t such that b continuant part of c at t & there is a mutually exhaustive and pairwise disjoint partition of c into objects x1, ..., xn (for some n ≠ 1) with b = xi (for some 1 <= i <= n)"@en;
+  skos:scopeNote "Users that require more sophisticated representations of time are encouraged to import a temporal extension of BFO-Core provided by the BFO development team. See documentation for guidance: "@en .
+
+ a owl:ObjectProperty, owl:TransitiveProperty;
+  rdfs:domain ;
+  rdfs:range ;
+  dc11:identifier "003-BFO";
+  rdfs:label "occurrent part of"@en;
+  skos:definition "(Elucidation) occurrent part of is a relation between occurrents b and c when b is part of c"@en;
+  skos:example "Mary's 5th birthday is an occurrent part of Mary's life; the first set of the tennis match is an occurrent part of the tennis match"@en .
+
+ a owl:ObjectProperty, owl:TransitiveProperty;
+  rdfs:subPropertyOf ;
+  rdfs:domain ;
+  rdfs:range ;
+  dc11:identifier "078-BFO";
+  rdfs:label "temporal part of"@en;
+  skos:definition "b temporal part of c =Def b occurrent part of c & (b and c are temporal regions) or (b and c are spatiotemporal regions & b temporally projects onto an occurrent part of the temporal region that c temporally projects onto) or (b and c are processes or process boundaries & b occupies a temporal region that is an occurrent part of the temporal region that c occupies)"@en;
+  skos:example "Your heart beating from 4pm to 5pm today is a temporal part of the process of your heart beating; the 4th year of your life is a temporal part of your life, as is the process boundary which separates the 3rd and 4th years of your life; the first quarter of a game of football is a temporal part of the whole game"@en .
+
+ a owl:ObjectProperty, owl:FunctionalProperty;
+  rdfs:domain ;
+  rdfs:range ;
+  dc11:identifier "080-BFO";
+  rdfs:label "temporally projects onto"@en;
+  skos:definition "(Elucidation) temporally projects onto is a relation between a spatiotemporal region s and some temporal region which is the temporal extent of s"@en;
+  skos:example "The world line of a particle temporally projects onto the temporal region extending from the beginning to the end of the existence of the particle"@en .
+
+ a owl:ObjectProperty;
+  rdfs:domain _:genid45;
+  rdfs:range _:genid49;
+  dc11:identifier "234-BFO";
+  rdfs:label "located in"@en;
+  skos:definition "b located in c =Def b is an independent continuant & c is an independent continuant & neither is a spatial region & there is some time t such that the spatial region which b occupies at t is continuant part of the spatial region which c occupies at t"@en;
+  skos:scopeNote "Users that require more sophisticated representations of time are encouraged to import a temporal extension of BFO-Core provided by the BFO development team. See documentation for guidance: "@en .
+
+_:genid45 a owl:Class;
+  owl:intersectionOf _:genid48 .
+
+_:genid48 a rdf:List;
+  rdf:first ;
+  rdf:rest _:genid46 .
+
+_:genid46 a rdf:List;
+  rdf:first _:genid47;
+  rdf:rest rdf:nil .
+
+_:genid47 a owl:Class;
+  owl:complementOf .
+
+_:genid49 a owl:Class;
+  owl:intersectionOf _:genid52 .
+
+_:genid52 a rdf:List;
+  rdf:first ;
+  rdf:rest _:genid50 .
+
+_:genid50 a rdf:List;
+  rdf:first _:genid51;
+  rdf:rest rdf:nil .
+
+_:genid51 a owl:Class;
+  owl:complementOf .
+ + a owl:ObjectProperty; + owl:inverseOf ; + rdfs:domain ; + rdfs:range ; + dc11:identifier "221-BFO"; + rdfs:label "continuant part of"@en; + skos:definition "b continuant part of c =Def b and c are continuants & there is some time t such that b and c exist at t & b continuant part of c at t"@en; + skos:example "Milk teeth continuant part of human; surgically removed tumour continuant part of organism"@en; + skos:scopeNote "Users that require more sophisticated representations of time are encouraged to import a temporal extension of BFO-Core provided by the BFO development team. See documentation for guidance: "@en . + + a owl:ObjectProperty; + rdfs:domain ; + rdfs:range ; + dc11:identifier "271-BFO"; + rdfs:label "has continuant part"@en; + skos:definition "b has continuant part c =Def c continuant part of b"@en; + skos:scopeNote "Users that require more sophisticated representations of time are encouraged to import a temporal extension of BFO-Core provided by the BFO development team. See documentation for guidance: "@en . + + a owl:ObjectProperty; + rdfs:domain _:genid53; + rdfs:range _:genid56; + dc11:identifier "267-BFO"; + rdfs:label "environs"@en; + skos:altLabel "contains process"@en; + skos:definition "b environs c =Def c occurs in b"@en; + skos:example "Mouth environs process of mastication; city environs traffic"@en . + +_:genid53 a owl:Class; + owl:unionOf _:genid55 . + +_:genid55 a rdf:List; + rdf:first ; + rdf:rest _:genid54 . + +_:genid54 a rdf:List; + rdf:first ; + rdf:rest rdf:nil . + +_:genid56 a owl:Class; + owl:unionOf _:genid58 . + +_:genid58 a rdf:List; + rdf:first ; + rdf:rest _:genid57 . + +_:genid57 a rdf:List; + rdf:first ; + rdf:rest rdf:nil . + + a owl:ObjectProperty, owl:FunctionalProperty, owl:InverseFunctionalProperty; + owl:inverseOf ; + rdfs:domain ; + rdfs:range ; + dc11:identifier "144-BFO"; + rdfs:label "history of"@en; + skos:definition "(Elucidation) history of is a relation between history b and material entity c such that b is the unique history of c"@en; + skos:example "This life is the history of this organism"@en . + + a owl:ObjectProperty; + rdfs:domain ; + rdfs:range ; + dc11:identifier "145-BFO"; + rdfs:label "has history"@en; + skos:definition "b has history c =Def c history of b"@en; + skos:example "This organism has history this life"@en . + + a owl:ObjectProperty; + owl:inverseOf ; + rdfs:domain _:genid59; + rdfs:range ; + dc11:identifier "260-BFO"; + rdfs:label "specifically depended on by"@en; + skos:altLabel "s-depended on by"@en; + skos:definition "b specifically depended on by c =Def c specifically depends on b"@en; + skos:example "Coloured object specifically depended on by colour"@en . + +_:genid59 a owl:Class; + owl:unionOf _:genid65 . + +_:genid65 a rdf:List; + rdf:first ; + rdf:rest _:genid60 . + +_:genid60 a rdf:List; + rdf:first _:genid61; + rdf:rest rdf:nil . + +_:genid61 a owl:Class; + owl:intersectionOf _:genid64 . + +_:genid64 a rdf:List; + rdf:first ; + rdf:rest _:genid62 . + +_:genid62 a rdf:List; + rdf:first _:genid63; + rdf:rest rdf:nil . + +_:genid63 a owl:Class; + owl:complementOf . 
+ + a owl:ObjectProperty; + rdfs:domain ; + rdfs:range _:genid66; + dc11:identifier "012-BFO"; + rdfs:label "specifically depends on"@en; + skos:altLabel "s-depends on"@en; + skos:definition "(Elucidation) specifically depends on is a relation between a specifically dependent continuant b and specifically dependent continuant or independent continuant that is not a spatial region c such that b and c share no parts in common & b is of a nature such that at all times t it cannot exist unless c exists & b is not a boundary of c"@en; + skos:example "A shape specifically depends on the shaped object; hue, saturation and brightness of a colour sample specifically depends on each other"@en; + skos:scopeNote "The analogue of specifically depends on for occurrents is has participant."@en . + +_:genid66 a owl:Class; + owl:unionOf _:genid72 . + +_:genid72 a rdf:List; + rdf:first ; + rdf:rest _:genid67 . + +_:genid67 a rdf:List; + rdf:first _:genid68; + rdf:rest rdf:nil . + +_:genid68 a owl:Class; + owl:intersectionOf _:genid71 . + +_:genid71 a rdf:List; + rdf:first ; + rdf:rest _:genid69 . + +_:genid69 a rdf:List; + rdf:first _:genid70; + rdf:rest rdf:nil . + +_:genid70 a owl:Class; + owl:complementOf . + + a owl:ObjectProperty; + rdfs:subPropertyOf ; + owl:inverseOf ; + rdfs:domain _:genid73; + rdfs:range ; + dc11:identifier "053-BFO"; + rdfs:label "bearer of"@en; + skos:definition "b bearer of c =Def c inheres in b"@en; + skos:example "A patch of ink is the bearer of a colour quality; an organism is the bearer of a temperature quality"@en . + +_:genid73 a owl:Class; + owl:intersectionOf _:genid76 . + +_:genid76 a rdf:List; + rdf:first ; + rdf:rest _:genid74 . + +_:genid74 a rdf:List; + rdf:first _:genid75; + rdf:rest rdf:nil . + +_:genid75 a owl:Class; + owl:complementOf . + + a owl:ObjectProperty; + rdfs:subPropertyOf ; + rdfs:domain ; + rdfs:range _:genid77; + dc11:identifier "051-BFO"; + rdfs:label "inheres in"@en; + skos:definition "b inheres in c =Def b is a specifically dependent continuant & c is an independent continuant that is not a spatial region & b specifically depends on c"@en; + skos:example "A shape inheres in a shaped object; a mass inheres in a material entity"@en . + +_:genid77 a owl:Class; + owl:intersectionOf _:genid80 . + +_:genid80 a rdf:List; + rdf:first ; + rdf:rest _:genid78 . + +_:genid78 a rdf:List; + rdf:first _:genid79; + rdf:rest rdf:nil . + +_:genid79 a owl:Class; + owl:complementOf . + + a owl:ObjectProperty, owl:FunctionalProperty; + rdfs:domain _:genid81; + rdfs:range ; + dc11:identifier "132-BFO"; + rdfs:label "occupies temporal region"@en; + skos:definition "p occupies temporal region t =Def p is a process or process boundary & the spatiotemporal region occupied by p temporally projects onto t"@en; + skos:example "The Second World War occupies the temporal region September 1, 1939 - September 2, 1945"@en . + +_:genid81 a owl:Class; + owl:unionOf _:genid83 . + +_:genid83 a rdf:List; + rdf:first ; + rdf:rest _:genid82 . + +_:genid82 a rdf:List; + rdf:first ; + rdf:rest rdf:nil . + + a owl:ObjectProperty, owl:FunctionalProperty; + rdfs:domain _:genid84; + rdfs:range ; + dc11:identifier "082-BFO"; + rdfs:label "occupies spatiotemporal region"@en; + skos:definition "(Elucidation) occupies spatiotemporal region is a relation between a process or process boundary p and the spatiotemporal region s which is its spatiotemporal extent"@en; + skos:example "A particle emitted by a nuclear reactor occupies the spatiotemporal region which is its trajectory"@en . 
+
+_:genid84 a owl:Class;
+  owl:unionOf _:genid86 .
+
+_:genid86 a rdf:List;
+  rdf:first ;
+  rdf:rest _:genid85 .
+
+_:genid85 a rdf:List;
+  rdf:first ;
+  rdf:rest rdf:nil .
+
+ a owl:ObjectProperty;
+  rdfs:domain _:genid87;
+  rdfs:range ;
+  dc11:identifier "232-BFO";
+  rdfs:label "occupies spatial region"@en;
+  skos:definition "b occupies spatial region r =Def b is an independent continuant that is not a spatial region & r is a spatial region & there is some time t such that every continuant part of b occupies some continuant part of r at t and no continuant part of b occupies any spatial region that is not a continuant part of r at t"@en;
+  skos:scopeNote "Users that require more sophisticated representations of time are encouraged to import a temporal extension of BFO-Core provided by the BFO development team. See documentation for guidance: "@en .
+
+_:genid87 a owl:Class;
+  owl:intersectionOf _:genid90 .
+
+_:genid90 a rdf:List;
+  rdf:first ;
+  rdf:rest _:genid88 .
+
+_:genid88 a rdf:List;
+  rdf:first _:genid89;
+  rdf:rest rdf:nil .
+
+_:genid89 a owl:Class;
+  owl:complementOf .
+
+ a owl:ObjectProperty;
+  rdfs:domain ;
+  rdfs:range ;
+  dc11:identifier "246-BFO";
+  rdfs:label "spatially projects onto"@en;
+  skos:definition "(Elucidation) spatially projects onto is a relation between some spatiotemporal region b and spatial region c such that at some time t, c is the spatial extent of b at t"@en;
+  skos:scopeNote "Users that require more sophisticated representations of time are encouraged to import a temporal extension of BFO-Core provided by the BFO development team. See documentation for guidance: "@en .
+
+ a owl:ObjectProperty;
+  rdfs:domain ;
+  rdfs:range ;
+  dc11:identifier "242-BFO";
+  rdfs:label "has material basis"@en;
+  skos:definition "b has material basis c =Def b is a disposition & c is a material entity & there is some d bearer of b & there is some time t such that c is a continuant part of d at t & d has disposition b because c is a continuant part of d at t"@en;
+  skos:scopeNote "Users that require more sophisticated representations of time are encouraged to import a temporal extension of BFO-Core provided by the BFO development team. See documentation for guidance: "@en .
+
+ a owl:ObjectProperty;
+  owl:inverseOf ;
+  rdfs:domain ;
+  rdfs:range ;
+  dc11:identifier "268-BFO";
+  rdfs:label "first instant of"@en;
+  skos:definition "t first instant of t' =Def t is a temporal instant & t' is a temporal region & t precedes all temporal parts of t' other than t"@en;
+  skos:example "An hour starting at midnight yesterday has first instant midnight yesterday"@en .
+
+ a owl:ObjectProperty, owl:FunctionalProperty;
+  rdfs:domain ;
+  rdfs:range ;
+  dc11:identifier "261-BFO";
+  rdfs:label "has first instant"@en;
+  skos:definition "t has first instant t' =Def t' first instant of t"@en;
+  skos:example "The first hour of a year has first instant midnight on December 31"@en .
+
+ a owl:ObjectProperty;
+  owl:inverseOf ;
+  rdfs:domain ;
+  rdfs:range ;
+  dc11:identifier "269-BFO";
+  rdfs:label "last instant of"@en;
+  skos:definition "t last instant of t' =Def t is a temporal instant & t' is a temporal region & all temporal parts of t' other than t precede t"@en;
+  skos:example "Last midnight is the last instant of yesterday"@en .
+ + a owl:ObjectProperty, owl:FunctionalProperty; + rdfs:domain ; + rdfs:range ; + dc11:identifier "215-BFO"; + rdfs:label "has last instant"@en; + skos:definition "t has last instant t' =Def t' last instant of t"@en; + skos:example "The last hour of a year has last instant midnight December 31"@en . + + a owl:Class; + dc11:identifier "001-BFO"; + rdfs:label "entity"@en; + skos:definition "(Elucidation) An entity is anything that exists or has existed or will exist"@en; + skos:example "Julius Caesar; the Second World War; your body mass index; Verdi's Requiem"@en . + + a owl:Class; + rdfs:subClassOf , _:genid91; + owl:disjointWith ; + dc11:identifier "008-BFO"; + rdfs:label "continuant"@en; + skos:definition "(Elucidation) A continuant is an entity that persists, endures, or continues to exist through time while maintaining its identity"@en; + skos:example "A human being; a tennis ball; a cave; a region of space; someone's temperature"@en . + +_:genid91 a owl:Restriction; + owl:allValuesFrom ; + owl:onProperty . + + a owl:Class; + rdfs:subClassOf ; + dc11:identifier "077-BFO"; + rdfs:label "occurrent"@en; + skos:definition "(Elucidation) An occurrent is an entity that unfolds itself in time or it is the start or end of such an entity or it is a temporal or spatiotemporal region"@en; + skos:example "As for process, history, process boundary, spatiotemporal region, zero-dimensional temporal region, one-dimensional temporal region, temporal interval, temporal instant."@en . + + a owl:Class; + rdfs:subClassOf , _:genid92; + dc11:identifier "017-BFO"; + rdfs:label "independent continuant"@en; + skos:definition "b is an independent continuant =Def b is a continuant & there is no c such that b specifically depends on c or b generically depends on c"@en; + skos:example "An atom; a molecule; an organism; a heart; a chair; the bottom right portion of a human torso; a leg; the interior of your mouth; a spatial region; an orchestra"@en . + +_:genid92 a owl:Restriction; + owl:allValuesFrom ; + owl:onProperty . + + a owl:Class; + rdfs:subClassOf , _:genid93; + dc11:identifier "035-BFO"; + rdfs:label "spatial region"@en; + skos:definition "(Elucidation) A spatial region is a continuant entity that is a continuant part of the spatial projection of a portion of spacetime at a given time"@en; + skos:example "As for zero-dimensional spatial region, one-dimensional spatial region, two-dimensional spatial region, three-dimensional spatial region"@en . + +_:genid93 a owl:Restriction; + owl:allValuesFrom ; + owl:onProperty . + + a owl:Class; + rdfs:subClassOf , _:genid94, _:genid95; + dc11:identifier "100-BFO"; + rdfs:label "temporal region"@en; + skos:definition "(Elucidation) A temporal region is an occurrent over which processes can unfold"@en; + skos:example "As for zero-dimensional temporal region and one-dimensional temporal region"@en . + +_:genid94 a owl:Restriction; + owl:allValuesFrom ; + owl:onProperty . + +_:genid95 a owl:Restriction; + owl:allValuesFrom ; + owl:onProperty . + + a owl:Class; + rdfs:subClassOf , _:genid96; + dc11:identifier "039-BFO"; + rdfs:label "two-dimensional spatial region"@en; + skos:definition "(Elucidation) A two-dimensional spatial region is a spatial region that is a whole consisting of a surface together with zero or more surfaces which may have spatial regions of lower dimension as parts"@en; + skos:example "The surface of a sphere-shaped part of space; an infinitely thin plane in space"@en . 
+ +_:genid96 a owl:Restriction; + owl:allValuesFrom _:genid97; + owl:onProperty . + +_:genid97 a owl:Class; + owl:unionOf _:genid100 . + +_:genid100 a rdf:List; + rdf:first ; + rdf:rest _:genid99 . + +_:genid99 a rdf:List; + rdf:first ; + rdf:rest _:genid98 . + +_:genid98 a rdf:List; + rdf:first ; + rdf:rest rdf:nil . + + a owl:Class; + rdfs:subClassOf , _:genid101, _:genid102; + dc11:identifier "095-BFO"; + rdfs:label "spatiotemporal region"@en; + skos:definition "(Elucidation) A spatiotemporal region is an occurrent that is an occurrent part of spacetime"@en; + skos:example "The spatiotemporal region occupied by the development of a cancer tumour; the spatiotemporal region occupied by an orbiting satellite"@en; + skos:scopeNote "'Spacetime' here refers to the maximal instance of the universal spatiotemporal region."@en . + +_:genid101 a owl:Restriction; + owl:allValuesFrom ; + owl:onProperty . + +_:genid102 a owl:Restriction; + owl:allValuesFrom ; + owl:onProperty . + + a owl:Class; + rdfs:subClassOf , _:genid103, _:genid107, _:genid108; + dc11:identifier "083-BFO"; + rdfs:label "process"@en; + skos:altLabel "event"@en; + skos:definition "(Elucidation) p is a process means p is an occurrent that has some temporal proper part and for some time t, p has some material entity as participant"@en; + skos:example "An act of selling; the life of an organism; a process of sleeping; a process of cell-division; a beating of the heart; a process of meiosis; the taxiing of an aircraft; the programming of a computer"@en . + +_:genid103 a owl:Restriction; + owl:allValuesFrom _:genid104; + owl:onProperty . + +_:genid104 a owl:Class; + owl:unionOf _:genid106 . + +_:genid106 a rdf:List; + rdf:first ; + rdf:rest _:genid105 . + +_:genid105 a rdf:List; + rdf:first ; + rdf:rest rdf:nil . + +_:genid107 a owl:Restriction; + owl:allValuesFrom ; + owl:onProperty . + +_:genid108 a owl:Restriction; + owl:allValuesFrom ; + owl:onProperty . + + a owl:Class; + rdfs:subClassOf ; + owl:disjointWith ; + dc11:identifier "062-BFO"; + rdfs:label "disposition"@en; + skos:altLabel "internally-grounded realizable entity"@en; + skos:definition "(Elucidation) A disposition b is a realizable entity such that if b ceases to exist then its bearer is physically changed & b's realization occurs when and because this bearer is in some special physical circumstances & this realization occurs in virtue of the bearer's physical make-up"@en; + skos:example "An atom of element X has the disposition to decay to an atom of element Y; the cell wall is disposed to transport cellular material through endocytosis and exocytosis; certain people have a predisposition to colon cancer; children are innately disposed to categorize objects in certain ways"@en . + + a owl:Class; + rdfs:subClassOf ; + owl:disjointWith ; + dc11:identifier "058-BFO"; + rdfs:label "realizable entity"@en; + skos:definition "(Elucidation) A realizable entity is a specifically dependent continuant that inheres in some independent continuant which is not a spatial region & which is of a type some instances of which are realized in processes of a correlated type"@en; + skos:example "The role of being a doctor; the role of this boundary to delineate where Utah and Colorado meet; the function of your reproductive organs; the disposition of your blood to coagulate; the disposition of this piece of metal to conduct electricity"@en . 
+ + a owl:Class; + rdfs:subClassOf , _:genid109; + dc11:identifier "037-BFO"; + rdfs:label "zero-dimensional spatial region"@en; + skos:definition "(Elucidation) A zero-dimensional spatial region is one or a collection of more than one spatially disjoint points in space"@en; + skos:example "The spatial region occupied at some time instant by the North Pole"@en . + +_:genid109 a owl:Restriction; + owl:allValuesFrom ; + owl:onProperty . + + a owl:Class; + rdfs:subClassOf ; + dc11:identifier "055-BFO"; + rdfs:label "quality"@en; + skos:definition "(Elucidation) A quality is a specifically dependent continuant that, in contrast to roles and dispositions, does not require any further process in order to be realized"@en; + skos:example "The colour of a tomato; the ambient temperature of this portion of air; the length of the circumference of your waist; the shape of your nose; the shape of your nostril; the mass of this piece of gold"@en . + + a owl:Class; + rdfs:subClassOf ; + dc11:identifier "050-BFO"; + rdfs:label "specifically dependent continuant"@en; + skos:definition "b is a specifically dependent continuant =Def b is a continuant & there is some independent continuant c which is not a spatial region & which is such that b specifically depends on c"@en; + skos:example "(with multiple bearers) John's love for Mary; the ownership relation between John and this statue; the relation of authority between John and his subordinates"@en, + "(with one bearer) The mass of this tomato; the pink colour of a medium rare piece of grilled filet mignon at its centre; the smell of this portion of mozzarella; the disposition of this fish to decay; the role of being a doctor; the function of this heart to pump blood; the shape of this hole"@en . + + a owl:Class; + rdfs:subClassOf ; + dc11:identifier "061-BFO"; + rdfs:label "role"@en; + skos:altLabel "externally-grounded realizable entity"@en; + skos:definition "(Elucidation) A role b is a realizable entity such that b exists because there is some single bearer that is in some special physical, social, or institutional set of circumstances in which this bearer does not have to be & b is not such that, if it ceases to exist, then the physical make-up of the bearer is thereby changed"@en; + skos:example "The priest role; the student role; the role of subject in a clinical trial; the role of a stone in marking a property boundary; the role of a boundary to demarcate two neighbouring administrative territories; the role of a building in serving as a military target"@en . + + a owl:Class; + rdfs:subClassOf ; + dc11:identifier "027-BFO"; + rdfs:label "fiat object part"@en; + skos:definition "(Elucidation) A fiat object part b is a material entity & such that if b exists then it is continuant part of some object c & demarcated from the remainder of c by one or more fiat surfaces"@en; + skos:example "The upper and lower lobes of the left lung; the dorsal and ventral surfaces of the body; the Western hemisphere of the Earth; the FMA:regional parts of an intact human body"@en . + + a owl:Class; + rdfs:subClassOf , _:genid110; + dc11:identifier "038-BFO"; + rdfs:label "one-dimensional spatial region"@en; + skos:definition "(Elucidation) A one-dimensional spatial region is a whole consisting of a line together with zero or more lines which may have points as parts"@en; + skos:example "An edge of a cube-shaped portion of space; a line connecting two points; two parallel lines extended in space"@en . 
+ +_:genid110 a owl:Restriction; + owl:allValuesFrom _:genid111; + owl:onProperty . + +_:genid111 a owl:Class; + owl:unionOf _:genid113 . + +_:genid113 a rdf:List; + rdf:first ; + rdf:rest _:genid112 . + +_:genid112 a rdf:List; + rdf:first ; + rdf:rest rdf:nil . + + a owl:Class; + rdfs:subClassOf ; + dc11:identifier "025-BFO"; + rdfs:label "object aggregate"@en; + skos:definition "(Elucidation) An object aggregate is a material entity consisting exactly of a plurality (≥1) of objects as member parts which together form a unit"@en; + skos:example "The aggregate of the musicians in a symphony orchestra and their instruments; the aggregate of bearings in a constant velocity axle joint; the nitrogen atoms in the atmosphere; a collection of cells in a blood biobank"@en; + skos:scopeNote "'Exactly' means that there are no parts of the object aggregate other than its member parts.", + "The unit can, at certain times, consist of exactly one object, for example, when a wolf litter loses all but one of its pups, but it must at some time have a plurality of member parts." . + + a owl:Class; + rdfs:subClassOf , _:genid114; + dc11:identifier "040-BFO"; + rdfs:label "three-dimensional spatial region"@en; + skos:definition "(Elucidation) A three-dimensional spatial region is a whole consisting of a spatial volume together with zero or more spatial volumes which may have spatial regions of lower dimension as parts"@en; + skos:example "A cube-shaped region of space; a sphere-shaped region of space; the region of space occupied by all and only the planets in the solar system at some point in time"@en . + +_:genid114 a owl:Restriction; + owl:allValuesFrom ; + owl:onProperty . + + a owl:Class; + rdfs:subClassOf , _:genid115, _:genid119, _:genid123; + dc11:identifier "034-BFO"; + rdfs:label "site"@en; + skos:definition "(Elucidation) A site is a three-dimensional immaterial entity whose boundaries either (partially or wholly) coincide with the boundaries of one or more material entities or have locations determined in relation to some material entity"@en; + skos:example "A hole in a portion of cheese; a rabbit hole; the Grand Canyon; the Piazza San Marco; the kangaroo-joey-containing hole of a kangaroo pouch; your left nostril (a fiat part - the opening - of your left nasal cavity); the lumen of your gut; the hold of a ship; the interior of the trunk of your car; hole in an engineered floor joist"@en . + +_:genid115 a owl:Restriction; + owl:allValuesFrom _:genid116; + owl:onProperty . + +_:genid116 a owl:Class; + owl:unionOf _:genid118 . + +_:genid118 a rdf:List; + rdf:first ; + rdf:rest _:genid117 . + +_:genid117 a rdf:List; + rdf:first ; + rdf:rest rdf:nil . + +_:genid119 a owl:Restriction; + owl:allValuesFrom _:genid120; + owl:onProperty . + +_:genid120 a owl:Class; + owl:unionOf _:genid122 . + +_:genid122 a rdf:List; + rdf:first ; + rdf:rest _:genid121 . + +_:genid121 a rdf:List; + rdf:first ; + rdf:rest rdf:nil . + +_:genid123 a owl:Restriction; + owl:allValuesFrom ; + owl:onProperty . + + a owl:Class; + rdfs:subClassOf ; + dc11:identifier "024-BFO"; + rdfs:label "object"@en; + skos:definition "(Elucidation) An object is a material entity which manifests causal unity & is of a type instances of which are maximal relative to the sort of causal unity manifested"@en; + skos:example "An organism; a fish tank; a planet; a laptop; a valve; a block of marble; an ice cube"@en; + skos:scopeNote "A description of three primary sorts of causal unity is provided in Basic Formal Ontology 2.0. 
Specification and User Guide"@en . + + a owl:Class; + rdfs:subClassOf ; + dc11:identifier "074-BFO"; + rdfs:label "generically dependent continuant"@en; + skos:altLabel "g-dependent continuant"@en; + skos:definition "(Elucidation) A generically dependent continuant is an entity that exists in virtue of the fact that there is at least one of what may be multiple copies which is the content or the pattern that multiple copies would share"@en; + skos:example "The pdf file on your laptop; the pdf file that is a copy thereof on my laptop; the sequence of this protein molecule; the sequence that is a copy thereof in that protein molecule; the content that is shared by a string of dots and dashes written on a page and the transmitted Morse code signal; the content of a sentence; an engineering blueprint"@en . + + a owl:Class; + rdfs:subClassOf ; + dc11:identifier "064-BFO"; + rdfs:label "function"@en; + skos:definition "(Elucidation) A function is a disposition that exists in virtue of its bearer's physical make-up & this physical make-up is something the bearer possesses because it came into being either through evolution (in the case of natural biological entities) or through intentional design (in the case of artefacts) in order to realize processes of a certain sort"@en; + skos:example "The function of a hammer to drive in nails; the function of a heart pacemaker to regulate the beating of a heart through electricity"@en . + + a owl:Class; + rdfs:subClassOf , _:genid124, _:genid125, _:genid126, _:genid130; + dc11:identifier "084-BFO"; + rdfs:label "process boundary"@en; + skos:definition "p is a process boundary =Def p is a temporal part of a process & p has no proper temporal parts"@en; + skos:example "The boundary between the 2nd and 3rd year of your life"@en . + +_:genid124 a owl:Restriction; + owl:allValuesFrom ; + owl:onProperty . + +_:genid125 a owl:Restriction; + owl:allValuesFrom ; + owl:onProperty . + +_:genid126 a owl:Restriction; + owl:allValuesFrom _:genid127; + owl:onProperty . + +_:genid127 a owl:Class; + owl:unionOf _:genid129 . + +_:genid129 a rdf:List; + rdf:first ; + rdf:rest _:genid128 . + +_:genid128 a rdf:List; + rdf:first ; + rdf:rest rdf:nil . + +_:genid130 a owl:Restriction; + owl:allValuesFrom _:genid131; + owl:onProperty . + +_:genid131 a owl:Class; + owl:unionOf _:genid133 . + +_:genid133 a rdf:List; + rdf:first ; + rdf:rest _:genid132 . + +_:genid132 a rdf:List; + rdf:first ; + rdf:rest rdf:nil . + + a owl:Class; + rdfs:subClassOf , _:genid134, _:genid138; + owl:disjointWith ; + dc11:identifier "103-BFO"; + rdfs:label "one-dimensional temporal region"@en; + skos:definition "(Elucidation) A one-dimensional temporal region is a temporal region that is a whole that has a temporal interval and zero or more temporal intervals and temporal instants as parts"@en; + skos:example "The temporal region during which a process occurs"@en . + +_:genid134 a owl:Restriction; + owl:allValuesFrom _:genid135; + owl:onProperty . + +_:genid135 a owl:Class; + owl:unionOf _:genid137 . + +_:genid137 a rdf:List; + rdf:first ; + rdf:rest _:genid136 . + +_:genid136 a rdf:List; + rdf:first ; + rdf:rest rdf:nil . + +_:genid138 a owl:Restriction; + owl:allValuesFrom ; + owl:onProperty . 
+
+ a owl:Class;
+  rdfs:subClassOf , _:genid139, _:genid140;
+  owl:disjointWith ;
+  dc11:identifier "019-BFO";
+  rdfs:label "material entity"@en;
+  skos:definition "(Elucidation) A material entity is an independent continuant that has some portion of matter as continuant part"@en;
+  skos:example "A human being; the undetached arm of a human being; an aggregate of human beings"@en .
+
+_:genid139 a owl:Restriction;
+  owl:allValuesFrom ;
+  owl:onProperty .
+
+_:genid140 a owl:Restriction;
+  owl:allValuesFrom _:genid141;
+  owl:onProperty .
+
+_:genid141 a owl:Class;
+  owl:unionOf _:genid144 .
+
+_:genid144 a rdf:List;
+  rdf:first ;
+  rdf:rest _:genid143 .
+
+_:genid143 a rdf:List;
+  rdf:first ;
+  rdf:rest _:genid142 .
+
+_:genid142 a rdf:List;
+  rdf:first ;
+  rdf:rest rdf:nil .
+
+ a owl:Class;
+  rdfs:subClassOf , _:genid145, _:genid146;
+  dc11:identifier "029-BFO";
+  rdfs:label "continuant fiat boundary"@en;
+  skos:definition "(Elucidation) A continuant fiat boundary b is an immaterial entity that is of zero, one or two dimensions & such that there is no time t when b has a spatial region as continuant part & whose location is determined in relation to some material entity"@en;
+  skos:example "As for fiat point, fiat line, fiat surface"@en .
+
+_:genid145 a owl:Restriction;
+  owl:allValuesFrom ;
+  owl:onProperty .
+
+_:genid146 a owl:Restriction;
+  owl:allValuesFrom ;
+  owl:onProperty .
+
+ a owl:Class;
+  rdfs:subClassOf ;
+  dc11:identifier "028-BFO";
+  rdfs:label "immaterial entity"@en;
+  skos:definition "b is an immaterial entity =Def b is an independent continuant which is such that there is no time t when it has a material entity as continuant part"@en;
+  skos:example "As for fiat point, fiat line, fiat surface, site"@en .
+
+ a owl:Class;
+  rdfs:subClassOf , _:genid147;
+  dc11:identifier "032-BFO";
+  rdfs:label "fiat line"@en;
+  skos:definition "(Elucidation) A fiat line is a one-dimensional continuant fiat boundary that is continuous"@en;
+  skos:example "The Equator; all geopolitical boundaries; all lines of latitude and longitude; the median sulcus of your tongue; the line separating the outer surface of the mucosa of the lower lip from the outer surface of the skin of the chin"@en .
+
+_:genid147 a owl:Restriction;
+  owl:allValuesFrom _:genid148;
+  owl:onProperty .
+
+_:genid148 a owl:Class;
+  owl:unionOf _:genid150 .
+
+_:genid150 a rdf:List;
+  rdf:first ;
+  rdf:rest _:genid149 .
+
+_:genid149 a rdf:List;
+  rdf:first ;
+  rdf:rest rdf:nil .
+
+ a owl:Class;
+  rdfs:subClassOf ;
+  dc11:identifier "057-BFO";
+  rdfs:label "relational quality"@en;
+  skos:definition "b is a relational quality =Def b is a quality & there exists c and d such that c and d are not identical & b specifically depends on c & b specifically depends on d"@en;
+  skos:example "A marriage bond; an instance of love; an obligation between one person and another"@en .
+
+ a owl:Class;
+  rdfs:subClassOf , _:genid151 .
+
+_:genid151 owl:allValuesFrom .
+#
+# http://purl.obolibrary.org/obo/BFO_0000147
+#
+# http://purl.obolibrary.org/obo/BFO_0000148
+#
+# http://purl.obolibrary.org/obo/BFO_0000182
+#
+# http://purl.obolibrary.org/obo/BFO_0000202
+#
+# http://purl.obolibrary.org/obo/BFO_0000203
+#
+#
+#
+# #################################################################
+# #
+# #    General axioms
+# #
+# #################################################################
+#
+#
+#
+#
+#
+#
+#
+# Generated by the OWL API (version 4.5.29) https://github.com/owlcs/owlapi
+
+_:genid151 a owl:Restriction;
+  owl:onProperty .
+ + dc11:identifier "033-BFO"; + rdfs:label "fiat surface"@en; + skos:definition "(Elucidation) A fiat surface is a two-dimensional continuant fiat boundary that is self-connected"@en; + skos:example "The surface of the Earth; the plane separating the smoking from the non-smoking zone in a restaurant"@en . + + a owl:Class; + rdfs:subClassOf , _:genid152; + dc11:identifier "031-BFO"; + rdfs:label "fiat point"@en; + skos:definition "(Elucidation) A fiat point is a zero-dimensional continuant fiat boundary that consists of a single point"@en; + skos:example "The geographic North Pole; the quadripoint where the boundaries of Colorado, Utah, New Mexico and Arizona meet; the point of origin of some spatial coordinate system"@en . + +_:genid152 a owl:Restriction; + owl:allValuesFrom ; + owl:onProperty . + + a owl:Class; + rdfs:subClassOf , _:genid153; + dc11:identifier "102-BFO"; + rdfs:label "zero-dimensional temporal region"@en; + skos:definition "(Elucidation) A zero-dimensional temporal region is a temporal region that is a whole consisting of one or more separated temporal instants as parts"@en; + skos:example "A temporal region that is occupied by a process boundary; the moment at which a finger is detached in an industrial accident"@en . + +_:genid153 a owl:Restriction; + owl:allValuesFrom ; + owl:onProperty . + + a owl:Class; + rdfs:subClassOf ; + dc11:identifier "138-BFO"; + rdfs:label "history"@en; + skos:definition "(Elucidation) A history is a process that is the sum of the totality of processes taking place in the spatiotemporal region occupied by the material part of a material entity"@en; + skos:example "The life of an organism from the beginning to the end of its existence"@en . + + a owl:Class; + rdfs:subClassOf ; + dc11:identifier "155-BFO"; + rdfs:label "temporal interval"@en; + skos:definition "(Elucidation) A temporal interval is a one-dimensional temporal region that is continuous, thus without gaps or breaks"@en; + skos:example "The year 2018."@en; + skos:scopeNote "A one-dimensional temporal region can include as parts not only temporal intervals but also temporal instants separated from other parts by gaps."@en . + + a owl:Class; + rdfs:subClassOf ; + dc11:identifier "209-BFO"; + rdfs:label "temporal instant"@en; + skos:definition "(Elucidation) A temporal instant is a zero-dimensional temporal region that has no proper temporal part"@en; + skos:example "The millennium"@en . + +_:genid154 a owl:AllDisjointClasses; + owl:members _:genid157 . + +_:genid157 a rdf:List; + rdf:first ; + rdf:rest _:genid156 . + +_:genid156 a rdf:List; + rdf:first ; + rdf:rest _:genid155 . + +_:genid155 a rdf:List; + rdf:first ; + rdf:rest rdf:nil . + +_:genid158 a owl:AllDisjointClasses; + owl:members _:genid161 . + +_:genid161 a rdf:List; + rdf:first ; + rdf:rest _:genid160 . + +_:genid160 a rdf:List; + rdf:first ; + rdf:rest _:genid159 . + +_:genid159 a rdf:List; + rdf:first ; + rdf:rest rdf:nil . + +_:genid162 a owl:AllDisjointClasses; + owl:members _:genid166 . + +_:genid166 a rdf:List; + rdf:first ; + rdf:rest _:genid165 . + +_:genid165 a rdf:List; + rdf:first ; + rdf:rest _:genid164 . + +_:genid164 a rdf:List; + rdf:first ; + rdf:rest _:genid163 . + +_:genid163 a rdf:List; + rdf:first ; + rdf:rest rdf:nil . + +_:genid167 a owl:AllDisjointClasses; + owl:members _:genid171 . + +_:genid171 a rdf:List; + rdf:first ; + rdf:rest _:genid170 . + +_:genid170 a rdf:List; + rdf:first ; + rdf:rest _:genid169 . + +_:genid169 a rdf:List; + rdf:first ; + rdf:rest _:genid168 . 
+
+_:genid168 a rdf:List;
+  rdf:first ;
+  rdf:rest rdf:nil .
+
+_:genid172 a owl:AllDisjointClasses;
+  owl:members _:genid175 .
+
+_:genid175 a rdf:List;
+  rdf:first ;
+  rdf:rest _:genid174 .
+
+_:genid174 a rdf:List;
+  rdf:first ;
+  rdf:rest _:genid173 .
+
+_:genid173 a rdf:List;
+  rdf:first ;
+  rdf:rest rdf:nil .

From 721adb1478cc7c7f01f056c1df1f2ff1605ed2e4 Mon Sep 17 00:00:00 2001
From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com>
Date: Tue, 18 Jun 2024 15:26:04 +0200
Subject: [PATCH 056/146] added filter to the relation creation query for #14

---
 .../src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java | 1 +
 1 file changed, 1 insertion(+)

diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java
index 7b3d64066..a079f9049 100644
--- a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java
+++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java
@@ -72,6 +72,7 @@ public static String generateRelationCreationQuery(String[] titles, String[] val
         sb.append("MATCH (n"+idToLabel(values[0])+" {id: "+"\'"+values[0].substring(1, values[0].length() - 1)+"\'"+"}),")
                 .append("(m"+idToLabel(values[2])+" {id: "+"\'"+values[2].substring(1, values[2].length() - 1)+"\'"+"}) ")
                 .append("WHERE n.id STARTS WITH '"+values[0].split("\\+")[0]+"' AND m.id STARTS WITH '"+values[2].split("\\+")[0]+"' ")
+                .append("AND n.ontologyId = '"+values[0].split("\\+")[0]+"' AND m.ontologyId = '"+values[2].split("\\+")[0]+"' ")
                 .append("CREATE (n)-[:")
                 .append("`"+values[1].substring(1, values[1].length() - 1).replace("|","`:`")+"`")
                 .append("]->(m)");

From 72798ac1587776c7236b6eaec988ba94e3250db0 Mon Sep 17 00:00:00 2001
From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com>
Date: Thu, 18 Jul 2024 14:23:25 +0200
Subject: [PATCH 057/146] reimplemented csv2neo with commons-csv and opencsv parsers for #14

---
 dataload/csv2neo/pom.xml                  | 16 +
 .../uk/ac/ebi/spot/csv2neo/ImportCSV.java | 319 ++++++------------
 3 files changed, 124 insertions(+), 240 deletions(-)

diff --git a/dataload/csv2neo/pom.xml b/dataload/csv2neo/pom.xml
index b6bd05a38..a76e7023d 100644
--- a/dataload/csv2neo/pom.xml
+++ b/dataload/csv2neo/pom.xml
@@ -27,6 +27,22 @@
             <version>1.5.0</version>
             <scope>compile</scope>
         </dependency>
+
+
+        <dependency>
+            <groupId>com.opencsv</groupId>
+            <artifactId>opencsv</artifactId>
+            <version>5.9</version>
+        </dependency>
+
+
+        <dependency>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-csv</artifactId>
+            <version>1.11.0</version>
+        </dependency>
+
+
     </dependencies>

diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java
index 055487376..85a00223d 100644
--- a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java
+++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java
@@ -1,8 +1,18 @@
 package uk.ac.ebi.spot.csv2neo;
+import com.opencsv.CSVParser;
+import com.opencsv.CSVParserBuilder;
+import com.opencsv.CSVReader;
+import com.opencsv.CSVReaderBuilder;
+import com.opencsv.exceptions.CsvException;
+import org.apache.commons.csv.CSVFormat;
+import org.apache.commons.csv.CSVRecord;
 import org.neo4j.driver.*;
 import java.io.*;
+import java.nio.file.Files;
+import java.nio.file.Paths;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
@@ -34,72 +44,22 @@ public static List listFiles(File[] files) throws IOException {
         return fileList;
     }
 
-    public static void 
generateCreationQueries(List files, Session session, boolean safe) throws IOException, java.text.ParseException { - for (File file : files){ - if(!(file.getName().contains("_ontologies") || file.getName().contains("_properties") + public static void generateCreationQueries(List files, Session session, boolean safe) throws IOException { + + for (File file : files) { + if (!(file.getName().contains("_ontologies") || file.getName().contains("_properties") || file.getName().contains("_individuals") || file.getName().contains("_classes")) || !file.getName().endsWith(".csv")) continue; - fr = new FileReader(file.getAbsolutePath()); - br = new BufferedReader(fr); - String line = br.readLine(); - String[] titles = {}; - if (line != null) - titles = line.split(",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)"); - String[] pieces = null; - StringBuilder sb = new StringBuilder(); - boolean started = false; - while((line = br.readLine())!=null){ - String appendedLine = ""; - if (line.startsWith("\"") && line.endsWith("\"")){ - if(started){ - if (line.startsWith("\",\"") && !sb.toString().isEmpty()) { - sb.append(line); - appendedLine = sb.toString(); - sb.setLength(0); - started = false; - } - else - throw new IOException("file: "+file+" - line: "+line); - } - else - appendedLine = line; - } else if (line.startsWith("\"") && !line.endsWith("\"")){ - if(started){ - if (line.startsWith("\",\"")) { - sb.append(line); - } - else - throw new IOException("file: "+file+" - line: "+line); - } - else { - sb.append(line); - started = true; - } - } else if (!line.startsWith("\"") && !line.endsWith("\"")){ - if(!started) - throw new IOException("file: "+file+" - line: "+line); - else - sb.append(line); - - } else if (!line.startsWith("\"") && line.endsWith("\"") && !sb.toString().isEmpty()){ - if(!started) - throw new IOException("file: "+file+" - line: "+line); - else { - sb.append(line); - appendedLine = sb.toString(); - sb.setLength(0); - started = false; - } - } - - if (appendedLine.isEmpty()) - continue; - - pieces = split(appendedLine, List.of(titles).indexOf("\"_json\""),titles.length,",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)"); - String query = generateNodeCreationQuery(titles,pieces); + Reader reader = Files.newBufferedReader(Paths.get(file.getAbsolutePath())); + org.apache.commons.csv.CSVParser csvParser = new org.apache.commons.csv.CSVParser(reader, CSVFormat.POSTGRESQL_CSV.withFirstRecordAsHeader()); + String[] headers = csvParser.getHeaderNames().toArray(String[]::new); + for (CSVRecord csvRecord : csvParser) { + String[] row = csvRecord.toList().toArray(String[]::new); + String query = generateNodeCreationQuery(headers,row); + //System.out.println(query); if(query.isEmpty()) - System.out.println("empty query for appended line: "+appendedLine+" in file: "+file); + System.out.println("empty query for appended line: "+Arrays.toString(row)+" in file: "+file); if(safe){ try (Transaction tx = session.beginTransaction()) { tx.run(query); @@ -119,65 +79,18 @@ public static void generateCreationQueries(List files, Session session, bo for (File file : files){ if((!file.getName().contains("_edges")) || !file.getName().endsWith(".csv")) continue; - fr = new FileReader(file.getAbsolutePath()); - br = new BufferedReader(fr); - String line = br.readLine(); - String[] titles = {}; - if (line != null) - titles = line.split(",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)"); - String[] pieces = null; - StringBuilder sb = new StringBuilder(); - boolean started = false; - while((line = br.readLine())!=null){ - String appendedLine = ""; - if 
(line.startsWith("\"") && line.endsWith("\"")){ - if(started){ - if (line.startsWith("\",\"") && !sb.toString().isEmpty()) { - sb.append(line); - appendedLine = sb.toString(); - sb.setLength(0); - started = false; - } - else - throw new IOException("file: "+file+" - line: "+line); - } - else - appendedLine = line; - } else if (line.startsWith("\"") && !line.endsWith("\"")){ - if(started){ - if (line.startsWith("\",\"")) { - sb.append(line); - } - else - throw new IOException("file: "+file+" - line: "+line); - } - else { - sb.append(line); - started = true; - } - } else if (!line.startsWith("\"") && !line.endsWith("\"")){ - if(!started) - throw new IOException("file: "+file+" - line: "+line); - else - sb.append(line); - } else if (!line.startsWith("\"") && line.endsWith("\"") && !sb.toString().isEmpty()){ - if(!started) - throw new IOException("file: "+file+" - line: "+line); - else { - sb.append(line); - appendedLine = sb.toString(); - sb.setLength(0); - started = false; - } - } - if (appendedLine.isEmpty()) - continue; + Reader reader = Files.newBufferedReader(Paths.get(file.getAbsolutePath())); + org.apache.commons.csv.CSVParser csvParser = new org.apache.commons.csv.CSVParser(reader, CSVFormat.POSTGRESQL_CSV.withFirstRecordAsHeader()); + String[] headers = csvParser.getHeaderNames().toArray(String[]::new); - pieces = appendedLine.split(",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)"); - String query = generateRelationCreationQuery(titles,pieces); + //Read CSV line by line and use the string array as you want + for (CSVRecord csvRecord : csvParser) { + String[] row = csvRecord.toList().toArray(String[]::new); + String query = generateRelationCreationQuery(headers,row); + //System.out.println(query); if(query.isEmpty()) - System.out.println("empty query for appended line: "+appendedLine+" in file: "+file); + System.out.println("empty query for appended line: "+Arrays.toString(row)+" in file: "+file); if(safe){ try (Transaction tx = session.beginTransaction()) { tx.run(query); @@ -191,129 +104,87 @@ public static void generateCreationQueries(List files, Session session, bo } catch (Exception e){ e.printStackTrace(); } - } } } - public static String[] split(String input, int jsonIndex,int titlesLength, String regex) throws java.text.ParseException { - String[] tokens = {}; - char c = '{'; - char d = '\"'; - char e = '}'; - String left = String.valueOf(d) + c; - String right = String.valueOf(e) + d; - int countLeftCurly = countOccurrences(input, left); - int countRightCurly = countOccurrences(input, right); + public static void generateCQ(List files, Session session, boolean safe) throws IOException, CsvException { - if(countLeftCurly == 0 && countRightCurly == 0){ - tokens = input.split(regex); - } else if(countLeftCurly == countRightCurly && countLeftCurly == 1){ - String[] content = input.split("\"\\{"); - String before = ""; - String after = ""; - String json = ""; - before = content[0]; - if (before.endsWith(",")) - before = before.substring(0,before.length()-1); - String[] content2 = content[1].split("\\}\""); - json = String.valueOf(d)+String.valueOf(c)+content2[0]+String.valueOf(e)+String.valueOf(d); - after = content2[1]; - if(after.startsWith(",")) - after = after.substring(1,after.length()); - String[] beforeArray = before.split(regex); - String[] afterArray = after.split(regex); - int length = beforeArray.length + 1 + afterArray.length; - tokens = new String[length]; - for (int i =0;i= 1 && countRightCurly >= countLeftCurly){ - String before = ""; - String after = ""; - String json = ""; - 
int start = 0; - int end = 0; - - int countDoubleQuotes = 0; - int countCommas = 0; - for (int i = 0; i < input.length(); i++){ - if (input.charAt(i) == '"'){ - countDoubleQuotes++; - if (countDoubleQuotes % 2 == 0) - if(input.charAt(i+1) == ',') - countCommas++; - } + CSVParser parser = new CSVParserBuilder().withSeparator(',').withQuoteChar('"').build(); - if (countDoubleQuotes >= 2*jsonIndex && countCommas == jsonIndex){ - before = input.substring(0,i+1); - start = i+1; - break; - } + for (File file : files){ + if(!(file.getName().contains("_ontologies") || file.getName().contains("_properties") + || file.getName().contains("_individuals") || file.getName().contains("_classes")) || !file.getName().endsWith(".csv")) + continue; - } + CSVReader csvReader = new CSVReaderBuilder(new FileReader(file.getAbsolutePath())) + .withSkipLines(0) + .withCSVParser(parser) + .build(); - countDoubleQuotes = 0; - countCommas = 0; - for (int j = input.length()-1;j>-1;j--){ - if (input.charAt(j) == '"'){ - countDoubleQuotes++; - if (countDoubleQuotes % 2 == 0) - if(input.charAt(j-1) == ',') - countCommas++; - } + List allRows = csvReader.readAll(); + String[] headers = allRows.get(0); + List rows = allRows.subList(1, allRows.size()); - if (countDoubleQuotes >= 2*(titlesLength - jsonIndex -1) && countCommas == titlesLength - jsonIndex -1){ - after = input.substring(j); - end = j; - break; - } + //Read CSV line by line and use the string array as you want + for (String[] row : rows) { + String query = generateNodeCreationQuery(headers,row); + //System.out.println(query); + if(query.isEmpty()) + System.out.println("empty query for appended line: "+Arrays.toString(row)+" in file: "+file); + if(safe){ + try (Transaction tx = session.beginTransaction()) { + tx.run(query); + tx.commit(); + } catch(Exception e){ + e.printStackTrace(); + } + } else + try{ + session.run(query); + } catch (Exception e){ + e.printStackTrace(); + } } - json = input.substring(start,end); - String[] beforeArray = before.split(regex); - String[] afterArray = after.split(regex); - int length = beforeArray.length + 1 + afterArray.length; + } - if (length == titlesLength){ - tokens = new String[length]; - for (int i =0;i allRows = csvReader.readAll(); + String[] headers = allRows.get(0); + List rows = allRows.subList(1, allRows.size()); - while ((index = input.indexOf(pattern, index)) != -1) { - count++; - index += pattern.length(); + //Read CSV line by line and use the string array as you want + for (String[] row : rows) { + String query = generateRelationCreationQuery(headers,row); + //System.out.println(query); + if(query.isEmpty()) + System.out.println("empty query for appended line: "+Arrays.toString(row)+" in file: "+file); + if(safe){ + try (Transaction tx = session.beginTransaction()) { + tx.run(query); + tx.commit(); + } catch(Exception e){ + e.printStackTrace(); + } + } else + try{ + session.run(query); + } catch (Exception e){ + e.printStackTrace(); + } + } } - - return count; } private static Options getOptions() { @@ -367,8 +238,6 @@ public static void main(String... 
args) throws IOException, ParseException { e.printStackTrace(); } - } catch (java.text.ParseException e) { - throw new RuntimeException(e); } } } diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java index a079f9049..84fb5f5e7 100644 --- a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java +++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java @@ -18,14 +18,14 @@ public static String generateNodeCreationQuery(String[] titles, String[] values) sb.append("CREATE (") .append(":") - .append("`"+values[1].substring(1, values[1].length() - 1).replace("|","`:`")+"`") + .append("`"+values[1].replace("|","`:`")+"`") .append(" {"); - sb.append("id: ").append("\'"+values[0].substring(1, values[0].length() - 1)+"\'"); + sb.append("id: ").append("\'"+values[0]+"\'"); for (int i = 2; i < values.length; i++) { - String text = values[i].substring(1, values[i].length() - 1).replaceAll("\"\"","\"").replaceAll("\\\\", "\\\\\\\\").replaceAll("\'","\\\\'"); + String text = values[i].replaceAll("\"\"","\"").replaceAll("\\\\", "\\\\\\\\").replaceAll("\'","\\\\'"); sb.append(", ") - .append("`"+titles[i].substring(1, titles[i].length() - 1).split(":")[0].replaceAll("\"\"","\"")+"`") + .append("`"+titles[i].split(":")[0].replaceAll("\"\"","\"")+"`") .append(": ") .append(convertToJSONArray("\'"+text+"\'")); } @@ -36,8 +36,8 @@ public static String generateNodeCreationQuery(String[] titles, String[] values) } else { System.out.println("titles and values are not equal"); System.out.println("titles: "+titles.length + " - values: " +values.length); - for (String value : values) - System.out.println("value: "+value); + /*for (String value : values) + System.out.println("value: "+value);*/ } return sb.toString(); } @@ -69,18 +69,18 @@ public static String generateRelationCreationQuery(String[] titles, String[] val StringBuilder sb = new StringBuilder(); if (titles.length == values.length){ - sb.append("MATCH (n"+idToLabel(values[0])+" {id: "+"\'"+values[0].substring(1, values[0].length() - 1)+"\'"+"}),") - .append("(m"+idToLabel(values[2])+" {id: "+"\'"+values[2].substring(1, values[2].length() - 1)+"\'"+"}) ") + sb.append("MATCH (n"+idToLabel(values[0])+" {id: "+"\'"+values[0]+"\'"+"}),") + .append("(m"+idToLabel(values[2])+" {id: "+"\'"+values[2]+"\'"+"}) ") .append("WHERE n.id STARTS WITH '"+values[0].split("\\+")[0]+"' AND m.id STARTS WITH '"+values[2].split("\\+")[0]+"' ") .append("AND n.ontologyId = '"+values[0].split("\\+")[0]+"' AND m.ontologyId = '"+values[2].split("\\+")[0]+"'") .append("CREATE (n)-[:") - .append("`"+values[1].substring(1, values[1].length() - 1).replace("|","`:`")+"`") + .append("`"+values[1].replace("|","`:`")+"`") .append("]->(m)"); } else { System.out.println("titles and values are not equal"); System.out.println("titles: "+titles.length + " - values: " +values.length); - for (String value : values) - System.out.println("value: "+value); + /*for (String value : values) + System.out.println("value: "+value);*/ } return sb.toString(); @@ -89,9 +89,9 @@ public static String generateRelationCreationQuery(String[] titles, String[] val public static String generateRelationCreationQuery2(String[] titles, String[] values){ StringBuilder sb = new StringBuilder(); if (titles.length == values.length){ - sb.append("MATCH (n {id: "+"\'"+values[0].substring(1, values[0].length() - 1)+"\'"+"})-[:") - .append("`"+values[1].substring(1, 
values[1].length() - 1).replace("|","`:`")+"`") - .append("]->(m {id: "+"\'"+values[2].substring(1, values[2].length() - 1)+"\'"+"})"); + sb.append("MATCH (n {id: "+"\'"+values[0]+"\'"+"})-[:") + .append("`"+values[1].replace("|","`:`")+"`") + .append("]->(m {id: "+"\'"+values[2]+"\'"+"})"); } return sb.toString(); @@ -147,5 +147,4 @@ public static String idToLabel(String id){ }; return label; } - } From c84bc3d172f944e9ba01e90833cb712cccd9b71c Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Wed, 24 Jul 2024 16:10:20 +0200 Subject: [PATCH 058/146] trimmed leading and trailing blanks in parser and corrected missing blank in relation query for #14 --- .../main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java | 10 ++-------- .../java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java | 2 +- 2 files changed, 3 insertions(+), 9 deletions(-) diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java index 85a00223d..113d5c705 100644 --- a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java +++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java @@ -52,7 +52,7 @@ public static void generateCreationQueries(List files, Session session, bo continue; Reader reader = Files.newBufferedReader(Paths.get(file.getAbsolutePath())); - org.apache.commons.csv.CSVParser csvParser = new org.apache.commons.csv.CSVParser(reader, CSVFormat.POSTGRESQL_CSV.withFirstRecordAsHeader()); + org.apache.commons.csv.CSVParser csvParser = new org.apache.commons.csv.CSVParser(reader, CSVFormat.POSTGRESQL_CSV.withFirstRecordAsHeader().withTrim()); String[] headers = csvParser.getHeaderNames().toArray(String[]::new); for (CSVRecord csvRecord : csvParser) { String[] row = csvRecord.toList().toArray(String[]::new); @@ -81,10 +81,9 @@ public static void generateCreationQueries(List files, Session session, bo continue; Reader reader = Files.newBufferedReader(Paths.get(file.getAbsolutePath())); - org.apache.commons.csv.CSVParser csvParser = new org.apache.commons.csv.CSVParser(reader, CSVFormat.POSTGRESQL_CSV.withFirstRecordAsHeader()); + org.apache.commons.csv.CSVParser csvParser = new org.apache.commons.csv.CSVParser(reader, CSVFormat.POSTGRESQL_CSV.withFirstRecordAsHeader().withTrim()); String[] headers = csvParser.getHeaderNames().toArray(String[]::new); - //Read CSV line by line and use the string array as you want for (CSVRecord csvRecord : csvParser) { String[] row = csvRecord.toList().toArray(String[]::new); String query = generateRelationCreationQuery(headers,row); @@ -126,10 +125,8 @@ public static void generateCQ(List files, Session session, boolean safe) t String[] headers = allRows.get(0); List rows = allRows.subList(1, allRows.size()); - //Read CSV line by line and use the string array as you want for (String[] row : rows) { String query = generateNodeCreationQuery(headers,row); - //System.out.println(query); if(query.isEmpty()) System.out.println("empty query for appended line: "+Arrays.toString(row)+" in file: "+file); if(safe){ @@ -146,8 +143,6 @@ public static void generateCQ(List files, Session session, boolean safe) t e.printStackTrace(); } } - - } for (File file : files){ @@ -159,7 +154,6 @@ public static void generateCQ(List files, Session session, boolean safe) t .withCSVParser(parser) .build(); - List allRows = csvReader.readAll(); String[] headers = allRows.get(0); List rows = allRows.subList(1, allRows.size()); diff --git 
a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java index 84fb5f5e7..61f05e4b7 100644 --- a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java +++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java @@ -72,7 +72,7 @@ public static String generateRelationCreationQuery(String[] titles, String[] val sb.append("MATCH (n"+idToLabel(values[0])+" {id: "+"\'"+values[0]+"\'"+"}),") .append("(m"+idToLabel(values[2])+" {id: "+"\'"+values[2]+"\'"+"}) ") .append("WHERE n.id STARTS WITH '"+values[0].split("\\+")[0]+"' AND m.id STARTS WITH '"+values[2].split("\\+")[0]+"' ") - .append("AND n.ontologyId = '"+values[0].split("\\+")[0]+"' AND m.ontologyId = '"+values[2].split("\\+")[0]+"'") + .append("AND n.ontologyId = '"+values[0].split("\\+")[0]+"' AND m.ontologyId = '"+values[2].split("\\+")[0]+"' ") .append("CREATE (n)-[:") .append("`"+values[1].replace("|","`:`")+"`") .append("]->(m)"); From e56c9ba2115e062a7013cbb57875b4b28b03547c Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Wed, 24 Jul 2024 17:10:12 +0200 Subject: [PATCH 059/146] made indexes conditional for #14 --- .../src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java index 113d5c705..5ea82c2d2 100644 --- a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java +++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java @@ -210,9 +210,9 @@ public static void main(String... args) throws IOException, ParseException { driver.verifyConnectivity(); try (var session = driver.session(SessionConfig.builder().withDatabase(db).build())) { try{ - session.run("CREATE CONSTRAINT FOR (n:Ontology) REQUIRE n.id IS UNIQUE"); - session.run("CREATE CONSTRAINT FOR (n:OntologyEntity) REQUIRE n.id IS UNIQUE"); - session.run("CREATE CONSTRAINT FOR (n:OntologyClass) REQUIRE n.id IS UNIQUE"); + session.run("CREATE CONSTRAINT IF NOT EXISTS FOR (n:Ontology) REQUIRE n.id IS UNIQUE"); + session.run("CREATE CONSTRAINT IF NOT EXISTS FOR (n:OntologyEntity) REQUIRE n.id IS UNIQUE"); + session.run("CREATE CONSTRAINT IF NOT EXISTS FOR (n:OntologyClass) REQUIRE n.id IS UNIQUE"); } catch(Exception e){ e.printStackTrace(); } From d4aa43b35219352e6e87a79b2310e851ed7d3a8d Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Wed, 24 Jul 2024 20:24:34 +0200 Subject: [PATCH 060/146] updated dataload documentation for #14 --- dataload/README.md | 24 ++++++++++++++++++++---- 1 file changed, 20 insertions(+), 4 deletions(-) diff --git a/dataload/README.md b/dataload/README.md index 109a7858e..597fd00e9 100644 --- a/dataload/README.md +++ b/dataload/README.md @@ -16,16 +16,26 @@ Use rdf2json to download all the OWL files, resolve imports, and export JSON fil Now (after about 15 min) you should have a huge file called `foundry_out.json` that contains not only the original config for each ontology loaded from `foundry.json`, but also the ontologies themselves represented in an intermediate JSON format! (Note: the intermediate JSON format is a non-standardised application format totally specific to this tool and is subject to change.) +## Step 2: Link JSON +Use linker to link the json into a jsonl file. 
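+The output is JSON Lines: the same data reshaped so that each line is one
+complete JSON document, which the downstream converters can read record by
+record instead of parsing one huge file. Schematically (the actual field
+names live in the json2neo source, not here):
+
+    {"..." : "..."}
+    {"..." : "..."}
+
+To run it: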
+
+    java -jar linker/target/linker-1.0-SNAPSHOT.jar --input foundry_out.json --output foundry_out.jsonl
+
 ## Step 2: JSON to CSV *for Neo4j*
 
 You can now convert this huge JSON file to a CSV file ready for Neo4j, using json2neo:
 
     rm -rf output_csv && mkdir output_csv
-    java -jar json2neo/target/json2neo-1.0-SNAPSHOT.jar --input foundry_out_flat.json --outDir output_csv
+    java -jar json2neo/target/json2neo-1.0-SNAPSHOT.jar --input foundry_out.jsonl --outDir output_csv
 
 ## Step 3: CSV to Neo4j
 
-Now (after 5-10 mins) you should have a directory full of CSV files. These files are formatted especially for Neo4j. You can load them using `neo4j-admin import`, but you'll need to provide the filename of every single CSV file on the command line, which is boring, so included in this repo is a script called `make_csv_import_cmd.sh` that generates the command line for you.
+Now (after 5-10 mins) you should have a directory full of CSV files. These files are formatted especially for Neo4j. You can load them using the `neo4j-admin import` command or the `csv2neo` module.
+
+### Alternative 1: Neo4j Import Command
+
+The `neo4j-admin import` command can only be used when initializing a database in the community edition of Neo4j. The enterprise edition, by contrast, allows repeated imports, which makes ontology ingestion more flexible.
+When you use the `neo4j-admin import` command, you'll need to provide the filename of every single CSV file on the command line, which is boring, so included in this repo is a script called `make_csv_import_cmd.sh` that generates the command line for you.
 
     neo4j-admin import \
     --ignore-empty-strings=true \
@@ -37,10 +47,16 @@ Now (after 5-10 mins) you should have a directory full of CSV files. These files
 
 Now you should have a Neo4j database ready to start!
 
+### Alternative 2: CSV to Neo4j Module
+
+The module is flexible and lets you perform multiple ingestions on a live database. It can be triggered with the following command:
+
+    java -jar csv2neo/target/csv2neo-1.0-SNAPSHOT.jar -i -d output_csv
+
 ## Step 4: JSON to JSON *for Solr*
 
-Similar to how the Neo4j CSV was generated, you can also generate JSON files ready for uploading to SOLR using neo2solr.
+Similar to how the Neo4j CSV was generated, you can also generate JSON files ready for uploading to SOLR using `json2solr`, which can likewise be run against a live Solr instance.
 
-    java -jar json2solr/target/json2solr-1.0-SNAPSHOT.jar --input foundry_out_flat.json --outDir output_csv
+    java -jar json2solr/target/json2solr-1.0-SNAPSHOT.jar --input foundry_out.jsonl --outDir output_csv
 
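+
+For reference, a fuller `csv2neo` invocation than the one shown above. The
+connection flags come from `ImportCSV.getOptions()`; the URI, user and
+database name below are placeholders, and `-a` switches the driver to basic
+authentication with `-u`/`-pw`:
+
+    java -jar csv2neo/target/csv2neo-1.0-SNAPSHOT.jar -i -d output_csv \
+        -a -u neo4j -pw <password> -uri neo4j://localhost:7687 -db neo4j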
From d2062f45b8df85b8da38862082428ad8cd192152 Mon Sep 17 00:00:00 2001
From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com>
Date: Wed, 24 Jul 2024 20:28:33 +0200
Subject: [PATCH 061/146] updated numbering on dataload documentation for #14

---
 dataload/README.md | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/dataload/README.md b/dataload/README.md
index 597fd00e9..f55b0ba20 100644
--- a/dataload/README.md
+++ b/dataload/README.md
@@ -21,14 +21,14 @@ Use linker to link the json into a jsonl file.
 
     java -jar linker/target/linker-1.0-SNAPSHOT.jar --input foundry_out.json --output foundry_out.jsonl
 
-## Step 2: JSON to CSV *for Neo4j*
+## Step 3: JSON to CSV *for Neo4j*
 
 You can now convert this huge JSON file to a CSV file ready for Neo4j, using json2neo:
 
     rm -rf output_csv && mkdir output_csv
     java -jar json2neo/target/json2neo-1.0-SNAPSHOT.jar --input foundry_out.jsonl --outDir output_csv
 
-## Step 3: CSV to Neo4j
+## Step 4: CSV to Neo4j
 
 Now (after 5-10 mins) you should have a directory full of CSV files. These files are formatted especially for Neo4j. You can load them using the `neo4j-admin import` command or the `csv2neo` module.
 
@@ -53,7 +53,7 @@ The module is flexible and lets you perform multiple ingestions on a live
 
     java -jar csv2neo/target/csv2neo-1.0-SNAPSHOT.jar -i -d output_csv
 
-## Step 4: JSON to JSON *for Solr*
+## Step 5: JSON to JSON *for Solr*
 

From e56c9ba2115e062a7013cbb57875b4b28b03547c Mon Sep 17 00:00:00 2001
From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com>
Date: Wed, 24 Jul 2024 22:06:17 +0200
Subject: [PATCH 062/146] refactored query execution for #14

---
 .../uk/ac/ebi/spot/csv2neo/ImportCSV.java | 73 ++++++-------------
 1 file changed, 21 insertions(+), 52 deletions(-)

diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java
index 5ea82c2d2..26bf51363 100644
--- a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java
+++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java
@@ -60,19 +60,7 @@ public static void generateCreationQueries(List<File> files, Session session, boolean safe) throws IOException {
                 String[] row = csvRecord.toList().toArray(String[]::new);
                 String query = generateNodeCreationQuery(headers,row);
                 //System.out.println(query);
                 if(query.isEmpty())
                     System.out.println("empty query for appended line: 
"+Arrays.toString(row)+" in file: "+file); - if(safe){ - try (Transaction tx = session.beginTransaction()) { - tx.run(query); - tx.commit(); - } catch(Exception e){ - e.printStackTrace(); - } - } else - try{ - session.run(query); - } catch (Exception e){ - e.printStackTrace(); - } + executeQuery(session, safe, query); + } } } + private static void executeQuery(Session session, boolean safe, String query){ + if(safe){ + try (Transaction tx = session.beginTransaction()) { + tx.run(query); + tx.commit(); + } catch(Exception e){ + e.printStackTrace(); + } + } else + try{ + session.run(query); + } catch (Exception e){ + e.printStackTrace(); + } + } + private static Options getOptions() { Options options = new Options(); options.addOption("i", "ingest",false, "ingest ontologies"); From 6a1cb6a736d0d84d55b8e34e1361080ff1038106 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Thu, 25 Jul 2024 10:23:41 +0200 Subject: [PATCH 063/146] handled specific json syntax exception for #14 --- .../ac/ebi/spot/ols/service/Neo4jClient.java | 26 ++++++++++++++++--- 1 file changed, 22 insertions(+), 4 deletions(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/service/Neo4jClient.java b/backend/src/main/java/uk/ac/ebi/spot/ols/service/Neo4jClient.java index cfbab9afd..764e25c54 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/service/Neo4jClient.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/service/Neo4jClient.java @@ -6,9 +6,8 @@ import java.util.stream.Collectors; import com.google.common.base.Stopwatch; -import com.google.gson.Gson; -import com.google.gson.JsonElement; -import com.google.gson.JsonParser; +import com.google.gson.*; +import com.google.gson.stream.MalformedJsonException; import org.neo4j.driver.*; import org.neo4j.driver.Record; import org.neo4j.driver.exceptions.NoSuchRecordException; @@ -130,7 +129,7 @@ public Page queryPaginated(String query, String resVar, String coun Page page = new PageImpl<>( result.list().stream() - .map(r -> JsonParser.parseString(r.get(resVar).get("_json").asString())) + .map(r -> parseElementByRecord(r,resVar)) .collect(Collectors.toList()), pageable, count); @@ -138,6 +137,25 @@ public Page queryPaginated(String query, String resVar, String coun return page; } + public JsonElement parseElementByRecord(Record r, String resVar){ + JsonElement parsed = new JsonObject(); + + try { + parsed = JsonParser.parseString(r.get(resVar).get("_json").asString()); + } catch (JsonSyntaxException jse){ + System.out.println("invalid json: "+r.get(resVar).get("_json").asString()); + System.out.println(jse.getMessage() + " - Some suspicious fragments will be removed from json."); + try { + parsed = JsonParser.parseString(r.get(resVar).get("_json").asString().replaceAll("\"\\\\\"", "\"").replaceAll("\\\\\"", "\"")); + } catch (JsonSyntaxException jse2){ + System.out.println("invalid trimmed json: "+r.get(resVar).get("_json").asString().replaceAll("\"\\\\\"", "\"")); + System.out.println(jse2.getMessage() + " - default non-map value will be assigned."); + } + } + + return parsed; + } + public JsonElement queryOne(String query, String resVar, Value parameters) { Session session = getSession(); From ade77549584b6a658a71d15f7175f872f3c18b3a Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Thu, 25 Jul 2024 13:05:19 +0200 Subject: [PATCH 064/146] handled specific uncoercible value exception for #14 --- .../main/java/uk/ac/ebi/spot/ols/service/Neo4jClient.java | 6 +++++- 1 file 
changed, 5 insertions(+), 1 deletion(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/service/Neo4jClient.java b/backend/src/main/java/uk/ac/ebi/spot/ols/service/Neo4jClient.java index 764e25c54..ce77e36af 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/service/Neo4jClient.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/service/Neo4jClient.java @@ -7,7 +7,6 @@ import com.google.common.base.Stopwatch; import com.google.gson.*; -import com.google.gson.stream.MalformedJsonException; import org.neo4j.driver.*; import org.neo4j.driver.Record; import org.neo4j.driver.exceptions.NoSuchRecordException; @@ -151,6 +150,11 @@ public JsonElement parseElementByRecord(Record r, String resVar){ System.out.println("invalid trimmed json: "+r.get(resVar).get("_json").asString().replaceAll("\"\\\\\"", "\"")); System.out.println(jse2.getMessage() + " - default non-map value will be assigned."); } + } catch(org.neo4j.driver.exceptions.value.Uncoercible u) { + System.out.println(u.getMessage() + " - Object is tried instead of String. External Array characters are removed. "); + String s = r.get(resVar).get("_json").asObject().toString(); + System.out.println("object json: "+s.substring(1, s.length() - 1)); + parsed = JsonParser.parseString(s.substring(1, s.length() - 1)); } return parsed; From 8202820731746d32b90b8438de13f69993f2fd2b Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Thu, 25 Jul 2024 14:34:28 +0200 Subject: [PATCH 065/146] added missing import --- .../controller/api/v1/V1SearchController.java | 19 ++++++------------- 1 file changed, 6 insertions(+), 13 deletions(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java index 8029e62ab..2a216298a 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java @@ -22,16 +22,14 @@ import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.common.SolrDocument; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.data.domain.Pageable; -import org.springframework.data.web.PageableDefault; import org.springframework.http.MediaType; -import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import com.google.gson.Gson; import com.google.gson.JsonObject; import com.google.gson.JsonParser; +import org.springframework.web.bind.annotation.RestController; import uk.ac.ebi.spot.ols.model.FilterOption; import uk.ac.ebi.spot.ols.repository.Validation; import uk.ac.ebi.spot.ols.repository.solr.OlsSolrClient; @@ -202,12 +200,12 @@ public void search( // solrQuery.addHighlightField("https://github.com/EBISPOT/owl2neo#definition"); // solrQuery.addFacetField("ontology_name", "ontology_prefix", "type", "subset", "is_defining_ontology", "is_obsolete"); - + /* * Fix: Start issue - * https://github.com/EBISPOT/ols4/issues/613 * Added new OLS4 faceFields - * + * */ // TODO: Need to check and add additional faceted fields if required solrQuery.addFacetField("ontologyId", "ontologyIri", "ontologyPreferredPrefix", "type", "isDefiningOntology", "isObsolete"); @@ -297,7 +295,7 @@ public void search( responseBody.put("numFound", 
qr.getResults().getNumFound()); responseBody.put("start", start); responseBody.put("docs", docs); - + /* * Fix: Start issue - * https://github.com/EBISPOT/ols4/issues/613 @@ -314,7 +312,7 @@ public void search( Map responseObj = new HashMap<>(); responseObj.put("responseHeader", responseHeader); responseObj.put("response", responseBody); - + /* * Fix: Start issue - * https://github.com/EBISPOT/ols4/issues/613 @@ -330,7 +328,7 @@ public void search( response.getOutputStream().write(gson.toJson(responseObj).getBytes(StandardCharsets.UTF_8)); response.flushBuffer(); } - + private Map> parseFacetFields(List facetFields) { Map> facetFieldsMap = new HashMap<>(); List newFacetFields; @@ -381,9 +379,4 @@ private String createUnionQuery(String query, String[] fields, boolean exact) { } return builder.toString(); } - - - - - } From 3628f88685c01ad7ebca8f9b2348c8aa96d49078 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Tue, 6 Aug 2024 11:18:48 +0200 Subject: [PATCH 066/146] implemented alternative query generation approach with external query parameters on a template query for #22 --- .../uk/ac/ebi/spot/csv2neo/ImportCSV.java | 26 ++++++++-- .../ac/ebi/spot/csv2neo/QueryGeneration.java | 50 +++++++++++++++++-- 2 files changed, 67 insertions(+), 9 deletions(-) diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java index 26bf51363..78af117ac 100644 --- a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java +++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java @@ -14,6 +14,8 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.Map; + import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.DefaultParser; @@ -56,11 +58,11 @@ public static void generateCreationQueries(List files, Session session, bo String[] headers = csvParser.getHeaderNames().toArray(String[]::new); for (CSVRecord csvRecord : csvParser) { String[] row = csvRecord.toList().toArray(String[]::new); - String query = generateNodeCreationQuery(headers,row); - //System.out.println(query); + String query = generateBlankNodeCreationQuery(headers,row); + Map params = generateProps(headers,row); if(query.isEmpty()) System.out.println("empty query for appended line: "+Arrays.toString(row)+" in file: "+file); - executeQuery(session, safe, query); + executeBlankQuery(session, params, safe, query); } } @@ -75,7 +77,6 @@ public static void generateCreationQueries(List files, Session session, bo for (CSVRecord csvRecord : csvParser) { String[] row = csvRecord.toList().toArray(String[]::new); String query = generateRelationCreationQuery(headers,row); - //System.out.println(query); if(query.isEmpty()) System.out.println("empty query for appended line: "+Arrays.toString(row)+" in file: "+file); executeQuery(session, safe, query); @@ -122,7 +123,6 @@ public static void generateCQ(List files, Session session, boolean safe) t String[] headers = allRows.get(0); List rows = allRows.subList(1, allRows.size()); - //Read CSV line by line and use the string array as you want for (String[] row : rows) { String query = generateRelationCreationQuery(headers,row); //System.out.println(query); @@ -150,6 +150,22 @@ private static void executeQuery(Session session, boolean safe, String query){ } } + private static void executeBlankQuery(Session session, Map params, boolean safe, 
String query){ + if(safe){ + try (Transaction tx = session.beginTransaction()) { + tx.run(query, params); + tx.commit(); + } catch(Exception e){ + e.printStackTrace(); + } + } else + try{ + session.run(query, params); + } catch (Exception e){ + e.printStackTrace(); + } + } + private static Options getOptions() { Options options = new Options(); options.addOption("i", "ingest",false, "ingest ontologies"); diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java index 61f05e4b7..ca914865f 100644 --- a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java +++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java @@ -1,5 +1,7 @@ package uk.ac.ebi.spot.csv2neo; +import java.util.HashMap; +import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -36,12 +38,54 @@ public static String generateNodeCreationQuery(String[] titles, String[] values) } else { System.out.println("titles and values are not equal"); System.out.println("titles: "+titles.length + " - values: " +values.length); - /*for (String value : values) - System.out.println("value: "+value);*/ } return sb.toString(); } + public static String generateBlankNodeCreationQuery(String[] titles, String[] values){ + + StringBuilder sb = new StringBuilder(); + + if (titles.length == values.length) { + + sb.append("CREATE (") + .append(":") + .append("`"+values[1].replace("|","`:`")+"`") + .append(" $props") + + .append(")") + .append(" "); + } else { + System.out.println("titles and values are not equal"); + System.out.println("titles: "+titles.length + " - values: " +values.length); + } + return sb.toString(); + } + + public static Map generateProps(String[] titles, String[] values){ + + Map props = new HashMap<>(); + if (titles.length == values.length) { + for (int i = 0; i < values.length; i++) { + if (i == 0) + props.put("id",values[i]); + if(i !=1){ + props.put(titles[i].split(":")[0].replaceAll("\"\"","\""),convertToJSONArray(values[i])); + } + } + + } else { + System.out.println("titles and values are not equal"); + System.out.println("titles: "+titles.length + " - values: " +values.length); + } + + Map params = new HashMap<>(); + params.put( "props", props ); + + return params; + } + + public static String generateNodeSetQuery(String[] titles, String[] values){ StringBuilder sb = new StringBuilder(); @@ -79,8 +123,6 @@ public static String generateRelationCreationQuery(String[] titles, String[] val } else { System.out.println("titles and values are not equal"); System.out.println("titles: "+titles.length + " - values: " +values.length); - /*for (String value : values) - System.out.println("value: "+value);*/ } return sb.toString(); From efe7dc6f6643745a983bbe7fa70fef92cb23fdab Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Tue, 6 Aug 2024 12:38:11 +0200 Subject: [PATCH 067/146] refactored for #22 --- dataload/csv2neo/pom.xml | 10 --- .../uk/ac/ebi/spot/csv2neo/ImportCSV.java | 87 +++---------------- .../ac/ebi/spot/csv2neo/QueryGeneration.java | 66 +------------- 3 files changed, 14 insertions(+), 149 deletions(-) diff --git a/dataload/csv2neo/pom.xml b/dataload/csv2neo/pom.xml index a76e7023d..027771dc5 100644 --- a/dataload/csv2neo/pom.xml +++ b/dataload/csv2neo/pom.xml @@ -27,22 +27,12 @@ 1.5.0 compile - - - - com.opencsv - opencsv - 5.9 - - org.apache.commons commons-csv 1.11.0 - - diff --git 
a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java index 78af117ac..0a2bb2f43 100644 --- a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java +++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java @@ -1,10 +1,5 @@ package uk.ac.ebi.spot.csv2neo; -import com.opencsv.CSVParser; -import com.opencsv.CSVParserBuilder; -import com.opencsv.CSVReader; -import com.opencsv.CSVReaderBuilder; -import com.opencsv.exceptions.CsvException; import org.apache.commons.csv.CSVFormat; import org.apache.commons.csv.CSVRecord; import org.neo4j.driver.*; @@ -62,7 +57,7 @@ public static void generateCreationQueries(List files, Session session, bo Map params = generateProps(headers,row); if(query.isEmpty()) System.out.println("empty query for appended line: "+Arrays.toString(row)+" in file: "+file); - executeBlankQuery(session, params, safe, query); + executeQuery(session, params, safe, query); } } @@ -79,88 +74,28 @@ public static void generateCreationQueries(List files, Session session, bo String query = generateRelationCreationQuery(headers,row); if(query.isEmpty()) System.out.println("empty query for appended line: "+Arrays.toString(row)+" in file: "+file); - executeQuery(session, safe, query); + executeQuery(session, null, safe, query); } } } - public static void generateCQ(List files, Session session, boolean safe) throws IOException, CsvException { - - CSVParser parser = new CSVParserBuilder().withSeparator(',').withQuoteChar('"').build(); - - for (File file : files){ - if(!(file.getName().contains("_ontologies") || file.getName().contains("_properties") - || file.getName().contains("_individuals") || file.getName().contains("_classes")) || !file.getName().endsWith(".csv")) - continue; - - CSVReader csvReader = new CSVReaderBuilder(new FileReader(file.getAbsolutePath())) - .withSkipLines(0) - .withCSVParser(parser) - .build(); - - List allRows = csvReader.readAll(); - String[] headers = allRows.get(0); - List rows = allRows.subList(1, allRows.size()); - - for (String[] row : rows) { - String query = generateNodeCreationQuery(headers,row); - if(query.isEmpty()) - System.out.println("empty query for appended line: "+Arrays.toString(row)+" in file: "+file); - executeQuery(session, safe, query); - } - } - - for (File file : files){ - if((!file.getName().contains("_edges")) || !file.getName().endsWith(".csv")) - continue; - - CSVReader csvReader = new CSVReaderBuilder(new FileReader(file.getAbsolutePath())) - .withSkipLines(0) - .withCSVParser(parser) - .build(); - - List allRows = csvReader.readAll(); - String[] headers = allRows.get(0); - List rows = allRows.subList(1, allRows.size()); - - for (String[] row : rows) { - String query = generateRelationCreationQuery(headers,row); - //System.out.println(query); - if(query.isEmpty()) - System.out.println("empty query for appended line: "+Arrays.toString(row)+" in file: "+file); - executeQuery(session, safe, query); - - } - } - } - - private static void executeQuery(Session session, boolean safe, String query){ - if(safe){ - try (Transaction tx = session.beginTransaction()) { - tx.run(query); - tx.commit(); - } catch(Exception e){ - e.printStackTrace(); - } - } else - try{ - session.run(query); - } catch (Exception e){ - e.printStackTrace(); - } - } - - private static void executeBlankQuery(Session session, Map params, boolean safe, String query){ + private static void executeQuery(Session session, Map params, boolean safe, String query){ 
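+        // Note: with 'safe' set, each statement below runs in its own managed
+        // transaction, so one failing row cannot abort the rest of the load;
+        // otherwise the query goes through session.run() with auto-commit.
+        // 'params' carries the $props map built by QueryGeneration.generateProps
+        // and is null for relationship queries. Illustratively (the label and id
+        // shown are made-up values):
+        //   query  = "CREATE (:`OntologyClass` $props) "
+        //   params = {props: {id: "efo+class+...", ...}}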
if(safe){ try (Transaction tx = session.beginTransaction()) { - tx.run(query, params); + if (params != null) + tx.run(query, params); + else + tx.run(query); tx.commit(); } catch(Exception e){ e.printStackTrace(); } } else try{ - session.run(query, params); + if (params != null) + session.run(query, params); + else + session.run(query); } catch (Exception e){ e.printStackTrace(); } diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java index ca914865f..7fe3bd328 100644 --- a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java +++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java @@ -12,40 +12,8 @@ */ public class QueryGeneration { - public static String generateNodeCreationQuery(String[] titles, String[] values){ - - StringBuilder sb = new StringBuilder(); - - if (titles.length == values.length) { - - sb.append("CREATE (") - .append(":") - .append("`"+values[1].replace("|","`:`")+"`") - .append(" {"); - sb.append("id: ").append("\'"+values[0]+"\'"); - - for (int i = 2; i < values.length; i++) { - String text = values[i].replaceAll("\"\"","\"").replaceAll("\\\\", "\\\\\\\\").replaceAll("\'","\\\\'"); - sb.append(", ") - .append("`"+titles[i].split(":")[0].replaceAll("\"\"","\"")+"`") - .append(": ") - .append(convertToJSONArray("\'"+text+"\'")); - } - - sb.append("}") - .append(")") - .append(" "); - } else { - System.out.println("titles and values are not equal"); - System.out.println("titles: "+titles.length + " - values: " +values.length); - } - return sb.toString(); - } - public static String generateBlankNodeCreationQuery(String[] titles, String[] values){ - StringBuilder sb = new StringBuilder(); - if (titles.length == values.length) { sb.append("CREATE (") @@ -65,14 +33,10 @@ public static String generateBlankNodeCreationQuery(String[] titles, String[] va public static Map generateProps(String[] titles, String[] values){ Map props = new HashMap<>(); + props.put("id",values[0]); if (titles.length == values.length) { - for (int i = 0; i < values.length; i++) { - if (i == 0) - props.put("id",values[i]); - if(i !=1){ - props.put(titles[i].split(":")[0].replaceAll("\"\"","\""),convertToJSONArray(values[i])); - } - } + for (int i = 2; i < values.length; i++) + props.put(titles[i].split(":")[0].replaceAll("\"\"","\""),convertToJSONArray(values[i])); } else { System.out.println("titles and values are not equal"); @@ -85,30 +49,6 @@ public static Map generateProps(String[] titles, String[] values) return params; } - - public static String generateNodeSetQuery(String[] titles, String[] values){ - - StringBuilder sb = new StringBuilder(); - - if (titles.length == values.length){ - sb.append("MATCH (n) where n.id = ").append("\'"+values[0].substring(1, values[0].length() - 1)+"\'").append(" SET "); - - boolean first = true; - - for (int i = 2; i < values.length; i++){ - if(!first) - sb.append(" AND "); - first = false; - String text = values[i].substring(1, values[i].length() - 1).replaceAll("\"\"","\"").replaceAll("\\\\", "\\\\\\\\").replaceAll("\'","\\\\'"); - sb.append("n.").append("`"+titles[i].substring(1, titles[i].length() - 1).split(":")[0].replaceAll("\"\"","\"")+"`") - .append(" = ").append(convertToJSONArray("\'"+text+"\'")); - } - - } - - return sb.toString(); - } - public static String generateRelationCreationQuery(String[] titles, String[] values){ StringBuilder sb = new StringBuilder(); From 
653162badb1963866b7bfa1ea8ad2ee0b1f9439a Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Wed, 7 Aug 2024 22:30:07 +0200 Subject: [PATCH 068/146] batch based solution is implemented for query based data creation in #25 --- .../uk/ac/ebi/spot/csv2neo/ImportCSV.java | 123 ++++++++++-------- 1 file changed, 66 insertions(+), 57 deletions(-) diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java index 0a2bb2f43..67bce5e5c 100644 --- a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java +++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java @@ -6,11 +6,7 @@ import java.io.*; import java.nio.file.Files; import java.nio.file.Paths; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Map; - +import java.util.*; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.DefaultParser; @@ -41,64 +37,63 @@ public static List listFiles(File[] files) throws IOException { return fileList; } - public static void generateCreationQueries(List files, Session session, boolean safe) throws IOException { - + public static void executeBatchedNodeQueries(List files, Session session, int batchSize) throws IOException { for (File file : files) { if (!(file.getName().contains("_ontologies") || file.getName().contains("_properties") || file.getName().contains("_individuals") || file.getName().contains("_classes")) || !file.getName().endsWith(".csv")) continue; - Reader reader = Files.newBufferedReader(Paths.get(file.getAbsolutePath())); org.apache.commons.csv.CSVParser csvParser = new org.apache.commons.csv.CSVParser(reader, CSVFormat.POSTGRESQL_CSV.withFirstRecordAsHeader().withTrim()); String[] headers = csvParser.getHeaderNames().toArray(String[]::new); - for (CSVRecord csvRecord : csvParser) { - String[] row = csvRecord.toList().toArray(String[]::new); - String query = generateBlankNodeCreationQuery(headers,row); - Map params = generateProps(headers,row); - if(query.isEmpty()) - System.out.println("empty query for appended line: "+Arrays.toString(row)+" in file: "+file); - executeQuery(session, params, safe, query); + List> splitRecords = splitList(csvParser.getRecords(),batchSize); + for (List records : splitRecords){ + try (Transaction tx = session.beginTransaction()){ + for (CSVRecord csvRecord : records) { + String[] row = csvRecord.toList().toArray(String[]::new); + String query = generateBlankNodeCreationQuery(headers,row); + Map params = generateProps(headers,row); + if(query.isEmpty()) + System.out.println("empty query for appended line: "+Arrays.toString(row)+" in file: "+file); + else + tx.run(query,params); + } + tx.commit(); + } } } + } - for (File file : files){ - if((!file.getName().contains("_edges")) || !file.getName().endsWith(".csv")) + public static void executeBatchedRelationshipQueries(List files, Session session, int batchSize) throws IOException { + for (File file : files) { + if ((!file.getName().contains("_edges")) || !file.getName().endsWith(".csv")) continue; Reader reader = Files.newBufferedReader(Paths.get(file.getAbsolutePath())); org.apache.commons.csv.CSVParser csvParser = new org.apache.commons.csv.CSVParser(reader, CSVFormat.POSTGRESQL_CSV.withFirstRecordAsHeader().withTrim()); String[] headers = csvParser.getHeaderNames().toArray(String[]::new); - - for (CSVRecord csvRecord : csvParser) { 
- String[] row = csvRecord.toList().toArray(String[]::new); - String query = generateRelationCreationQuery(headers,row); - if(query.isEmpty()) - System.out.println("empty query for appended line: "+Arrays.toString(row)+" in file: "+file); - executeQuery(session, null, safe, query); + List> splitRecords = splitList(csvParser.getRecords(), batchSize); + for (List records : splitRecords){ + try (Transaction tx = session.beginTransaction()){ + for (CSVRecord csvRecord : records) { + String[] row = csvRecord.toList().toArray(String[]::new); + String query = generateRelationCreationQuery(headers,row); + if(query.isEmpty()) + System.out.println("empty query for appended line: "+Arrays.toString(row)+" in file: "+file); + else + tx.run(query); + } + tx.commit(); + } } } } - private static void executeQuery(Session session, Map params, boolean safe, String query){ - if(safe){ - try (Transaction tx = session.beginTransaction()) { - if (params != null) - tx.run(query, params); - else - tx.run(query); - tx.commit(); - } catch(Exception e){ - e.printStackTrace(); - } - } else - try{ - if (params != null) - session.run(query, params); - else - session.run(query); - } catch (Exception e){ - e.printStackTrace(); - } + public static List> splitList(List list, int batchSize) { + List> subLists = new ArrayList<>(); + for (int i = 0; i < list.size(); i += batchSize) { + subLists.add(new ArrayList<>(list.subList(i, Math.min(i + batchSize, list.size())))); + } + return subLists; } private static Options getOptions() { @@ -111,7 +106,7 @@ private static Options getOptions() { options.addOption("uri", "databaseuri",true, "neo4j database uri"); options.addOption("db", "database",true, "neo4j database name"); options.addOption("d", "directory",true, "neo4j csv import directory"); - options.addOption("s", "safe",false, "execute each neo4j query in transactions or the session"); + options.addOption("bs", "batchsize",true, "batch size for splitting queries into multiple transactions."); return options; } @@ -125,25 +120,40 @@ public static void main(String... args) throws IOException, ParseException { final String dbPassword = cmd.hasOption("pw") ? cmd.getOptionValue("pw") : "testtest"; final String directory = cmd.hasOption("d") ? cmd.getOptionValue("d") : "/tmp/out"; final String ontologiesToBeRemoved = cmd.hasOption("rm") ? cmd.getOptionValue("rm") : ""; + final int batchSize = cmd.hasOption("bs") && Integer.parseInt(cmd.getOptionValue("bs"))>0 ? Integer.parseInt(cmd.getOptionValue("bs")) : 1000; try (var driver = cmd.hasOption("a") ? 
GraphDatabase.driver(dbUri, AuthTokens.basic(dbUser, dbPassword)) : GraphDatabase.driver(dbUri)) { driver.verifyConnectivity(); try (var session = driver.session(SessionConfig.builder().withDatabase(db).build())) { - try{ - session.run("CREATE CONSTRAINT IF NOT EXISTS FOR (n:Ontology) REQUIRE n.id IS UNIQUE"); - session.run("CREATE CONSTRAINT IF NOT EXISTS FOR (n:OntologyEntity) REQUIRE n.id IS UNIQUE"); - session.run("CREATE CONSTRAINT IF NOT EXISTS FOR (n:OntologyClass) REQUIRE n.id IS UNIQUE"); - } catch(Exception e){ - e.printStackTrace(); - } + List indexCommands = new ArrayList<>(); + indexCommands.add("CREATE CONSTRAINT IF NOT EXISTS FOR (n:Ontology) REQUIRE n.id IS UNIQUE"); + indexCommands.add("CREATE CONSTRAINT IF NOT EXISTS FOR (n:OntologyEntity) REQUIRE n.id IS UNIQUE"); + indexCommands.add("CREATE CONSTRAINT IF NOT EXISTS FOR (n:OntologyClass) REQUIRE n.id IS UNIQUE"); + indexCommands.add("CREATE CONSTRAINT IF NOT EXISTS FOR (n:OntologyProperty) REQUIRE n.id IS UNIQUE"); + indexCommands.add("CREATE CONSTRAINT IF NOT EXISTS FOR (n:OntologyIndividual) REQUIRE n.id IS UNIQUE"); + indexCommands.add("CREATE TEXT INDEX ontology_id IF NOT EXISTS FOR (n:Ontology) ON (n.id)"); + indexCommands.add("CREATE TEXT INDEX entity_id IF NOT EXISTS FOR (n:OntologyEntity) ON (n.id)"); + indexCommands.add("CREATE TEXT INDEX class_id IF NOT EXISTS FOR (n:OntologyClass) ON (n.id)"); + indexCommands.add("CREATE TEXT INDEX property_id IF NOT EXISTS FOR (n:OntologyProperty) ON (n.id)"); + indexCommands.add("CREATE TEXT INDEX individual_id IF NOT EXISTS FOR (n:OntologyIndividual) ON (n.id)"); + indexCommands.add("CREATE TEXT INDEX ontology_ont_id IF NOT EXISTS FOR (n:Ontology) ON (n.ontologyId)"); + indexCommands.add("CREATE TEXT INDEX entity_ont_id IF NOT EXISTS FOR (n:OntologyEntity) ON (n.ontologyId)"); + indexCommands.add("CREATE TEXT INDEX class_ont_id IF NOT EXISTS FOR (n:OntologyClass) ON (n.ontologyId)"); + indexCommands.add("CREATE TEXT INDEX property_ont_id IF NOT EXISTS FOR (n:OntologyProperty) ON (n.ontologyId)"); + indexCommands.add("CREATE TEXT INDEX individual_ont_id IF NOT EXISTS FOR (n:OntologyIndividual) ON (n.ontologyId)"); + for (String command : indexCommands) + try{ + session.run(command); + } catch(Exception e){ + System.out.println("Could not create constraint: "+e.getMessage()); + } + System.out.println("Start Neo4J Modification..."); if(cmd.hasOption("i")){ File dir = new File(directory); List files = listFiles(dir.listFiles()); - if(cmd.hasOption("s")) - generateCreationQueries(files,session,true); - else - generateCreationQueries(files,session,false); + executeBatchedNodeQueries(files, session,batchSize); + executeBatchedRelationshipQueries(files,session,batchSize); } else for(String ontology : ontologiesToBeRemoved.split(",")) try { @@ -151,7 +161,6 @@ public static void main(String... 
args) throws IOException, ParseException { } catch (Exception e){ e.printStackTrace(); } - } } } From 8903a7a318a14ec561d01823a2470b68294332df Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Thu, 8 Aug 2024 19:15:39 +0200 Subject: [PATCH 069/146] multithreading is implemented for query based data creation in #25 --- .../uk/ac/ebi/spot/csv2neo/ImportCSV.java | 51 ++++++++--------- .../spot/csv2neo/NodeCreationQueryTask.java | 57 +++++++++++++++++++ .../RelationShipCreationQueryTask.java | 54 ++++++++++++++++++ 3 files changed, 135 insertions(+), 27 deletions(-) create mode 100644 dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/NodeCreationQueryTask.java create mode 100644 dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/RelationShipCreationQueryTask.java diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java index 67bce5e5c..60ebc5255 100644 --- a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java +++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java @@ -7,6 +7,10 @@ import java.nio.file.Files; import java.nio.file.Paths; import java.util.*; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; + import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.DefaultParser; @@ -37,7 +41,7 @@ public static List listFiles(File[] files) throws IOException { return fileList; } - public static void executeBatchedNodeQueries(List files, Session session, int batchSize) throws IOException { + public static void executeBatchedNodeQueries(List files, Driver driver, int batchSize, int poolSize) throws IOException, InterruptedException { for (File file : files) { if (!(file.getName().contains("_ontologies") || file.getName().contains("_properties") || file.getName().contains("_individuals") || file.getName().contains("_classes")) || !file.getName().endsWith(".csv")) @@ -46,24 +50,18 @@ public static void executeBatchedNodeQueries(List files, Session session, org.apache.commons.csv.CSVParser csvParser = new org.apache.commons.csv.CSVParser(reader, CSVFormat.POSTGRESQL_CSV.withFirstRecordAsHeader().withTrim()); String[] headers = csvParser.getHeaderNames().toArray(String[]::new); List> splitRecords = splitList(csvParser.getRecords(),batchSize); + CountDownLatch latch = new CountDownLatch(splitRecords.size()); + ExecutorService executorService = Executors.newFixedThreadPool(poolSize); for (List records : splitRecords){ - try (Transaction tx = session.beginTransaction()){ - for (CSVRecord csvRecord : records) { - String[] row = csvRecord.toList().toArray(String[]::new); - String query = generateBlankNodeCreationQuery(headers,row); - Map params = generateProps(headers,row); - if(query.isEmpty()) - System.out.println("empty query for appended line: "+Arrays.toString(row)+" in file: "+file); - else - tx.run(query,params); - } - tx.commit(); - } + NodeCreationQueryTask task = new NodeCreationQueryTask(driver,latch, records,headers,file); + executorService.submit(task); } + latch.await(); + executorService.shutdown(); } } - public static void executeBatchedRelationshipQueries(List files, Session session, int batchSize) throws IOException { + public static void executeBatchedRelationshipQueries(List files, Driver driver, int batchSize, int poolSize) throws IOException, 
InterruptedException { for (File file : files) { if ((!file.getName().contains("_edges")) || !file.getName().endsWith(".csv")) continue; @@ -72,19 +70,14 @@ public static void executeBatchedRelationshipQueries(List files, Session s org.apache.commons.csv.CSVParser csvParser = new org.apache.commons.csv.CSVParser(reader, CSVFormat.POSTGRESQL_CSV.withFirstRecordAsHeader().withTrim()); String[] headers = csvParser.getHeaderNames().toArray(String[]::new); List> splitRecords = splitList(csvParser.getRecords(), batchSize); + CountDownLatch latch = new CountDownLatch(splitRecords.size()); + ExecutorService executorService = Executors.newFixedThreadPool(poolSize); for (List records : splitRecords){ - try (Transaction tx = session.beginTransaction()){ - for (CSVRecord csvRecord : records) { - String[] row = csvRecord.toList().toArray(String[]::new); - String query = generateRelationCreationQuery(headers,row); - if(query.isEmpty()) - System.out.println("empty query for appended line: "+Arrays.toString(row)+" in file: "+file); - else - tx.run(query); - } - tx.commit(); - } + RelationShipCreationQueryTask task = new RelationShipCreationQueryTask(driver,latch,records,headers,file); + executorService.submit(task); } + latch.await(); + executorService.shutdown(); } } @@ -107,6 +100,7 @@ private static Options getOptions() { options.addOption("db", "database",true, "neo4j database name"); options.addOption("d", "directory",true, "neo4j csv import directory"); options.addOption("bs", "batchsize",true, "batch size for splitting queries into multiple transactions."); + options.addOption("ps", "pool size",true, "number of threads in the pool"); return options; } @@ -121,6 +115,7 @@ public static void main(String... args) throws IOException, ParseException { final String directory = cmd.hasOption("d") ? cmd.getOptionValue("d") : "/tmp/out"; final String ontologiesToBeRemoved = cmd.hasOption("rm") ? cmd.getOptionValue("rm") : ""; final int batchSize = cmd.hasOption("bs") && Integer.parseInt(cmd.getOptionValue("bs"))>0 ? Integer.parseInt(cmd.getOptionValue("bs")) : 1000; + final int poolSize = cmd.hasOption("ps") && Integer.parseInt(cmd.getOptionValue("ps"))>0 ? Integer.parseInt(cmd.getOptionValue("ps")) : 20; try (var driver = cmd.hasOption("a") ? GraphDatabase.driver(dbUri, AuthTokens.basic(dbUser, dbPassword)) : GraphDatabase.driver(dbUri)) { driver.verifyConnectivity(); @@ -152,8 +147,8 @@ public static void main(String... args) throws IOException, ParseException { if(cmd.hasOption("i")){ File dir = new File(directory); List files = listFiles(dir.listFiles()); - executeBatchedNodeQueries(files, session,batchSize); - executeBatchedRelationshipQueries(files,session,batchSize); + executeBatchedNodeQueries(files,driver,batchSize,poolSize); + executeBatchedRelationshipQueries(files,driver,batchSize, poolSize); } else for(String ontology : ontologiesToBeRemoved.split(",")) try { @@ -161,6 +156,8 @@ public static void main(String... 
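The new pool-size flag follows the same guarded pattern as the batch size: the parsed value is used only when the option is present and positive, otherwise a default applies. As a reusable helper this could look like (hypothetical name, same commons-cli API the patch already uses):

```java
import org.apache.commons.cli.CommandLine;

class CliDefaultsSketch {
    // Resolve an integer option, falling back when absent or non-positive.
    static int positiveIntOption(CommandLine cmd, String opt, int fallback) {
        if (!cmd.hasOption(opt)) {
            return fallback;
        }
        int parsed = Integer.parseInt(cmd.getOptionValue(opt));
        return parsed > 0 ? parsed : fallback;
    }
}
```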
args) throws IOException, ParseException { } catch (Exception e){ e.printStackTrace(); } + } catch (InterruptedException e) { + throw new RuntimeException(e); } } } diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/NodeCreationQueryTask.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/NodeCreationQueryTask.java new file mode 100644 index 000000000..60c65367f --- /dev/null +++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/NodeCreationQueryTask.java @@ -0,0 +1,57 @@ +package uk.ac.ebi.spot.csv2neo; + +import org.apache.commons.csv.CSVRecord; +import org.neo4j.driver.Driver; +import org.neo4j.driver.Session; + +import java.io.File; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CountDownLatch; + +import static uk.ac.ebi.spot.csv2neo.QueryGeneration.generateBlankNodeCreationQuery; +import static uk.ac.ebi.spot.csv2neo.QueryGeneration.generateProps; + +/** + * @author Erhun Giray TUNCAY + * @email giray.tuncay@tib.eu + * TIB-Leibniz Information Center for Science and Technology + */ +public class NodeCreationQueryTask implements Runnable { + + private final Driver driver; + private final CountDownLatch latch; + private final List records; + private final String[] headers; + private final File file; + + public NodeCreationQueryTask(Driver driver,CountDownLatch latch, List records, String[] headers, File file) { + this.driver = driver; + this.latch = latch; + this.records = records; + this.headers = headers; + this.file = file; + + } + + @Override + public void run() { + try (Session session = driver.session()) { + session.writeTransaction(tx -> { + for (CSVRecord csvRecord : records) { + String[] row = csvRecord.toList().toArray(String[]::new); + String query = generateBlankNodeCreationQuery(headers,row); + Map params = generateProps(headers,row); + if(query.isEmpty()) + System.out.println("empty query for appended line: "+ Arrays.toString(row)+" in file: "+file); + else + tx.run(query,params); + } + return null; + }); + } finally { + latch.countDown(); + } + } +} diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/RelationShipCreationQueryTask.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/RelationShipCreationQueryTask.java new file mode 100644 index 000000000..0866b7d8a --- /dev/null +++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/RelationShipCreationQueryTask.java @@ -0,0 +1,54 @@ +package uk.ac.ebi.spot.csv2neo; + +import org.apache.commons.csv.CSVRecord; +import org.neo4j.driver.Driver; +import org.neo4j.driver.Session; + +import java.io.File; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CountDownLatch; + +import static uk.ac.ebi.spot.csv2neo.QueryGeneration.*; + +/** + * @author Erhun Giray TUNCAY + * @email giray.tuncay@tib.eu + * TIB-Leibniz Information Center for Science and Technology + */ +public class RelationShipCreationQueryTask implements Runnable { + + private final Driver driver; + private final CountDownLatch latch; + private final List records; + private final String[] headers; + private final File file; + + public RelationShipCreationQueryTask(Driver driver, CountDownLatch latch, List records, String[] headers, File file) { + this.driver = driver; + this.latch = latch; + this.records = records; + this.headers = headers; + this.file = file; + } + + @Override + public void run() { + try (Session session = driver.session()) { + session.writeTransaction(tx -> { + for (CSVRecord csvRecord : 
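NodeCreationQueryTask wraps a whole batch in a single session.writeTransaction call, so the batch commits or rolls back as a unit and the driver may retry the transaction function on transient errors. Stripped to its core, the shape is (illustrative names, assuming a list of prepared Cypher strings):

```java
import java.util.List;
import org.neo4j.driver.Driver;
import org.neo4j.driver.Session;

class BatchWriteSketch {
    static void writeBatch(Driver driver, List<String> cypherStatements) {
        try (Session session = driver.session()) {
            session.writeTransaction(tx -> {
                for (String cypher : cypherStatements) {
                    tx.run(cypher); // all statements share one transaction
                }
                return null; // the transaction function must return a value
            });
        }
    }
}
```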
records) { + String[] row = csvRecord.toList().toArray(String[]::new); + String query = generateRelationCreationQuery(headers,row); + if(query.isEmpty()) + System.out.println("empty query for appended line: "+ Arrays.toString(row)+" in file: "+file); + else + tx.run(query); + } + return null; + }); + }finally { + latch.countDown(); + } + } +} From d8f108b800d15d6dd1ae3492db76cc0eb61b4a36 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Mon, 12 Aug 2024 12:32:39 +0200 Subject: [PATCH 070/146] implemented routine for array type query along with string type queries for #30# --- .../ac/ebi/spot/csv2neo/QueryGeneration.java | 66 +++---------------- 1 file changed, 9 insertions(+), 57 deletions(-) diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java index 7fe3bd328..8d92fa53a 100644 --- a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java +++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java @@ -2,8 +2,6 @@ import java.util.HashMap; import java.util.Map; -import java.util.regex.Matcher; -import java.util.regex.Pattern; /** * @author Erhun Giray TUNCAY @@ -15,12 +13,10 @@ public class QueryGeneration { public static String generateBlankNodeCreationQuery(String[] titles, String[] values){ StringBuilder sb = new StringBuilder(); if (titles.length == values.length) { - sb.append("CREATE (") .append(":") .append("`"+values[1].replace("|","`:`")+"`") .append(" $props") - .append(")") .append(" "); } else { @@ -31,21 +27,23 @@ public static String generateBlankNodeCreationQuery(String[] titles, String[] va } public static Map generateProps(String[] titles, String[] values){ - Map props = new HashMap<>(); props.put("id",values[0]); + props.put("_json",values[2]); if (titles.length == values.length) { - for (int i = 2; i < values.length; i++) - props.put(titles[i].split(":")[0].replaceAll("\"\"","\""),convertToJSONArray(values[i])); - + for (int i = 3; i < values.length; i++){ + String[] title = titles[i].split(":"); + if (title.length > 1 && title[1].equals("string[]")) { + props.put(title[0].replaceAll("\"\"","\""),values[i].split("\\|")); + } else + props.put(title[0].replaceAll("\"\"","\""),values[i]); + } } else { System.out.println("titles and values are not equal"); System.out.println("titles: "+titles.length + " - values: " +values.length); } - Map params = new HashMap<>(); params.put( "props", props ); - return params; } @@ -56,7 +54,7 @@ public static String generateRelationCreationQuery(String[] titles, String[] val sb.append("MATCH (n"+idToLabel(values[0])+" {id: "+"\'"+values[0]+"\'"+"}),") .append("(m"+idToLabel(values[2])+" {id: "+"\'"+values[2]+"\'"+"}) ") .append("WHERE n.id STARTS WITH '"+values[0].split("\\+")[0]+"' AND m.id STARTS WITH '"+values[2].split("\\+")[0]+"' ") - .append("AND n.ontologyId = '"+values[0].split("\\+")[0]+"' AND m.ontologyId = '"+values[2].split("\\+")[0]+"' ") + .append("AND '"+values[0].split("\\+")[0]+"' IN n.ontologyId AND '"+values[2].split("\\+")[0]+"' IN m.ontologyId ") .append("CREATE (n)-[:") .append("`"+values[1].replace("|","`:`")+"`") .append("]->(m)"); @@ -68,56 +66,10 @@ public static String generateRelationCreationQuery(String[] titles, String[] val return sb.toString(); } - public static String generateRelationCreationQuery2(String[] titles, String[] values){ - StringBuilder sb = new StringBuilder(); - if (titles.length == 
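The revised generateBlankNodeCreationQuery keeps labels in the query text (Cypher does not allow labels as parameters) while all values travel in a single $props map, which is also what lets patch 070 store string[] columns as real list properties. A minimal sketch of that split, mirroring the pipe-separated label convention above (wrapper name is illustrative):

```java
import java.util.HashMap;
import java.util.Map;
import org.neo4j.driver.Session;

class NodeCreationSketch {
    // Labels are backtick-escaped into the query; values go in one $props map.
    static void create(Session session, String pipeSeparatedLabels, Map<String, Object> props) {
        String labels = "`" + pipeSeparatedLabels.replace("|", "`:`") + "`";
        Map<String, Object> params = new HashMap<>();
        params.put("props", props);
        session.run("CREATE (:" + labels + " $props)", params);
    }
}
```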
values.length){ - sb.append("MATCH (n {id: "+"\'"+values[0]+"\'"+"})-[:") - .append("`"+values[1].replace("|","`:`")+"`") - .append("]->(m {id: "+"\'"+values[2]+"\'"+"})"); - } - - return sb.toString(); - } - public static String generateOntologyDeleteQuery(String ontologyPrefix){ return "MATCH (n) where n.id STARTS WITH '"+ontologyPrefix+"' DETACH DELETE n"; } - public static String convertToJSONArray(String input){ - if (input.contains("|")){ - input = input.substring(1,input.length()-1); - String[] sarray = input.split("\\|"); - StringBuilder sb = new StringBuilder(); - sb.append("["); - for (int i = 0;i ":OntologyClass"; From 435cb7f88d896741f19dcdbe8d8741023b709655 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Mon, 12 Aug 2024 16:44:57 +0200 Subject: [PATCH 071/146] added count queries for #29 --- .../java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java index 8d92fa53a..bb87e2db4 100644 --- a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java +++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java @@ -67,7 +67,15 @@ public static String generateRelationCreationQuery(String[] titles, String[] val } public static String generateOntologyDeleteQuery(String ontologyPrefix){ - return "MATCH (n) where n.id STARTS WITH '"+ontologyPrefix+"' DETACH DELETE n"; + return "MATCH (n) WHERE n.id STARTS WITH '"+ontologyPrefix+"' DETACH DELETE n"; + } + + public static String countRelationShipsOfOntology(String ontologyPrefix) { + return "MATCH (n)-[r]-(m) WHERE '"+ontologyPrefix+"' IN n.ontologyId and '"+ontologyPrefix+"' IN m.ontologyId return count(distinct r)"; + } + + public static String countNodesOfOntology(String ontologyPrefix, String type){ + return "MATCH (n) WHERE n.id STARTS WITH '"+ontologyPrefix+"' AND '"+type+"' IN n.type return count(n) "; } public static String idToLabel(String id){ From 4df6e36d815e8efcf411d1258bde2125acb5f40f Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Wed, 14 Aug 2024 19:48:43 +0200 Subject: [PATCH 072/146] revised command line modes and added metadata summary option to be used in consistency checks for #29 --- .../uk/ac/ebi/spot/csv2neo/ImportCSV.java | 105 +++++++++++++++--- .../ac/ebi/spot/csv2neo/QueryGeneration.java | 14 ++- 2 files changed, 99 insertions(+), 20 deletions(-) diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java index 60ebc5255..89d581f7a 100644 --- a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java +++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java @@ -5,6 +5,7 @@ import org.neo4j.driver.*; import java.io.*; import java.nio.file.Files; +import java.nio.file.Path; import java.nio.file.Paths; import java.util.*; import java.util.concurrent.CountDownLatch; @@ -81,6 +82,60 @@ public static void executeBatchedRelationshipQueries(List files, Driver dr } } + /* + * File should be the _ontologies.csv file + * */ + public static void displayIngested(File file, Driver driver) throws IOException { + System.out.println("---Ingestion Summary---"); + long noofRelationships = 0; + long noofNodes = 0; + Reader reader = 
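generateOntologyDeleteQuery relies on DETACH DELETE, which removes the matched nodes together with any relationships still attached to them. A parameterized variant of the same query (an assumption on my part; the patch interpolates the prefix into the query string instead):

```java
import java.util.Map;
import org.neo4j.driver.Session;

class OntologyRemovalSketch {
    // DETACH DELETE drops each matched node plus all of its relationships.
    static void removeOntology(Session session, String ontologyPrefix) {
        session.run("MATCH (n) WHERE n.id STARTS WITH $prefix DETACH DELETE n",
                Map.of("prefix", ontologyPrefix));
    }
}
```

Passing the prefix as a parameter avoids quoting problems and lets Neo4j reuse the query plan across ontologies.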
Files.newBufferedReader(Paths.get(file.getAbsolutePath())); + org.apache.commons.csv.CSVParser csvParser = new org.apache.commons.csv.CSVParser(reader, CSVFormat.POSTGRESQL_CSV.withFirstRecordAsHeader().withTrim()); + List records = csvParser.getRecords(); + for (CSVRecord record : records){ + try (Session session = driver.session()){ + String ontology = record.get(0).split("\\+")[0]; + var resultN = session.run(countAllNodesOfOntology(ontology)); + int nodes = resultN.next().get("nodes").asInt(); + noofNodes += nodes; + System.out.println("Number of nodes in ontology "+ontology+" is "+nodes); + var resultR = session.run(countAllRelationshipsOfOntology(ontology)); + int relationships = resultR.next().get("relationships").asInt(); + noofRelationships += relationships; + System.out.println("Number of relationships in ontology "+ontology+" is "+relationships); + } + } + System.out.println("Total number of ingested nodes is "+noofNodes); + System.out.println("Total number of ingested nodes is "+noofRelationships); + } + + public static void displayCSV(List files) throws IOException { + System.out.println("---Ingestion Plan---"); + long noofRelationships = 0; + long noofNodes = 0; + for (File file : files){ + if (file.getName().endsWith("_edges.csv")){ + try { + Path path = Paths.get(file.getAbsolutePath()); + noofRelationships = Files.lines(path).count() -1; + } catch (Exception e) { + e.printStackTrace(); + } + } else if (file.getName().endsWith("_ontologies.csv") || file.getName().endsWith("_properties.csv") || file.getName().endsWith("_classes.csv") || file.getName().endsWith("_individuals.csv")){ + Path path = Paths.get(file.getAbsolutePath()); + Reader reader = Files.newBufferedReader(path); + org.apache.commons.csv.CSVParser csvParser = new org.apache.commons.csv.CSVParser(reader, CSVFormat.POSTGRESQL_CSV.withFirstRecordAsHeader().withTrim()); + int noofRecords = csvParser.getRecords().size(); + int noofNewLines = (int) Files.lines(path).count() -1; + noofNodes += noofRecords; + if (noofRecords != noofNewLines) + System.out.println("Warning: "+noofRecords+" records has been identified in contrast to "+noofNewLines+" new lines in "+file.getName()); + } + } + System.out.println("Total number of nodes that will be ingested in csv: " + noofNodes); + System.out.println("Total Number of relationships that will be ingested in csv: " + noofRelationships); + } + public static List> splitList(List list, int batchSize) { List> subLists = new ArrayList<>(); for (int i = 0; i < list.size(); i += batchSize) { @@ -91,16 +146,16 @@ public static List> splitList(List list, int batchSize) { private static Options getOptions() { Options options = new Options(); - options.addOption("i", "ingest",false, "ingest ontologies"); - options.addOption("rm", "remove",true, "remove ontology by commas"); + options.addOption("m", "mode",true, "ingest(i), remove(rm) or display(d) ontologies"); options.addOption("a", "authenticated",false, "use authentication"); options.addOption("u", "user",true, "neo4j user name"); options.addOption("pw", "password",true, "neo4j user password"); - options.addOption("uri", "databaseuri",true, "neo4j database uri"); + options.addOption("uri", "database_uri",true, "neo4j database uri"); options.addOption("db", "database",true, "neo4j database name"); + options.addOption("o", "ontologies",true, "ontologies to be removed or displayed by commas"); options.addOption("d", "directory",true, "neo4j csv import directory"); - options.addOption("bs", "batchsize",true, "batch size for splitting 
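displayCSV compares the parsed record count against the raw newline count per file; the two legitimately differ when a quoted field contains embedded newlines, which is exactly the situation the warning is meant to surface. The check in isolation (illustrative wrapper, same commons-csv setup as the patch):

```java
import java.io.Reader;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.stream.Stream;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;

class CsvConsistencySketch {
    static void check(Path csv) throws Exception {
        try (Reader reader = Files.newBufferedReader(csv);
             CSVParser parser = new CSVParser(reader,
                     CSVFormat.POSTGRESQL_CSV.withFirstRecordAsHeader().withTrim());
             Stream<String> lines = Files.lines(csv)) {
            long records = parser.getRecords().size();
            long dataLines = lines.count() - 1; // minus the header line
            if (records != dataLines) {
                System.out.println("Warning: " + records + " records vs "
                        + dataLines + " data lines in " + csv.getFileName());
            }
        }
    }
}
```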
queries into multiple transactions."); - options.addOption("ps", "pool size",true, "number of threads in the pool"); + options.addOption("bs", "batch_size",true, "batch size for splitting queries into multiple transactions."); + options.addOption("ps", "pool_size",true, "number of threads in the pool"); return options; } @@ -113,7 +168,7 @@ public static void main(String... args) throws IOException, ParseException { final String dbUser = cmd.hasOption("u") ? cmd.getOptionValue("u") : "neo4j"; final String dbPassword = cmd.hasOption("pw") ? cmd.getOptionValue("pw") : "testtest"; final String directory = cmd.hasOption("d") ? cmd.getOptionValue("d") : "/tmp/out"; - final String ontologiesToBeRemoved = cmd.hasOption("rm") ? cmd.getOptionValue("rm") : ""; + final String ontologyPrefixes = cmd.hasOption("o") ? cmd.getOptionValue("o") : ""; final int batchSize = cmd.hasOption("bs") && Integer.parseInt(cmd.getOptionValue("bs"))>0 ? Integer.parseInt(cmd.getOptionValue("bs")) : 1000; final int poolSize = cmd.hasOption("ps") && Integer.parseInt(cmd.getOptionValue("ps"))>0 ? Integer.parseInt(cmd.getOptionValue("ps")) : 20; @@ -144,18 +199,34 @@ public static void main(String... args) throws IOException, ParseException { } System.out.println("Start Neo4J Modification..."); - if(cmd.hasOption("i")){ - File dir = new File(directory); - List files = listFiles(dir.listFiles()); - executeBatchedNodeQueries(files,driver,batchSize,poolSize); - executeBatchedRelationshipQueries(files,driver,batchSize, poolSize); - } else - for(String ontology : ontologiesToBeRemoved.split(",")) - try { - session.run(generateOntologyDeleteQuery(ontology)); - } catch (Exception e){ - e.printStackTrace(); + if(cmd.hasOption("m")){ + if (cmd.getOptionValue("m").equals("i")){ + File dir = new File(directory); + List files = listFiles(dir.listFiles()); + displayCSV(files); + executeBatchedNodeQueries(files,driver,batchSize,poolSize); + executeBatchedRelationshipQueries(files,driver,batchSize, poolSize); + displayIngested(files.stream().filter(f -> f.getName().endsWith("_ontologies.csv")).findFirst().get(),driver); + } else if (cmd.getOptionValue("m").equals("rm")){ + for(String ontology : ontologyPrefixes.split(",")){ + try { + session.run(generateOntologyDeleteQuery(ontology)); + } catch (Exception e){ + e.printStackTrace(); + } } + + } else if (cmd.getOptionValue("m").equals("d")){ + for(String ontology : ontologyPrefixes.split(",")){ + var resultN = session.run(countAllNodesOfOntology(ontology)); + System.out.println("Number of nodes in ontology "+ontology+" is "+resultN.next().get("nodes").asInt()); + var resultR = session.run(countAllRelationshipsOfOntology(ontology)); + System.out.println("Number of relationships in ontology "+ontology+" is "+resultR.next().get("relationships").asInt()); + } + } + } else { + System.out.println("Mode should be i, d, or rm"); + } } catch (InterruptedException e) { throw new RuntimeException(e); } diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java index bb87e2db4..b18053072 100644 --- a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java +++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java @@ -70,12 +70,20 @@ public static String generateOntologyDeleteQuery(String ontologyPrefix){ return "MATCH (n) WHERE n.id STARTS WITH '"+ontologyPrefix+"' DETACH DELETE n"; } - public static String countRelationShipsOfOntology(String ontologyPrefix) 
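The reworked main dispatches on a single -m flag instead of separate ingest/remove switches. Note the hint above prints only when -m is missing; an unrecognized mode value currently falls through silently. A skeleton where the default branch also catches bad values (an editorial suggestion, branch bodies elided):

```java
class ModeDispatchSketch {
    static void dispatch(String mode) {
        switch (mode) {
            case "i":  /* ingest the CSV directory */      break;
            case "rm": /* remove the listed ontologies */  break;
            case "d":  /* display per-ontology counts */   break;
            default:   System.out.println("Mode should be i, d, or rm");
        }
    }
}
```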
{ - return "MATCH (n)-[r]-(m) WHERE '"+ontologyPrefix+"' IN n.ontologyId and '"+ontologyPrefix+"' IN m.ontologyId return count(distinct r)"; + public static String countAllRelationshipsOfOntology(String ontologyPrefix) { + return "MATCH (n)-[r]-(m) WHERE '"+ontologyPrefix+"' IN n.ontologyId and '"+ontologyPrefix+"' IN m.ontologyId return count(distinct r) as relationships"; + } + + public static String countRelationshipsOfOntology(String ontologyPrefix, String label) { + return "MATCH (n)-[r:`"+label+"`]-(m) WHERE '"+ontologyPrefix+"' IN n.ontologyId and '"+ontologyPrefix+"' IN m.ontologyId return count(distinct r) as relationships"; + } + + public static String countAllNodesOfOntology(String ontologyPrefix){ + return "MATCH (n) WHERE n.id STARTS WITH '"+ontologyPrefix+"' return count(n) as nodes"; } public static String countNodesOfOntology(String ontologyPrefix, String type){ - return "MATCH (n) WHERE n.id STARTS WITH '"+ontologyPrefix+"' AND '"+type+"' IN n.type return count(n) "; + return "MATCH (n) WHERE n.id STARTS WITH '"+ontologyPrefix+"' AND '"+type+"' IN n.type return count(n) as nodes"; } public static String idToLabel(String id){ From 0fb59195f1eaec2b480983e09bf12bd5dea3b5a1 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Thu, 15 Aug 2024 14:32:28 +0200 Subject: [PATCH 073/146] implemented configurable multiple attempts for each transaction in #29 --- .../uk/ac/ebi/spot/csv2neo/ImportCSV.java | 18 ++++----- .../spot/csv2neo/NodeCreationQueryTask.java | 37 ++++++++++--------- .../RelationShipCreationQueryTask.java | 35 ++++++++++-------- 3 files changed, 47 insertions(+), 43 deletions(-) diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java index 89d581f7a..8ff783cf1 100644 --- a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java +++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java @@ -26,9 +26,6 @@ */ public class ImportCSV { - static FileReader fr; - static BufferedReader br; - public static List listFiles(File[] files) throws IOException { List fileList = new ArrayList(); for (File file : files) { @@ -42,7 +39,7 @@ public static List listFiles(File[] files) throws IOException { return fileList; } - public static void executeBatchedNodeQueries(List files, Driver driver, int batchSize, int poolSize) throws IOException, InterruptedException { + public static void executeBatchedNodeQueries(List files, Driver driver, int batchSize, int poolSize, int attempts) throws IOException, InterruptedException { for (File file : files) { if (!(file.getName().contains("_ontologies") || file.getName().contains("_properties") || file.getName().contains("_individuals") || file.getName().contains("_classes")) || !file.getName().endsWith(".csv")) @@ -54,7 +51,7 @@ public static void executeBatchedNodeQueries(List files, Driver driver, in CountDownLatch latch = new CountDownLatch(splitRecords.size()); ExecutorService executorService = Executors.newFixedThreadPool(poolSize); for (List records : splitRecords){ - NodeCreationQueryTask task = new NodeCreationQueryTask(driver,latch, records,headers,file); + NodeCreationQueryTask task = new NodeCreationQueryTask(driver,latch, records,headers,file,attempts); executorService.submit(task); } latch.await(); @@ -62,7 +59,7 @@ public static void executeBatchedNodeQueries(List files, Driver driver, in } } - public static void executeBatchedRelationshipQueries(List files, 
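Patch 072 renames the count helpers and aliases every aggregate (as nodes, as relationships), which is what lets callers such as displayIngested read the value by name from the first record. Reading one of these counts through the driver looks like this (illustrative wrapper around the query style the patch generates):

```java
import org.neo4j.driver.Result;
import org.neo4j.driver.Session;

class CountReadSketch {
    static int nodesOfOntology(Session session, String ontologyPrefix) {
        Result result = session.run(
                "MATCH (n) WHERE n.id STARTS WITH '" + ontologyPrefix
                        + "' RETURN count(n) AS nodes");
        return result.next().get("nodes").asInt(); // read the aliased aggregate
    }
}
```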
Driver driver, int batchSize, int poolSize) throws IOException, InterruptedException { + public static void executeBatchedRelationshipQueries(List files, Driver driver, int batchSize, int poolSize, int attempts) throws IOException, InterruptedException { for (File file : files) { if ((!file.getName().contains("_edges")) || !file.getName().endsWith(".csv")) continue; @@ -74,7 +71,7 @@ public static void executeBatchedRelationshipQueries(List files, Driver dr CountDownLatch latch = new CountDownLatch(splitRecords.size()); ExecutorService executorService = Executors.newFixedThreadPool(poolSize); for (List records : splitRecords){ - RelationShipCreationQueryTask task = new RelationShipCreationQueryTask(driver,latch,records,headers,file); + RelationShipCreationQueryTask task = new RelationShipCreationQueryTask(driver,latch,records,headers,file, attempts); executorService.submit(task); } latch.await(); @@ -156,6 +153,7 @@ private static Options getOptions() { options.addOption("d", "directory",true, "neo4j csv import directory"); options.addOption("bs", "batch_size",true, "batch size for splitting queries into multiple transactions."); options.addOption("ps", "pool_size",true, "number of threads in the pool"); + options.addOption("t", "attempts",true, "number of attempts for a particular batch"); return options; } @@ -171,6 +169,7 @@ public static void main(String... args) throws IOException, ParseException { final String ontologyPrefixes = cmd.hasOption("o") ? cmd.getOptionValue("o") : ""; final int batchSize = cmd.hasOption("bs") && Integer.parseInt(cmd.getOptionValue("bs"))>0 ? Integer.parseInt(cmd.getOptionValue("bs")) : 1000; final int poolSize = cmd.hasOption("ps") && Integer.parseInt(cmd.getOptionValue("ps"))>0 ? Integer.parseInt(cmd.getOptionValue("ps")) : 20; + final int attempts = cmd.hasOption("t") ? Integer.parseInt(cmd.getOptionValue("t")) : 5; try (var driver = cmd.hasOption("a") ? GraphDatabase.driver(dbUri, AuthTokens.basic(dbUser, dbPassword)) : GraphDatabase.driver(dbUri)) { driver.verifyConnectivity(); @@ -204,8 +203,8 @@ public static void main(String... args) throws IOException, ParseException { File dir = new File(directory); List files = listFiles(dir.listFiles()); displayCSV(files); - executeBatchedNodeQueries(files,driver,batchSize,poolSize); - executeBatchedRelationshipQueries(files,driver,batchSize, poolSize); + executeBatchedNodeQueries(files,driver,batchSize,poolSize,attempts); + executeBatchedRelationshipQueries(files,driver,batchSize, poolSize,attempts); displayIngested(files.stream().filter(f -> f.getName().endsWith("_ontologies.csv")).findFirst().get(),driver); } else if (cmd.getOptionValue("m").equals("rm")){ for(String ontology : ontologyPrefixes.split(",")){ @@ -215,7 +214,6 @@ public static void main(String... 
args) throws IOException, ParseException { e.printStackTrace(); } } - } else if (cmd.getOptionValue("m").equals("d")){ for(String ontology : ontologyPrefixes.split(",")){ var resultN = session.run(countAllNodesOfOntology(ontology)); diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/NodeCreationQueryTask.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/NodeCreationQueryTask.java index 60c65367f..aebd96451 100644 --- a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/NodeCreationQueryTask.java +++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/NodeCreationQueryTask.java @@ -5,7 +5,6 @@ import org.neo4j.driver.Session; import java.io.File; -import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; @@ -25,33 +24,37 @@ public class NodeCreationQueryTask implements Runnable { private final List records; private final String[] headers; private final File file; + private final int attempts; - public NodeCreationQueryTask(Driver driver,CountDownLatch latch, List records, String[] headers, File file) { + public NodeCreationQueryTask(Driver driver, CountDownLatch latch, List records, String[] headers, File file, int attempts) { this.driver = driver; this.latch = latch; this.records = records; this.headers = headers; this.file = file; - + this.attempts = attempts; } @Override public void run() { - try (Session session = driver.session()) { - session.writeTransaction(tx -> { - for (CSVRecord csvRecord : records) { - String[] row = csvRecord.toList().toArray(String[]::new); - String query = generateBlankNodeCreationQuery(headers,row); - Map params = generateProps(headers,row); - if(query.isEmpty()) - System.out.println("empty query for appended line: "+ Arrays.toString(row)+" in file: "+file); - else - tx.run(query,params); + boolean success = false; + for(int i = 0;i { + for (CSVRecord csvRecord : records) { + String[] row = csvRecord.toList().toArray(String[]::new); + String query = generateBlankNodeCreationQuery(headers, row); + Map params = generateProps(headers, row); + tx.run(query, params); + } + return true; + }); + latch.countDown(); } - return null; - }); - } finally { - latch.countDown(); + } catch(Exception e) { + System.out.println("Attempt "+i+" error: "+e.getMessage()); + } } } } diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/RelationShipCreationQueryTask.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/RelationShipCreationQueryTask.java index 0866b7d8a..96b07d1be 100644 --- a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/RelationShipCreationQueryTask.java +++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/RelationShipCreationQueryTask.java @@ -5,9 +5,7 @@ import org.neo4j.driver.Session; import java.io.File; -import java.util.Arrays; import java.util.List; -import java.util.Map; import java.util.concurrent.CountDownLatch; import static uk.ac.ebi.spot.csv2neo.QueryGeneration.*; @@ -24,31 +22,36 @@ public class RelationShipCreationQueryTask implements Runnable { private final List records; private final String[] headers; private final File file; + private final int attempts; - public RelationShipCreationQueryTask(Driver driver, CountDownLatch latch, List records, String[] headers, File file) { + public RelationShipCreationQueryTask(Driver driver, CountDownLatch latch, List records, String[] headers, File file, int attempts) { this.driver = driver; this.latch = latch; this.records = records; this.headers = headers; this.file = file; + 
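The retry loop above re-runs the whole batch transaction up to `attempts` times and counts the latch down after the first success; note that if every attempt fails, the latch is never decremented and the coordinating await() can block indefinitely. A variant that guarantees exactly one countdown per task (an editorial suggestion, not what the patch does):

```java
import java.util.concurrent.CountDownLatch;
import org.neo4j.driver.Driver;
import org.neo4j.driver.Session;

class RetrySketch {
    static void runWithRetry(Driver driver, CountDownLatch latch, int attempts, Runnable batch) {
        try {
            for (int i = 0; i < attempts; i++) {
                try (Session session = driver.session()) {
                    session.writeTransaction(tx -> { batch.run(); return true; });
                    return; // success, stop retrying
                } catch (Exception e) {
                    System.out.println("Attempt " + (i + 1) + " failed: " + e.getMessage());
                }
            }
        } finally {
            latch.countDown(); // exactly once, success or not, so await() always returns
        }
    }
}
```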
this.attempts = attempts; } @Override public void run() { - try (Session session = driver.session()) { - session.writeTransaction(tx -> { - for (CSVRecord csvRecord : records) { - String[] row = csvRecord.toList().toArray(String[]::new); - String query = generateRelationCreationQuery(headers,row); - if(query.isEmpty()) - System.out.println("empty query for appended line: "+ Arrays.toString(row)+" in file: "+file); - else - tx.run(query); + boolean success = false; + for(int i = 0;i { + for (CSVRecord csvRecord : records) { + String[] row = csvRecord.toList().toArray(String[]::new); + String query = generateRelationCreationQuery(headers,row); + tx.run(query); + } + return true; + }); + latch.countDown(); } - return null; - }); - }finally { - latch.countDown(); + } catch(Exception e){ + System.out.println("Attempt "+i+" error: "+e.getMessage()); + } } } } From 76b99999781d269b4843ef70e6ec1b2f0fc61d98 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Thu, 22 Aug 2024 13:35:25 +0200 Subject: [PATCH 074/146] converted the rest statistics functionality from ols3 to ols4 for #10 --- backend/pom.xml | 22 +- .../spot/ols/config/RestCallInterceptor.java | 31 +++ .../uk/ac/ebi/spot/ols/config/WebConfig.java | 30 ++- .../controller/KeyValueResultAssembler.java | 18 ++ .../controller/RestCallAssembler.java | 18 ++ .../controller/RestCallStatistics.java | 220 ++++++++++++++++++ .../reststatistics/dto/KeyValueResultDto.java | 30 +++ .../dto/RestCallCountResultDto.java | 22 ++ .../ols/reststatistics/dto/RestCallDto.java | 68 ++++++ .../reststatistics/dto/RestCallRequest.java | 90 +++++++ .../entity/HttpServletRequestInfo.java | 69 ++++++ .../ols/reststatistics/entity/RestCall.java | 95 ++++++++ .../entity/RestCallParameter.java | 92 ++++++++ .../entity/RestCallParameterType.java | 26 +++ .../repository/RestCallRepository.java | 9 + .../repository/RestCallRepositoryCustom.java | 16 ++ .../repository/RestCallRepositoryImpl.java | 116 +++++++++ .../service/RestCallHandlerService.java | 7 + .../service/RestCallParserService.java | 9 + .../service/RestCallService.java | 22 ++ .../service/RestCallStatisticsService.java | 21 ++ .../impl/RestCallHandlerServiceImpl.java | 43 ++++ .../impl/RestCallParserServiceImpl.java | 112 +++++++++ .../service/impl/RestCallServiceImpl.java | 55 +++++ .../impl/RestCallStatisticsServiceImpl.java | 98 ++++++++ .../service/impl/UrlCyclicDecoder.java | 35 +++ .../src/main/resources/application.properties | 7 +- docker-compose.yml | 16 +- 28 files changed, 1374 insertions(+), 23 deletions(-) create mode 100644 backend/src/main/java/uk/ac/ebi/spot/ols/config/RestCallInterceptor.java create mode 100644 backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/controller/KeyValueResultAssembler.java create mode 100644 backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/controller/RestCallAssembler.java create mode 100644 backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/controller/RestCallStatistics.java create mode 100644 backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/dto/KeyValueResultDto.java create mode 100644 backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/dto/RestCallCountResultDto.java create mode 100644 backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/dto/RestCallDto.java create mode 100644 backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/dto/RestCallRequest.java create mode 100644 backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/entity/HttpServletRequestInfo.java create mode 100644 
backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/entity/RestCall.java create mode 100644 backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/entity/RestCallParameter.java create mode 100644 backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/entity/RestCallParameterType.java create mode 100644 backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/repository/RestCallRepository.java create mode 100644 backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/repository/RestCallRepositoryCustom.java create mode 100644 backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/repository/RestCallRepositoryImpl.java create mode 100644 backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/RestCallHandlerService.java create mode 100644 backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/RestCallParserService.java create mode 100644 backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/RestCallService.java create mode 100644 backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/RestCallStatisticsService.java create mode 100644 backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/impl/RestCallHandlerServiceImpl.java create mode 100644 backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/impl/RestCallParserServiceImpl.java create mode 100644 backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/impl/RestCallServiceImpl.java create mode 100644 backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/impl/RestCallStatisticsServiceImpl.java create mode 100644 backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/impl/UrlCyclicDecoder.java diff --git a/backend/pom.xml b/backend/pom.xml index f56de4e70..a17640a3e 100644 --- a/backend/pom.xml +++ b/backend/pom.xml @@ -32,11 +32,23 @@ 2.8.9 - - org.neo4j.driver - neo4j-java-driver - 5.19.0 - + + org.neo4j.driver + neo4j-java-driver + 5.19.0 + + + + org.springframework.boot + spring-boot-starter-data-mongodb + 2.7.5 + + + + javax.persistence + javax.persistence-api + 2.2 + org.springframework.boot diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/config/RestCallInterceptor.java b/backend/src/main/java/uk/ac/ebi/spot/ols/config/RestCallInterceptor.java new file mode 100644 index 000000000..c019d6984 --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/config/RestCallInterceptor.java @@ -0,0 +1,31 @@ +package uk.ac.ebi.spot.ols.config; + +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.servlet.HandlerInterceptor; + +import uk.ac.ebi.spot.ols.reststatistics.service.RestCallHandlerService; + +public class RestCallInterceptor implements HandlerInterceptor { + private final RestCallHandlerService restCallHandlerService; + + @Autowired + public RestCallInterceptor(RestCallHandlerService restCallHandlerService) { + this.restCallHandlerService = restCallHandlerService; + } + + @Override + public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) throws Exception { + if (!request.getRequestURL().toString().contains("/api") + || request.getRequestURL().toString().contains("/api/rest/statistics")) { + return true; + } + + + restCallHandlerService.handle(request); + + return true; + } +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/config/WebConfig.java b/backend/src/main/java/uk/ac/ebi/spot/ols/config/WebConfig.java index c67a52c02..640772f91 100644 --- 
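Patch 074 pulls in spring-boot-starter-data-mongodb and, per the file list above, adds a RestCallRepository. With the RestCall entity annotated @Document, the minimal Spring Data shape for such a repository would be (a sketch under that assumption; the patch's actual interface body is not shown here):

```java
import org.springframework.data.mongodb.repository.MongoRepository;

// Assumes the RestCall document entity defined later in this patch.
public interface RestCallRepositorySketch extends MongoRepository<RestCall, String> {
    // Spring Data derives CRUD and paging implementations at runtime.
}
```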
a/backend/src/main/java/uk/ac/ebi/spot/ols/config/WebConfig.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/config/WebConfig.java @@ -2,13 +2,9 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.ComponentScan; import org.springframework.context.annotation.Configuration; -import org.springframework.context.annotation.Import; -import org.springframework.http.MediaType; -import org.springframework.web.filter.CharacterEncodingFilter; import org.springframework.web.servlet.config.annotation.*; -import org.springframework.web.util.UrlPathHelper; +import uk.ac.ebi.spot.ols.reststatistics.service.RestCallHandlerService; /** * @author Simon Jupp @@ -25,6 +21,10 @@ public class WebConfig extends WebMvcConfigurerAdapter { * * @param configurer */ + + @Autowired + RestCallHandlerService restCallHandlerService; + @Override public void configurePathMatch(PathMatchConfigurer configurer) { // UrlPathHelper urlPathHelper = new UrlPathHelper(); @@ -36,17 +36,15 @@ public void configurePathMatch(PathMatchConfigurer configurer) { } -// @Bean -// MaintenanceInterceptor getMaintenanceInterceptor() { -// return new MaintenanceInterceptor(); -// } - -// @Autowired -// MaintenanceInterceptor interceptor; -// @Override -// public void addInterceptors(InterceptorRegistry registry) { -// registry.addInterceptor(interceptor); -// } + @Override + public void addInterceptors(InterceptorRegistry registry) { + registry.addInterceptor(getRestCallInterceptor()); + } + + @Bean + public RestCallInterceptor getRestCallInterceptor() { + return new RestCallInterceptor(restCallHandlerService); + } @Override public void addCorsMappings(CorsRegistry registry) { diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/controller/KeyValueResultAssembler.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/controller/KeyValueResultAssembler.java new file mode 100644 index 000000000..01632862e --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/controller/KeyValueResultAssembler.java @@ -0,0 +1,18 @@ +package uk.ac.ebi.spot.ols.reststatistics.controller; + +import org.springframework.hateoas.EntityModel; +import org.springframework.hateoas.server.RepresentationModelAssembler; +import org.springframework.stereotype.Component; + +import uk.ac.ebi.spot.ols.reststatistics.dto.KeyValueResultDto; + +@Component +public class KeyValueResultAssembler implements RepresentationModelAssembler> { + + @Override + public EntityModel toModel(KeyValueResultDto document) { + EntityModel resource = EntityModel.of(document); + + return resource; + } +} \ No newline at end of file diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/controller/RestCallAssembler.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/controller/RestCallAssembler.java new file mode 100644 index 000000000..9f95b24a9 --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/controller/RestCallAssembler.java @@ -0,0 +1,18 @@ +package uk.ac.ebi.spot.ols.reststatistics.controller; + +import org.springframework.hateoas.EntityModel; +import org.springframework.hateoas.server.RepresentationModelAssembler; +import org.springframework.stereotype.Component; + +import uk.ac.ebi.spot.ols.reststatistics.dto.RestCallDto; + +@Component +public class RestCallAssembler implements RepresentationModelAssembler> { + + @Override + public EntityModel toModel(RestCallDto document) { + EntityModel 
resource = EntityModel.of(document); + + return resource; + } +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/controller/RestCallStatistics.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/controller/RestCallStatistics.java new file mode 100644 index 000000000..d9b4e2eca --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/controller/RestCallStatistics.java @@ -0,0 +1,220 @@ +package uk.ac.ebi.spot.ols.reststatistics.controller; + +import java.time.LocalDateTime; +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; +import org.springframework.data.web.PageableDefault; +import org.springframework.data.web.PagedResourcesAssembler; +import org.springframework.format.annotation.DateTimeFormat; +import org.springframework.hateoas.PagedModel; +import org.springframework.http.HttpEntity; +import org.springframework.http.HttpStatus; +import org.springframework.http.MediaType; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.*; + +import io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.Parameter; +import uk.ac.ebi.spot.ols.reststatistics.entity.RestCall; +import uk.ac.ebi.spot.ols.reststatistics.service.RestCallService; +import uk.ac.ebi.spot.ols.reststatistics.service.RestCallStatisticsService; +import uk.ac.ebi.spot.ols.reststatistics.dto.KeyValueResultDto; +import uk.ac.ebi.spot.ols.reststatistics.dto.RestCallDto; +import uk.ac.ebi.spot.ols.reststatistics.dto.RestCallRequest; +import uk.ac.ebi.spot.ols.reststatistics.entity.RestCallParameter; +import uk.ac.ebi.spot.ols.reststatistics.entity.RestCallParameterType; + +@RestController +@RequestMapping("/api/rest/statistics") +public class RestCallStatistics { + public static final int DEFAULT_PAGE_SIZE = 20; + + private final RestCallService restCallService; + private final RestCallStatisticsService restCallStatisticsService; + private final RestCallAssembler restCallAssembler; + private final KeyValueResultAssembler keyValueResultAssembler; + + @Autowired + public RestCallStatistics(RestCallService restCallService, + RestCallStatisticsService restCallStatisticsService, + RestCallAssembler restCallAssembler, + KeyValueResultAssembler keyValueResultAssembler) { + this.restCallService = restCallService; + this.restCallStatisticsService = restCallStatisticsService; + this.restCallAssembler = restCallAssembler; + this.keyValueResultAssembler = keyValueResultAssembler; + } + + @Operation(summary = "REST Calls List") + @RequestMapping(value = "", produces = MediaType.APPLICATION_JSON_VALUE, method = RequestMethod.POST) + public HttpEntity> getList( + @RequestParam(name = "url", required = false) String url, + @RequestParam(name = "dateTimeFrom", required = false) + @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime dateTimeFrom, + @RequestParam(name = "dateTimeTo", required = false) + @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime dateTimeTo, + @RequestParam(name="intersection", required=true,defaultValue="false") boolean intersection, + @RequestBody(required = false) List parameterList, + @Parameter(hidden = true) PagedResourcesAssembler assembler, + @PageableDefault(size = DEFAULT_PAGE_SIZE) + @Parameter(hidden = true) Pageable pageable + ) { + + RestCallRequest request = new RestCallRequest(url, 
dateTimeFrom, dateTimeTo); + + List parameters = new ArrayList(); + + if(parameterList != null) { + parameters.addAll(parameterList); + } + + Page page = restCallService.getList(request, parameters, intersection, pageable); + + return new ResponseEntity<>(assembler.toModel(page, restCallAssembler), HttpStatus.OK); + } + + @Operation(summary = "All REST Calls List") + @RequestMapping(value = "/all", produces = MediaType.APPLICATION_JSON_VALUE, method = RequestMethod.POST) + public List getList() { + return restCallService.findAll(); + } + + @Operation(summary = "REST Calls statistics by URL") + @RequestMapping(value = "/byUrl", produces = MediaType.APPLICATION_JSON_VALUE, method = RequestMethod.POST) + public HttpEntity> getStatisticsByUrl( + @RequestParam(name = "url", required = false) String url, + @RequestParam(name = "dateTimeFrom", required = false) + @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime dateTimeFrom, + @RequestParam(name = "dateTimeTo", required = false) + @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime dateTimeTo, + @RequestParam(name="intersection", required=true,defaultValue="false") boolean intersection, + @RequestBody(required = false) List parameterList, + @Parameter(hidden = true) PagedResourcesAssembler assembler, + @PageableDefault(size = DEFAULT_PAGE_SIZE) + @Parameter(hidden = true) Pageable pageable + ) { + RestCallRequest request = new RestCallRequest( + url, + dateTimeFrom, + dateTimeTo + ); + + List parameters = new ArrayList(); + + if(parameterList != null) { + parameters.addAll(parameterList); + } + + Page page = restCallStatisticsService.getRestCallsCountsByAddress(request, parameters, intersection, pageable); + + return new ResponseEntity<>(assembler.toModel(page, keyValueResultAssembler), HttpStatus.OK); + } + + @Operation(summary = "REST Calls total count") + @RequestMapping(value = "/count", produces = MediaType.APPLICATION_JSON_VALUE, method = RequestMethod.POST) + public HttpEntity getTotalCount( + @RequestParam(name = "url", required = false) String url, + @RequestParam(name = "dateTimeFrom", required = false) + @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime dateTimeFrom, + @RequestParam(name = "dateTimeTo", required = false) + @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime dateTimeTo, + @RequestParam(name="intersection", required=true,defaultValue="false") boolean intersection, + @RequestBody(required = false) List parameterList + ) { + RestCallRequest request = new RestCallRequest( + url, + dateTimeFrom, + dateTimeTo + ); + + + List parameters = new ArrayList(); + + if(parameterList != null) { + parameters.addAll(parameterList); + } + + KeyValueResultDto counts = restCallStatisticsService.getRestCallsTotalCount(request,parameters,intersection); + + return new ResponseEntity<>(counts, HttpStatus.OK); + } + + @Operation(summary = "REST Calls statistics by query parameters and path variables") + @RequestMapping(value = "/byParameter", produces = MediaType.APPLICATION_JSON_VALUE, method = RequestMethod.POST) + public HttpEntity> getStatisticsByParameter( + @Parameter(description = "Parameter type") + @RequestParam(name = "type", required = false) RestCallParameterType type, + @RequestParam(name = "url", required = false) String url, + @Parameter(description = "Parameter name") + @RequestParam(name = "parameter", required = false) String parameter, + @RequestParam(name = "dateTimeFrom", required = false) + @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime 
dateTimeFrom, + @RequestParam(name = "dateTimeTo", required = false) + @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime dateTimeTo, + @RequestParam(name="intersection", required=true,defaultValue="false") boolean intersection, + @RequestBody(required = false) List parameterList, + @Parameter(hidden = true) PagedResourcesAssembler assembler, + @PageableDefault(size = DEFAULT_PAGE_SIZE) + @Parameter(hidden = true) Pageable pageable + ) { + RestCallRequest request = new RestCallRequest( + url, + Optional.ofNullable(type), + Optional.ofNullable(parameter), + dateTimeFrom, + dateTimeTo + ); + + List parameters = new ArrayList(); + + if(parameterList != null) { + parameters.addAll(parameterList); + } + + Page page = restCallStatisticsService.getStatisticsByParameter(request, parameters, intersection,pageable); + + return new ResponseEntity<>(assembler.toModel(page, keyValueResultAssembler), HttpStatus.OK); + } + + @Operation(summary = "REST Calls statistics by date") + @RequestMapping(value = "/byDate", produces = MediaType.APPLICATION_JSON_VALUE, method = RequestMethod.POST) + public HttpEntity> getStatisticsByDate( + @Parameter(description = "Parameter type") + @RequestParam(name = "type", required = false) RestCallParameterType type, + @RequestParam(name = "url", required = false) String url, + @Parameter(description = "Parameter name") + @RequestParam(name = "parameter", required = false) String parameter, + @RequestParam(name = "dateTimeFrom", required = false) + @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime dateTimeFrom, + @RequestParam(name = "dateTimeTo", required = false) + @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime dateTimeTo, + @RequestParam(name="intersection", required=true,defaultValue="false") boolean intersection, + @RequestBody(required = false) List parameterList, + @Parameter(hidden = true) PagedResourcesAssembler assembler, + @PageableDefault(size = DEFAULT_PAGE_SIZE) + @Parameter(hidden = true) Pageable pageable + ) { + RestCallRequest request = new RestCallRequest( + url, + Optional.ofNullable(type), + Optional.ofNullable(parameter), + dateTimeFrom, + dateTimeTo + ); + + List parameters = new ArrayList(); + + if(parameterList != null) { + parameters.addAll(parameterList); + } + + Page page = restCallStatisticsService.getStatisticsByDate(request, parameters, intersection, pageable); + + return new ResponseEntity<>(assembler.toModel(page, keyValueResultAssembler), HttpStatus.OK); + } +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/dto/KeyValueResultDto.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/dto/KeyValueResultDto.java new file mode 100644 index 000000000..f6c24fe7e --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/dto/KeyValueResultDto.java @@ -0,0 +1,30 @@ +package uk.ac.ebi.spot.ols.reststatistics.dto; + +public class KeyValueResultDto { + private String key; + private long value; + + public KeyValueResultDto() { + } + + public KeyValueResultDto(String key, long value) { + this.key = key; + this.value = value; + } + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + public long getValue() { + return value; + } + + public void setValue(long value) { + this.value = value; + } +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/dto/RestCallCountResultDto.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/dto/RestCallCountResultDto.java new file mode 100644 
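Each endpoint above repeats the same null-guarded copy of the optional request body into a parameters list; that boilerplate could be collapsed into one small helper (an editorial suggestion with a hypothetical name, reusing the patch's RestCallParameter type):

```java
import java.util.ArrayList;
import java.util.List;

class ParamsHelperSketch {
    // Copy the optional @RequestBody list, treating null as "no parameters".
    static List<RestCallParameter> toParameters(List<RestCallParameter> parameterList) {
        return parameterList != null ? new ArrayList<>(parameterList) : new ArrayList<>();
    }
}
```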
index 000000000..c9afdf130 --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/dto/RestCallCountResultDto.java @@ -0,0 +1,22 @@ +package uk.ac.ebi.spot.ols.reststatistics.dto; + +import java.util.List; + +public class RestCallCountResultDto { + List result; + + public RestCallCountResultDto() { + } + + public RestCallCountResultDto(List result) { + this.result = result; + } + + public List getResult() { + return result; + } + + public void setResult(List result) { + this.result = result; + } +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/dto/RestCallDto.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/dto/RestCallDto.java new file mode 100644 index 000000000..32a909af3 --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/dto/RestCallDto.java @@ -0,0 +1,68 @@ +package uk.ac.ebi.spot.ols.reststatistics.dto; + +import java.time.LocalDateTime; +import java.util.Set; + +import uk.ac.ebi.spot.ols.reststatistics.entity.RestCall; +import uk.ac.ebi.spot.ols.reststatistics.entity.RestCallParameter; + +public class RestCallDto { + private String id; + private String url; + private Set parameters; + private LocalDateTime createdAt; + + public RestCallDto() { + } + + public RestCallDto(String id, + String url, + Set parameters, + LocalDateTime createdAt) { + this.id = id; + this.url = url; + this.parameters = parameters; + this.createdAt = createdAt; + } + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getUrl() { + return url; + } + + public void setUrl(String url) { + this.url = url; + } + + public Set getParameters() { + return parameters; + } + + public void setParameters(Set parameters) { + this.parameters = parameters; + } + + public LocalDateTime getCreatedAt() { + return createdAt; + } + + public void setCreatedAt(LocalDateTime createdAt) { + this.createdAt = createdAt; + } + + public static RestCallDto of(RestCall restCall) { + return new RestCallDto( + restCall.getId(), + restCall.getUrl(), + restCall.getParameters(), + restCall.getCreatedAt() + ); + } +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/dto/RestCallRequest.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/dto/RestCallRequest.java new file mode 100644 index 000000000..96d184662 --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/dto/RestCallRequest.java @@ -0,0 +1,90 @@ +package uk.ac.ebi.spot.ols.reststatistics.dto; + +import java.time.LocalDateTime; +import java.util.Optional; +import java.util.function.Predicate; + +import uk.ac.ebi.spot.ols.reststatistics.entity.RestCallParameter; +import uk.ac.ebi.spot.ols.reststatistics.entity.RestCallParameterType; + +public class RestCallRequest { + private String url; + private Optional type; + private Optional parameterName; + + private LocalDateTime dateTimeFrom; + private LocalDateTime dateTimeTo; + + public RestCallRequest() { + } + + public RestCallRequest(String url, LocalDateTime dateTimeFrom, LocalDateTime dateTimeTo) { + this.url = url; + this.dateTimeFrom = dateTimeFrom; + this.dateTimeTo = dateTimeTo; + } + + public RestCallRequest(String url, + Optional type, + Optional parameterName, + LocalDateTime dateTimeFrom, + LocalDateTime dateTimeTo) { + this.url = url; + this.type = type; + this.parameterName = parameterName; + this.dateTimeFrom = dateTimeFrom; + this.dateTimeTo = dateTimeTo; + } + + public String getUrl() { + return url; + } + + public void setUrl(String 
url) { + this.url = url; + } + + public LocalDateTime getDateTimeFrom() { + return dateTimeFrom; + } + + public void setDateTimeFrom(LocalDateTime dateTimeFrom) { + this.dateTimeFrom = dateTimeFrom; + } + + public LocalDateTime getDateTimeTo() { + return dateTimeTo; + } + + public void setDateTimeTo(LocalDateTime dateTimeTo) { + this.dateTimeTo = dateTimeTo; + } + + public Optional getType() { + return type; + } + + public void setType(Optional type) { + this.type = type; + } + + public Optional getParameterName() { + return parameterName; + } + + public void setParameterName(Optional parameterName) { + this.parameterName = parameterName; + } + + public Predicate getParameterNamePredicate() { + return parameterName.isPresent() + ? parameter -> parameterName.get().equalsIgnoreCase(parameter.getName()) + : parameter -> true; + } + + public Predicate getParameterTypePredicate() { + return type.isPresent() + ? type.get().getRestCallParameterPredicate() + : parameter -> true; + } +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/entity/HttpServletRequestInfo.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/entity/HttpServletRequestInfo.java new file mode 100644 index 000000000..7536fcd5c --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/entity/HttpServletRequestInfo.java @@ -0,0 +1,69 @@ +package uk.ac.ebi.spot.ols.reststatistics.entity; + +import java.util.Objects; +import java.util.Set; + +public class HttpServletRequestInfo { + private String url; + private Set pathVariables; + private Set queryParameters; + private Set headers; + + public HttpServletRequestInfo() { + } + + public HttpServletRequestInfo(String url, + Set pathVariables, + Set queryParameters, + Set headers) { + this.url = url; + this.pathVariables = pathVariables; + this.queryParameters = queryParameters; + this.headers = headers; + } + + public String getUrl() { + return url; + } + + public void setUrl(String url) { + this.url = url; + } + + public Set getPathVariables() { + return pathVariables; + } + + public void setPathVariables(Set pathVariables) { + this.pathVariables = pathVariables; + } + + public Set getQueryParameters() { + return queryParameters; + } + + public void setQueryParameters(Set queryParameters) { + this.queryParameters = queryParameters; + } + + public Set getHeaders() { + return headers; + } + + public void setHeaders(Set headers) { + this.headers = headers; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + HttpServletRequestInfo that = (HttpServletRequestInfo) o; + return url.equals(that.url) && Objects.equals(pathVariables, that.pathVariables) && Objects.equals(queryParameters, that.queryParameters) && Objects.equals(headers, that.headers); + } + + @Override + public int hashCode() { + return Objects.hash(url, pathVariables, queryParameters, headers); + } +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/entity/RestCall.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/entity/RestCall.java new file mode 100644 index 000000000..dc359deba --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/entity/RestCall.java @@ -0,0 +1,95 @@ +package uk.ac.ebi.spot.ols.reststatistics.entity; + +import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.core.mapping.Document; + +import java.time.LocalDateTime; +import java.util.HashSet; +import java.util.Objects; +import 
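RestCallRequest exposes its type and name filters as Predicate<RestCallParameter>, each defaulting to match-everything when its Optional is empty, so the filters chain naturally over a stream. A usage sketch (assuming a populated RestCall and RestCallRequest from this patch):

```java
import java.util.List;
import java.util.stream.Collectors;

class PredicateUsageSketch {
    static List<RestCallParameter> matching(RestCall restCall, RestCallRequest request) {
        return restCall.getParameters().stream()
                .filter(request.getParameterTypePredicate())  // empty type matches all
                .filter(request.getParameterNamePredicate())  // empty name matches all
                .collect(Collectors.toList());
    }
}
```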
+@Document(collection = "rest_call")
+public class RestCall {
+    @Id
+    private String id;
+
+    private String url;
+
+    private Set<RestCallParameter> parameters = new HashSet<>();
+
+    private LocalDateTime createdAt;
+
+    public RestCall() {
+    }
+
+    public RestCall(String url) {
+        this.url = url;
+        this.createdAt = LocalDateTime.now();
+    }
+
+    public RestCall(String url,
+                    Set<RestCallParameter> parameters) {
+        this.url = url;
+        this.parameters = parameters;
+        this.createdAt = LocalDateTime.now();
+    }
+
+    public void addParameters(Set<RestCallParameter> set) {
+        parameters.addAll(set);
+    }
+
+    public String getId() {
+        return id;
+    }
+
+    public void setId(String id) {
+        this.id = id;
+    }
+
+    public String getUrl() {
+        return url;
+    }
+
+    public void setUrl(String url) {
+        this.url = url;
+    }
+
+    public LocalDateTime getCreatedAt() {
+        return createdAt;
+    }
+
+    public void setCreatedAt(LocalDateTime createdAt) {
+        this.createdAt = createdAt;
+    }
+
+    public Set<RestCallParameter> getParameters() {
+        return parameters;
+    }
+
+    public void setParameters(Set<RestCallParameter> parameters) {
+        this.parameters = parameters;
+    }
+
+    @Override
+    public String toString() {
+        return "RestCall{" +
+            "id=" + id +
+            ", url='" + url + '\'' +
+            ", parameters=" + parameters +
+            ", createdAt=" + createdAt +
+            '}';
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+        RestCall restCall = (RestCall) o;
+        return id.equals(restCall.id) && url.equals(restCall.url);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(id, url);
+    }
+}
diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/entity/RestCallParameter.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/entity/RestCallParameter.java
new file mode 100644
index 000000000..b703e20dd
--- /dev/null
+++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/entity/RestCallParameter.java
@@ -0,0 +1,92 @@
+package uk.ac.ebi.spot.ols.reststatistics.entity;
+
+import java.util.Objects;
+
+import javax.persistence.Transient;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+
+public class RestCallParameter {
+    private String name;
+    private String value;
+    private RestCallParameterType parameterType;
+
+    public RestCallParameter() {
+    }
+
+    public RestCallParameter(String name, String value, RestCallParameterType parameterType) {
+        this.name = name;
+        this.value = value;
+        this.parameterType = parameterType;
+    }
+
+    // The restCall argument is accepted but intentionally not stored.
+    public RestCallParameter(String name, String value, RestCallParameterType parameterType, RestCall restCall) {
+        this.name = name;
+        this.value = value;
+        this.parameterType = parameterType;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public void setName(String name) {
+        this.name = name;
+    }
+
+    public String getValue() {
+        return value;
+    }
+
+    public void setValue(String value) {
+        this.value = value;
+    }
+
+    public RestCallParameterType getParameterType() {
+        return parameterType;
+    }
+
+    public void setParameterType(RestCallParameterType parameterType) {
+        this.parameterType = parameterType;
+    }
+
+    @Override
+    public String toString() {
+        return "RestCallParameter{" +
+            "parameterType='" + parameterType + '\'' +
+            ", name='" + name + '\'' +
+            ", value='" + value + '\'' +
+            '}';
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+        RestCallParameter that = (RestCallParameter) o;
+        return name.equals(that.name) && value.equals(that.value) && parameterType == that.parameterType;
+    }
+
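+    // Note: equality ignores any owning RestCall, so two parameters with the same
+    // (name, value, parameterType) collapse to a single entry when collected into
+    // the Set<RestCallParameter> held by RestCall.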
+    @Override
+    public int hashCode() {
+        return Objects.hash(name, value, parameterType);
+    }
+
+    @Transient
+    @JsonIgnore
+    public boolean isPathType() {
+        return RestCallParameterType.PATH.equals(this.parameterType);
+    }
+
+    @Transient
+    @JsonIgnore
+    public boolean isQueryType() {
+        return RestCallParameterType.QUERY.equals(this.parameterType);
+    }
+
+    @Transient
+    @JsonIgnore
+    public boolean isHeaderType() {
+        return RestCallParameterType.HEADER.equals(this.parameterType);
+    }
+}
diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/entity/RestCallParameterType.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/entity/RestCallParameterType.java
new file mode 100644
index 000000000..e2f9595c5
--- /dev/null
+++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/entity/RestCallParameterType.java
@@ -0,0 +1,26 @@
+package uk.ac.ebi.spot.ols.reststatistics.entity;
+
+import java.util.function.Predicate;
+
+public enum RestCallParameterType {
+    PATH {
+        @Override
+        public Predicate<RestCallParameter> getRestCallParameterPredicate() {
+            return RestCallParameter::isPathType;
+        }
+    },
+    QUERY {
+        @Override
+        public Predicate<RestCallParameter> getRestCallParameterPredicate() {
+            return RestCallParameter::isQueryType;
+        }
+    },
+    HEADER {
+        @Override
+        public Predicate<RestCallParameter> getRestCallParameterPredicate() {
+            return RestCallParameter::isHeaderType;
+        }
+    };
+
+    public abstract Predicate<RestCallParameter> getRestCallParameterPredicate();
+}
diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/repository/RestCallRepository.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/repository/RestCallRepository.java
new file mode 100644
index 000000000..f4990cb78
--- /dev/null
+++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/repository/RestCallRepository.java
@@ -0,0 +1,9 @@
+package uk.ac.ebi.spot.ols.reststatistics.repository;
+
+import org.springframework.data.mongodb.repository.MongoRepository;
+
+import uk.ac.ebi.spot.ols.reststatistics.entity.RestCall;
+
+public interface RestCallRepository extends MongoRepository<RestCall, String>, RestCallRepositoryCustom {
+
+}
diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/repository/RestCallRepositoryCustom.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/repository/RestCallRepositoryCustom.java
new file mode 100644
index 000000000..c607ca684
--- /dev/null
+++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/repository/RestCallRepositoryCustom.java
@@ -0,0 +1,16 @@
+package uk.ac.ebi.spot.ols.reststatistics.repository;
+
+import org.springframework.data.domain.Pageable;
+
+import uk.ac.ebi.spot.ols.reststatistics.dto.RestCallRequest;
+import uk.ac.ebi.spot.ols.reststatistics.entity.RestCall;
+import uk.ac.ebi.spot.ols.reststatistics.entity.RestCallParameter;
+
+import java.util.List;
+
+public interface RestCallRepositoryCustom {
+
+    List<RestCall> query(RestCallRequest request, List<RestCallParameter> parameters, boolean intersection, Pageable pageable);
+
+    Long count(RestCallRequest request, List<RestCallParameter> parameters, boolean intersection);
+}
diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/repository/RestCallRepositoryImpl.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/repository/RestCallRepositoryImpl.java
new file mode 100644
index 000000000..32e98dcde
--- /dev/null
+++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/repository/RestCallRepositoryImpl.java
@@ -0,0 +1,116 @@
+package uk.ac.ebi.spot.ols.reststatistics.repository;
+
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.data.domain.Pageable;
+import org.springframework.data.mongodb.core.MongoTemplate;
+import org.springframework.data.mongodb.core.query.Criteria;
+import org.springframework.data.mongodb.core.query.Query;
+import org.springframework.stereotype.Repository;
+import org.springframework.web.util.UriUtils;
+
+import uk.ac.ebi.spot.ols.reststatistics.dto.RestCallRequest;
+import uk.ac.ebi.spot.ols.reststatistics.entity.RestCall;
+import uk.ac.ebi.spot.ols.reststatistics.entity.RestCallParameter;
+
+@Repository
+public class RestCallRepositoryImpl implements RestCallRepositoryCustom {
+    private final Logger logger = LoggerFactory.getLogger(getClass());
+
+    private final MongoTemplate mongoTemplate;
+
+    @Autowired
+    public RestCallRepositoryImpl(MongoTemplate mongoTemplate) {
+        this.mongoTemplate = mongoTemplate;
+    }
+
+    @Override
+    public List<RestCall> query(RestCallRequest request, List<RestCallParameter> parameters, boolean intersection, Pageable pageable) {
+        Query query = new Query();
+        List<Criteria> criteria = new ArrayList<>();
+
+        addCriteriaByDates(request, criteria);
+        addCriteriaByUrl(request, criteria);
+        if (parameters != null && !parameters.isEmpty()) {
+            addCriteriaByParameter(request, criteria, parameters, intersection);
+        }
+
+        if (!criteria.isEmpty()) {
+            query.addCriteria(new Criteria().andOperator(criteria.toArray(new Criteria[0])));
+        }
+
+        if (Objects.nonNull(pageable)) {
+            query.with(pageable);
+        }
+
+        return mongoTemplate.find(query, RestCall.class);
+    }
+
+    @Override
+    public Long count(RestCallRequest request, List<RestCallParameter> parameters, boolean intersection) {
+        Query query = new Query();
+
+        List<Criteria> criteria = new ArrayList<>();
+
+        addCriteriaByDates(request, criteria);
+        addCriteriaByUrl(request, criteria);
+        if (parameters != null && !parameters.isEmpty()) {
+            addCriteriaByParameter(request, criteria, parameters, intersection);
+        }
+
+        if (!criteria.isEmpty()) {
+            query.addCriteria(new Criteria().andOperator(criteria.toArray(new Criteria[0])));
+        }
+
+        return mongoTemplate.count(query, RestCall.class);
+    }
+
+    private void addCriteriaByUrl(RestCallRequest request, List<Criteria> criteria) {
+        if (request.getUrl() != null) {
+            String url = getDecodedUrl(request);
+            criteria.add(Criteria.where("url").is(url));
+        }
+    }
+
+    private void addCriteriaByDates(RestCallRequest request, List<Criteria> criteria) {
+        if (request.getDateTimeFrom() != null) {
+            criteria.add(Criteria.where("createdAt").gte(request.getDateTimeFrom()));
+        }
+
+        if (request.getDateTimeTo() != null) {
+            criteria.add(Criteria.where("createdAt").lte(request.getDateTimeTo()));
+        }
+    }
+
+    private void addCriteriaByParameter(RestCallRequest request, List<Criteria> criteria, List<RestCallParameter> parameters, boolean intersection) {
+        if (parameters != null) {
+            if (intersection)
+                criteria.add(Criteria.where("parameters").all(parameters));
+            else
+                criteria.add(Criteria.where("parameters").in(parameters));
+        }
+    }
+
+    private String getDecodedUrl(RestCallRequest request) {
+        if (request.getUrl() == null) {
+            return null;
+        }
+
+        String decodedUrl = null;
+        try {
+            decodedUrl = UriUtils.decode(request.getUrl(), StandardCharsets.UTF_8.toString());
+        } catch (Exception e) {
+            logger.error("Could not decode URL: {}", e.getLocalizedMessage());
+        }
+
+        return decodedUrl;
+    }
+}
diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/RestCallHandlerService.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/RestCallHandlerService.java
new file mode 100644
index 000000000..60b51171b
--- /dev/null
+++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/RestCallHandlerService.java
@@ -0,0 +1,7 @@
+package uk.ac.ebi.spot.ols.reststatistics.service;
+
+import javax.servlet.http.HttpServletRequest;
+
+public interface RestCallHandlerService {
+    void handle(HttpServletRequest request);
+}
diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/RestCallParserService.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/RestCallParserService.java
new file mode 100644
index 000000000..917b6837e
--- /dev/null
+++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/RestCallParserService.java
@@ -0,0 +1,9 @@
+package uk.ac.ebi.spot.ols.reststatistics.service;
+
+import javax.servlet.http.HttpServletRequest;
+
+import uk.ac.ebi.spot.ols.reststatistics.entity.HttpServletRequestInfo;
+
+public interface RestCallParserService {
+    HttpServletRequestInfo parse(HttpServletRequest request);
+}
diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/RestCallService.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/RestCallService.java
new file mode 100644
index 000000000..5be46afaa
--- /dev/null
+++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/RestCallService.java
@@ -0,0 +1,22 @@
+package uk.ac.ebi.spot.ols.reststatistics.service;
+
+import java.util.List;
+
+import org.springframework.data.domain.Page;
+import org.springframework.data.domain.Pageable;
+
+import uk.ac.ebi.spot.ols.reststatistics.dto.RestCallDto;
+import uk.ac.ebi.spot.ols.reststatistics.dto.RestCallRequest;
+import uk.ac.ebi.spot.ols.reststatistics.entity.RestCall;
+import uk.ac.ebi.spot.ols.reststatistics.entity.RestCallParameter;
+
+public interface RestCallService {
+
+    RestCall save(RestCall entity);
+
+    Page<RestCallDto> getList(RestCallRequest request, List<RestCallParameter> parameters, boolean intersection, Pageable pageable);
+
+    List<RestCall> findAll();
+
+    Long count(RestCallRequest request, List<RestCallParameter> parameters, boolean intersection);
+}
diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/RestCallStatisticsService.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/RestCallStatisticsService.java
new file mode 100644
index 000000000..169e59ae9
--- /dev/null
+++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/RestCallStatisticsService.java
@@ -0,0 +1,21 @@
+package uk.ac.ebi.spot.ols.reststatistics.service;
+
+
+import java.util.List;
+
+import org.springframework.data.domain.Page;
+import org.springframework.data.domain.Pageable;
+
+import uk.ac.ebi.spot.ols.reststatistics.dto.KeyValueResultDto;
+import uk.ac.ebi.spot.ols.reststatistics.dto.RestCallRequest;
+import uk.ac.ebi.spot.ols.reststatistics.entity.RestCallParameter;
+
+public interface RestCallStatisticsService {
+    Page<KeyValueResultDto> getRestCallsCountsByAddress(RestCallRequest request, List<RestCallParameter> parameters, boolean intersection, Pageable pageable);
+
+    KeyValueResultDto getRestCallsTotalCount(RestCallRequest request, List<RestCallParameter> parameters, boolean intersection);
+
+    Page<KeyValueResultDto> getStatisticsByParameter(RestCallRequest request, List<RestCallParameter> parameters, boolean intersection, Pageable pageable);
+
+    Page<KeyValueResultDto> getStatisticsByDate(RestCallRequest request, List<RestCallParameter> parameters, boolean intersection, Pageable pageable);
+}
diff --git
a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/impl/RestCallHandlerServiceImpl.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/impl/RestCallHandlerServiceImpl.java
new file mode 100644
index 000000000..2f33f4258
--- /dev/null
+++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/impl/RestCallHandlerServiceImpl.java
@@ -0,0 +1,43 @@
+package uk.ac.ebi.spot.ols.reststatistics.service.impl;
+
+import javax.servlet.http.HttpServletRequest;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+
+import uk.ac.ebi.spot.ols.reststatistics.entity.HttpServletRequestInfo;
+import uk.ac.ebi.spot.ols.reststatistics.entity.RestCall;
+import uk.ac.ebi.spot.ols.reststatistics.service.RestCallHandlerService;
+import uk.ac.ebi.spot.ols.reststatistics.service.RestCallParserService;
+import uk.ac.ebi.spot.ols.reststatistics.service.RestCallService;
+
+@Service
+public class RestCallHandlerServiceImpl implements RestCallHandlerService {
+    private final Logger log = LoggerFactory.getLogger(getClass());
+
+    private final RestCallParserService restCallParserService;
+    private final RestCallService restCallService;
+
+    @Autowired
+    public RestCallHandlerServiceImpl(RestCallParserService restCallParserService,
+                                      RestCallService restCallService) {
+        this.restCallParserService = restCallParserService;
+        this.restCallService = restCallService;
+    }
+
+    @Override
+    public void handle(HttpServletRequest request) {
+        HttpServletRequestInfo requestInfo = restCallParserService.parse(request);
+
+        RestCall restCall = new RestCall(requestInfo.getUrl());
+        restCall.addParameters(requestInfo.getPathVariables());
+        restCall.addParameters(requestInfo.getQueryParameters());
+        restCall.addParameters(requestInfo.getHeaders());
+
+        RestCall saved = restCallService.save(restCall);
+
+        log.debug("REST Call: {}", saved);
+    }
+}
diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/impl/RestCallParserServiceImpl.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/impl/RestCallParserServiceImpl.java
new file mode 100644
index 000000000..6d10f7a56
--- /dev/null
+++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/impl/RestCallParserServiceImpl.java
@@ -0,0 +1,112 @@
+package uk.ac.ebi.spot.ols.reststatistics.service.impl;
+
+import java.io.UnsupportedEncodingException;
+import java.net.URLDecoder;
+import java.nio.charset.StandardCharsets;
+import java.util.Enumeration;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+import javax.servlet.http.HttpServletRequest;
+
+import org.apache.commons.lang3.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.stereotype.Service;
+import org.springframework.web.servlet.HandlerMapping;
+
+import uk.ac.ebi.spot.ols.reststatistics.entity.HttpServletRequestInfo;
+import uk.ac.ebi.spot.ols.reststatistics.entity.RestCallParameter;
+import uk.ac.ebi.spot.ols.reststatistics.entity.RestCallParameterType;
+import uk.ac.ebi.spot.ols.reststatistics.service.RestCallParserService;
+
+@Service
+public class RestCallParserServiceImpl implements RestCallParserService {
+    private final Logger logger = LoggerFactory.getLogger(getClass());
+    private final UrlCyclicDecoder decoder = new UrlCyclicDecoder();
+
+    @Value("#{'${frontends}'.split(',')}")
+    private Set<String> frontends = new HashSet<>();
+
+    @Override
+    public HttpServletRequestInfo parse(HttpServletRequest request) {
+        String requestURI = request.getRequestURI();
+        requestURI = decoder.decode(requestURI);
+
+        Map<String, String> pathVariablesMap = (Map<String, String>) request
+            .getAttribute(HandlerMapping.URI_TEMPLATE_VARIABLES_ATTRIBUTE);
+
+        Set<RestCallParameter> pathVariables = new HashSet<>();
+
+        if (pathVariablesMap != null)
+            for (Map.Entry<String, String> entry : pathVariablesMap.entrySet()) {
+                String parameterName = entry.getKey();
+                String parameterValue = decoder.decode(entry.getValue());
+
+                int startIndex = requestURI.indexOf(parameterValue) - 1;
+                int endIndex = startIndex + parameterValue.length() + 1;
+
+                if (startIndex >= 0 && requestURI.charAt(startIndex) == '/') {
+                    requestURI = doReplacement(requestURI, parameterName, startIndex, endIndex);
+                    pathVariables.add(new RestCallParameter(parameterName, parameterValue, RestCallParameterType.PATH));
+                }
+            }
+
+        Set<RestCallParameter> queryParameters = new HashSet<>();
+        try {
+            queryParameters = getQueryParameters(request);
+        } catch (UnsupportedEncodingException e) {
+            logger.error("Could not get query parameters: {}", e.getLocalizedMessage());
+        }
+
+        Set<RestCallParameter> headers = new HashSet<>();
+        for (Enumeration<String> names = request.getHeaderNames(); names.hasMoreElements(); ) {
+            String headerName = names.nextElement();
+            if (!headerName.equals("user-agent"))
+                continue;
+
+            for (Enumeration<String> values = request.getHeaders(headerName); values.hasMoreElements(); ) {
+                String headerValue = values.nextElement();
+                if (frontends.contains(headerValue))
+                    headers.add(new RestCallParameter(headerName, headerValue, RestCallParameterType.HEADER));
+            }
+        }
+
+        return new HttpServletRequestInfo(requestURI, pathVariables, queryParameters, headers);
+    }
+
+    private String doReplacement(String str, String parameterName, int startIndex, int endIndex) {
+        return str.substring(0, startIndex + 1)
+            + String.format("{%s}", parameterName)
+            + str.substring(endIndex);
+    }
+
+    private Set<RestCallParameter> getQueryParameters(HttpServletRequest request) throws UnsupportedEncodingException {
+        Set<RestCallParameter> queryParameters = new HashSet<>();
+
+        String queryString = request.getQueryString();
+        if (StringUtils.isEmpty(queryString)) {
+            return queryParameters;
+        }
+
+        queryString = URLDecoder.decode(queryString, StandardCharsets.UTF_8.toString());
+        String[] parameters = queryString.split("&");
+        for (String parameter : parameters) {
+            String[] keyValuePair = parameter.split("=");
+            String[] values = null;
+            if (keyValuePair.length > 1 && keyValuePair[1] != null && !keyValuePair[1].isEmpty())
+                values = keyValuePair[1].split(",");
+            if (values != null)
+                for (String value : values) {
+                    queryParameters.add(new RestCallParameter(keyValuePair[0], value, RestCallParameterType.QUERY));
+                }
+        }
+
+        return queryParameters;
+    }
+}
diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/impl/RestCallServiceImpl.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/impl/RestCallServiceImpl.java
new file mode 100644
index 000000000..b2852b754
--- /dev/null
+++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/impl/RestCallServiceImpl.java
@@ -0,0 +1,55 @@
+package uk.ac.ebi.spot.ols.reststatistics.service.impl;
+
+import java.util.List;
+import java.util.stream.Collectors;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.data.domain.Page;
+import org.springframework.data.domain.PageImpl;
+import org.springframework.data.domain.Pageable;
+import org.springframework.stereotype.Service;
+
+import uk.ac.ebi.spot.ols.reststatistics.repository.RestCallRepository;
+import uk.ac.ebi.spot.ols.reststatistics.dto.RestCallDto;
+import uk.ac.ebi.spot.ols.reststatistics.dto.RestCallRequest;
+import uk.ac.ebi.spot.ols.reststatistics.entity.RestCall;
+import uk.ac.ebi.spot.ols.reststatistics.entity.RestCallParameter;
+import uk.ac.ebi.spot.ols.reststatistics.service.RestCallService;
+
+@Service
+public class RestCallServiceImpl implements RestCallService {
+    private final RestCallRepository restCallRepository;
+
+    @Autowired
+    public RestCallServiceImpl(RestCallRepository restCallRepository) {
+        this.restCallRepository = restCallRepository;
+    }
+
+    @Override
+    public RestCall save(RestCall entity) {
+
+        return restCallRepository.save(entity);
+    }
+
+    @Override
+    public Page<RestCallDto> getList(RestCallRequest request, List<RestCallParameter> parameters, boolean intersection, Pageable pageable) {
+        List<RestCall> list = restCallRepository.query(request, parameters, intersection, pageable);
+        List<RestCallDto> dtos = list.stream()
+            .map(RestCallDto::of)
+            .collect(Collectors.toList());
+
+        Long count = restCallRepository.count(request, parameters, intersection);
+
+        return new PageImpl<>(dtos, pageable, count);
+    }
+
+    @Override
+    public List<RestCall> findAll() {
+        return restCallRepository.findAll();
+    }
+
+    @Override
+    public Long count(RestCallRequest request, List<RestCallParameter> parameters, boolean intersection) {
+
+        return restCallRepository.count(request, parameters, intersection);
+    }
+}
diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/impl/RestCallStatisticsServiceImpl.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/impl/RestCallStatisticsServiceImpl.java
new file mode 100644
index 000000000..dae218e03
--- /dev/null
+++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/impl/RestCallStatisticsServiceImpl.java
@@ -0,0 +1,98 @@
+package uk.ac.ebi.spot.ols.reststatistics.service.impl;
+
+import java.util.Comparator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.stream.Collectors;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.data.domain.Page;
+import org.springframework.data.domain.PageImpl;
+import org.springframework.data.domain.Pageable;
+import org.springframework.stereotype.Service;
+
+import uk.ac.ebi.spot.ols.reststatistics.dto.KeyValueResultDto;
+import uk.ac.ebi.spot.ols.reststatistics.dto.RestCallDto;
+import uk.ac.ebi.spot.ols.reststatistics.dto.RestCallRequest;
+import uk.ac.ebi.spot.ols.reststatistics.entity.RestCallParameter;
+import uk.ac.ebi.spot.ols.reststatistics.service.RestCallService;
+import uk.ac.ebi.spot.ols.reststatistics.service.RestCallStatisticsService;
+
+@Service
+public class RestCallStatisticsServiceImpl implements RestCallStatisticsService {
+    private final RestCallService restCallService;
+
+    @Autowired
+    public RestCallStatisticsServiceImpl(RestCallService restCallService) {
+        this.restCallService = restCallService;
+    }
+
+    @Override
+    public Page<KeyValueResultDto> getRestCallsCountsByAddress(RestCallRequest request,
+                                                               List<RestCallParameter> parameters, boolean intersection, Pageable pageable) {
+        Page<RestCallDto> page = restCallService.getList(request, parameters, intersection, pageable);
+
+        Map<String, Long> countsMap = getCountsMap(page);
+
+        List<KeyValueResultDto> list = countsMap.entrySet().stream()
+            .map(entry -> new KeyValueResultDto(entry.getKey(), entry.getValue())).collect(Collectors.toList());
+
+        return new PageImpl<>(list, pageable, list.size());
+    }
+
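+    // Note: the grouping above (and in the statistics methods below) runs over
+    // page.getContent(), so counts are computed within the requested page only;
+    // aggregating across the whole collection requires requesting a
+    // correspondingly large page.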
+    @Override
+    public KeyValueResultDto getRestCallsTotalCount(RestCallRequest request, List<RestCallParameter> parameters,
+                                                    boolean intersection) {
+        Long count = restCallService.count(request, parameters, intersection);
+        Long value = Optional.ofNullable(count).orElse(0L);
+
+        return new KeyValueResultDto("total", value);
+    }
+
+    @Override
+    public Page<KeyValueResultDto> getStatisticsByParameter(RestCallRequest request, List<RestCallParameter> parameters,
+                                                            boolean intersection, Pageable pageable) {
+        Page<RestCallDto> page = restCallService.getList(request, parameters, intersection, pageable);
+
+        Map<String, Long> parametersWithCountsMap = page.getContent().stream()
+            .flatMap(restCallDto -> restCallDto.getParameters().stream())
+            .filter(request.getParameterNamePredicate()).filter(request.getParameterTypePredicate())
+            .collect(Collectors.groupingBy(RestCallParameter::getValue, Collectors.counting())).entrySet().stream()
+            .sorted(Map.Entry.comparingByValue(Comparator.reverseOrder())).collect(Collectors.toMap(
+                Map.Entry::getKey, Map.Entry::getValue, (oldValue, newValue) -> oldValue, LinkedHashMap::new));
+
+        List<KeyValueResultDto> list = parametersWithCountsMap.entrySet().stream()
+            .map(entry -> new KeyValueResultDto(entry.getKey(), entry.getValue())).collect(Collectors.toList());
+
+        return new PageImpl<>(list, pageable, list.size());
+    }
+
+    @Override
+    public Page<KeyValueResultDto> getStatisticsByDate(RestCallRequest request, List<RestCallParameter> parameters,
+                                                       boolean intersection, Pageable pageable) {
+        Page<RestCallDto> page = restCallService.getList(request, parameters, intersection, pageable);
+
+        LinkedHashMap<String, Long> map = page.getContent().stream()
+            .collect(Collectors.groupingBy(restCallDto -> restCallDto.getCreatedAt().toLocalDate().toString(),
+                Collectors.counting()))
+            .entrySet().stream().sorted(Map.Entry.comparingByKey()).collect(Collectors.toMap(Map.Entry::getKey,
+                Map.Entry::getValue, (oldValue, newValue) -> oldValue, LinkedHashMap::new));
+
+        List<KeyValueResultDto> list = map.entrySet().stream()
+            .map(entry -> new KeyValueResultDto(entry.getKey(), entry.getValue())).collect(Collectors.toList());
+
+        return new PageImpl<>(list, pageable, list.size());
+    }
+
+    private Map<String, Long> getCountsMap(Page<RestCallDto> page) {
+        Map<String, Long> addressesWithCountsMap = page.getContent().stream()
+            .collect(Collectors.groupingBy(RestCallDto::getUrl, Collectors.counting()));
+
+        return addressesWithCountsMap.entrySet().stream().sorted(Map.Entry.comparingByValue(Comparator.reverseOrder()))
+            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, (oldValue, newValue) -> oldValue,
+                LinkedHashMap::new));
+    }
+
+}
diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/impl/UrlCyclicDecoder.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/impl/UrlCyclicDecoder.java
new file mode 100644
index 000000000..32299cd02
--- /dev/null
+++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/impl/UrlCyclicDecoder.java
@@ -0,0 +1,35 @@
+package uk.ac.ebi.spot.ols.reststatistics.service.impl;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.UnsupportedEncodingException;
+import java.net.URLDecoder;
+import java.nio.charset.StandardCharsets;
+
+public class UrlCyclicDecoder {
+    public static final int URL_DECODE_TIMES = 3;
+
+    private final Logger logger = LoggerFactory.getLogger(getClass());
+
+    public String decode(String url) {
+        if (!url.contains("%")) {
+            return url;
+        }
+
+        int count = 0;
+        String decoded = url;
+        while (decoded.contains("%") && count < URL_DECODE_TIMES) {
+            try {
+                decoded = URLDecoder.decode(decoded, StandardCharsets.UTF_8.toString());
+            } catch (UnsupportedEncodingException e) {
+                logger.error("Could not decode URL: {}", e.getLocalizedMessage());
+
+                return url;
+            }
+            count++;
+        }
+
+        return decoded;
+    }
+}
diff --git a/backend/src/main/resources/application.properties b/backend/src/main/resources/application.properties
index 022080ffb..22b96ba67 100644
--- a/backend/src/main/resources/application.properties
+++ b/backend/src/main/resources/application.properties
@@ -2,4 +2,9 @@
 spring.jackson.serialization.INDENT_OUTPUT=true
 springdoc.swagger-ui.path=/swagger-ui-ols4.html
 springdoc.swagger-ui.operationsSorter=method
-springdoc.swagger-ui.disable-swagger-default-url=true
\ No newline at end of file
+springdoc.swagger-ui.disable-swagger-default-url=true
+spring.data.mongodb.host=127.0.0.1
+spring.data.mongodb.port=27017
+spring.data.mongodb.database=ols
+spring.data.mongodb.repositories.enabled=true
+frontends=TIBCENTRAL,NFDI4CHEM,NFDI4ING
diff --git a/docker-compose.yml b/docker-compose.yml
index aae324747..d94132fa1 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -13,6 +13,14 @@ services:
       - ./testcases:/opt/ols/dataload/testcases:ro
       #- ./docker_out:/tmp/out:rw
     command: ./dataload.dockersh ${OLS4_DATALOAD_ARGS:-}
+  mongo:
+    image: mongo:3.4.24
+    ports:
+      - 27017:27017
+    volumes:
+      - ols-mongo-data:/data/db
+    command:
+      - mongod
   ols4-solr:
     image: solr:9.0.0
     environment:
@@ -46,12 +54,15 @@ services:
     environment:
       - OLS_SOLR_HOST=http://ols4-solr:8983
      - OLS_NEO4J_HOST=bolt://ols4-neo4j:7687
+      - spring.data.mongodb.host=mongo
     depends_on:
       - ols4-solr
       - ols4-neo4j
+      - mongo
     links:
       - ols4-solr
      - ols4-neo4j
+      - mongo
   ols4-frontend:
     build:
       context: ./frontend
@@ -75,4 +86,7 @@ services:
       - ols4-backend
 volumes:
   ols4-neo4j-data:
-  ols4-solr-data:
\ No newline at end of file
+  ols4-solr-data:
+  ols-mongo-data:
+
+
From 30959cb2b321137c417547f24762c3b5638ca56a Mon Sep 17 00:00:00 2001
From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com>
Date: Thu, 22 Aug 2024 14:53:57 +0200
Subject: [PATCH 075/146] removed the experimental call that lists all rest calls for #10

---
 .../ols/reststatistics/controller/RestCallStatistics.java | 6 ------
 1 file changed, 6 deletions(-)

diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/controller/RestCallStatistics.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/controller/RestCallStatistics.java
index d9b4e2eca..4c9a72302 100644
--- a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/controller/RestCallStatistics.java
+++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/controller/RestCallStatistics.java
@@ -78,12 +78,6 @@ public HttpEntity<PagedModel<RestCallDto>> getList(
         return new ResponseEntity<>(assembler.toModel(page, restCallAssembler), HttpStatus.OK);
     }
 
-    @Operation(summary = "All REST Calls List")
-    @RequestMapping(value = "/all", produces = MediaType.APPLICATION_JSON_VALUE, method = RequestMethod.POST)
-    public List<RestCall> getList() {
-        return restCallService.findAll();
-    }
-
     @Operation(summary = "REST Calls statistics by URL")
     @RequestMapping(value = "/byUrl", produces = MediaType.APPLICATION_JSON_VALUE, method = RequestMethod.POST)
     public HttpEntity<PagedModel<KeyValueResultDto>> getStatisticsByUrl(
From 714559407103088b6e89f8965f01eea45ac9b87b Mon Sep 17 00:00:00 2001
From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com>
Date: Thu, 22 Aug 2024 15:04:57 +0200
Subject: [PATCH 076/146] removed unnecessary import

---
 .../spot/ols/reststatistics/controller/RestCallStatistics.java | 1 -
 1 file changed, 1 deletion(-)

diff --git
a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/controller/RestCallStatistics.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/controller/RestCallStatistics.java index 4c9a72302..f740d2307 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/controller/RestCallStatistics.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/controller/RestCallStatistics.java @@ -20,7 +20,6 @@ import io.swagger.v3.oas.annotations.Operation; import io.swagger.v3.oas.annotations.Parameter; -import uk.ac.ebi.spot.ols.reststatistics.entity.RestCall; import uk.ac.ebi.spot.ols.reststatistics.service.RestCallService; import uk.ac.ebi.spot.ols.reststatistics.service.RestCallStatisticsService; import uk.ac.ebi.spot.ols.reststatistics.dto.KeyValueResultDto; From 096524591c7e16a89db3e1c06b1d558e6967d102 Mon Sep 17 00:00:00 2001 From: deepananbu Date: Mon, 26 Aug 2024 14:03:19 +0200 Subject: [PATCH 077/146] formatted code as it is in EBI --- dataload/rdf2json/pom.xml | 194 +- .../uk/ac/ebi/rdf2json/OntologyGraph.java | 1760 +++++++++-------- .../annotators/DisjointWithAnnotator.java | 76 +- 3 files changed, 1020 insertions(+), 1010 deletions(-) diff --git a/dataload/rdf2json/pom.xml b/dataload/rdf2json/pom.xml index 73d116e73..d16987aa2 100644 --- a/dataload/rdf2json/pom.xml +++ b/dataload/rdf2json/pom.xml @@ -1,13 +1,13 @@ - 4.0.0 + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> + 4.0.0 - uk.ac.ebi.spot - rdf2json - 1.0-SNAPSHOT - jar + uk.ac.ebi.spot + rdf2json + 1.0-SNAPSHOT + jar @@ -31,110 +31,110 @@ guava 31.1-jre - + + + org.slf4j + slf4j-api + 2.0.9 + + + + + ch.qos.logback + logback-classic + 1.4.11 + + org.obolibrary.robot robot-core 1.9.6 - - uk.ac.ebi.spot.ols - ols4-shared - 1.0.0-SNAPSHOT - compile - - + - - 11 - 11 - + + 11 + 11 + - - + + - - org.apache.maven.plugins - maven-shade-plugin - - false + + org.apache.maven.plugins + maven-shade-plugin + + false - - - - uk.ac.ebi.rdf2json.RDF2JSON - - - true - - - - - - false - - + + + uk.ac.ebi.rdf2json.RDF2JSON + + + true + + + + + + false + + - - - *:* - - - META-INF/*.SF - META-INF/*.DSA - META-INF/*.RSA - - - - - - - package - - - shade - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + META-INF/*.SF + META-INF/*.DSA + META-INF/*.RSA + + + + + + + package + + + shade + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java index e0561056e..604198141 100644 --- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java +++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java @@ -3,9 +3,6 @@ import com.google.gson.stream.JsonWriter; import org.apache.jena.riot.RDFLanguages; -import org.semanticweb.owlapi.apibinding.OWLManager; -import org.semanticweb.owlapi.formats.OWLXMLDocumentFormat; -import org.semanticweb.owlapi.model.*; import uk.ac.ebi.rdf2json.annotators.*; import uk.ac.ebi.rdf2json.helpers.RdfListEvaluator; import uk.ac.ebi.rdf2json.properties.*; @@ -18,11 +15,10 @@ import org.apache.jena.riot.system.StreamRDF; import org.apache.jena.sparql.core.Quad; -import javax.net.ssl.HttpsURLConnection; -import java.io.*; -import java.net.HttpURLConnection; -import java.net.URL; -import java.net.URLConnection; +import java.io.IOException; +import java.io.File; +import java.io.FileInputStream; +import 
java.io.FileNotFoundException; import java.nio.file.Files; import java.nio.file.Paths; import java.util.*; @@ -30,949 +26,967 @@ import static uk.ac.ebi.rdf2json.OntologyNode.NodeType.*; import static uk.ac.ebi.ols.shared.DefinedFields.*; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.formats.OWLXMLDocumentFormat; +import org.semanticweb.owlapi.model.*; +import javax.net.ssl.HttpsURLConnection; +import java.io.File; +import java.io.*; +import java.net.HttpURLConnection; +import java.io.FileInputStream; +import java.net.URL; +import java.io.FileNotFoundException; +import java.net.URLConnection; + public class OntologyGraph implements StreamRDF { - public Map config; - public List importUrls = new ArrayList<>(); - public Set languages = new TreeSet<>(); - public long sourceFileTimestamp; - - public int numberOfClasses = 0; - public int numberOfProperties = 0; - public int numberOfIndividuals = 0; - - private RDFParserBuilder createParser(Lang lang) { - - if(lang != null) { - return RDFParser.create() - .forceLang(lang) - .strict(false) - .checking(false); - } else { - return RDFParser.create() - .strict(false) - .checking(false); - } - } - - private void parseRDF(String url, boolean convertToRDF) { - - try { - if (loadLocalFiles && !url.contains("://")) { - System.out.println("Using local file for " + url); - sourceFileTimestamp = new File(url).lastModified(); - createParser(RDFLanguages.filenameToLang(url, Lang.RDFXML)) - .source(new FileInputStream(url)).parse(this); - } else { - if (downloadedPath != null) { - String existingDownload = downloadedPath + "/" + urlToFilename(url); - try { - FileInputStream is = new FileInputStream(existingDownload); - System.out.println("Using predownloaded file for " + url); - sourceFileTimestamp = new File(existingDownload).lastModified(); - Lang lang = null; - try { - String existingDownloadMimeType = Files.readString(Paths.get(existingDownload + ".mimetype")); - lang = RDFLanguages.contentTypeToLang(existingDownloadMimeType); - } catch(IOException ignored) { - } - if(lang == null) { - lang = Lang.RDFXML; - } - createParser(lang).source(is).parse(this); - } catch (Exception e) { - System.out.println("Downloading (not predownloaded) " + url); - sourceFileTimestamp = System.currentTimeMillis(); - createParser(null).source(url).parse(this); - } - } else { - System.out.println("Downloading (no predownload path provided) " + url); - if (convertToRDF) { - String outputFile = "./src/main/resources/result"; - OWLOntology ont = convertOntologyToRDF(url, outputFile); - OWLDocumentFormat odf = ont.getOWLOntologyManager().getOntologyFormat(ont); - String lang1 = odf.getKey(); - String ext = ".owl"; - if (lang1.contains("Turtle")) - ext = ".ttl"; - url = outputFile + ext; - } - sourceFileTimestamp = System.currentTimeMillis(); - createParser(null).source(url).parse(this); - } - } - } catch (FileNotFoundException e) { - throw new RuntimeException(e); - } catch (IOException e) { + public Map config; + public List importUrls = new ArrayList<>(); + public Set languages = new TreeSet<>(); + public long sourceFileTimestamp; + + public int numberOfClasses = 0; + public int numberOfProperties = 0; + public int numberOfIndividuals = 0; + + private static final Logger logger = LoggerFactory.getLogger(OntologyGraph.class); + + private RDFParserBuilder createParser(Lang lang) { + + if(lang != null) { + return RDFParser.create() + .forceLang(lang) + .strict(false) + .checking(false); 
+ } else { + return RDFParser.create() + .strict(false) + .checking(false); + } + } + + private void parseRDF(String url, boolean convertToRDF) { + + try { + if (loadLocalFiles && !url.contains("://")) { + logger.debug("Using local file for {}", url); + sourceFileTimestamp = new File(url).lastModified(); + createParser(RDFLanguages.filenameToLang(url, Lang.RDFXML)) + .source(new FileInputStream(url)).parse(this); + } else { + if (downloadedPath != null) { + String existingDownload = downloadedPath + "/" + urlToFilename(url); + try { + FileInputStream is = new FileInputStream(existingDownload); + logger.debug("Using predownloaded file for {}", url); + sourceFileTimestamp = new File(existingDownload).lastModified(); + Lang lang = null; + try { + String existingDownloadMimeType = Files.readString(Paths.get(existingDownload + ".mimetype")); + lang = RDFLanguages.contentTypeToLang(existingDownloadMimeType); + } catch(IOException ignored) { + } + if(lang == null) { + lang = Lang.RDFXML; + } + createParser(lang).source(is).parse(this); + } catch (Exception e) { + logger.error("Downloading (not predownloaded) {}", url); + sourceFileTimestamp = System.currentTimeMillis(); + createParser(null).source(url).parse(this); + } + } else { + logger.debug("Downloading (no predownload path provided) {}", url); + if (convertToRDF) { + String outputFile = "./src/main/resources/result"; + OWLOntology ont = convertOntologyToRDF(url, outputFile); + OWLDocumentFormat odf = ont.getOWLOntologyManager().getOntologyFormat(ont); + String lang1 = odf.getKey(); + String ext = ".owl"; + if (lang1.contains("Turtle")) + ext = ".ttl"; + url = outputFile + ext; + } + sourceFileTimestamp = System.currentTimeMillis(); + createParser(null).source(url).parse(this); + } + } + } catch (FileNotFoundException e) { throw new RuntimeException(e); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + private OWLOntology convertOntologyToRDF(String url, String outputFile) throws IOException { + OWLOntologyManager ontManager = OWLManager.createOWLOntologyManager(); + FileOutputStream fos = null; + OWLOntology ont = null; + InputStream is = null; + URLConnection con = null; + boolean isParserException = false; + try { + boolean isRDF = true; + boolean isDefaultURLFailed = false; + try { + URL tempURL = new URL(url); + con = tempURL.openConnection(); + is = tempURL.openStream(); + } catch (IOException e) { + isDefaultURLFailed = true; + } + if (isDefaultURLFailed) { + url = replaceURLByProtocol(con, url); + try { + is = new URL(url).openStream(); + } catch (IOException e) { + e.printStackTrace(); + } + } + try { + ont = ontManager.loadOntologyFromOntologyDocument(is); + } catch (Exception e) { + isParserException = true; + } + if (isParserException) { + url = replaceURLByProtocol(con, url); + try { + is = new URL(url).openStream(); + } catch (IOException e) { + e.printStackTrace(); + } + ont = ontManager.loadOntologyFromOntologyDocument(is); + } + OWLDocumentFormat odf = ontManager.getOntologyFormat(ont); + String lang1 = odf.getKey(); + String ext = ".owl"; + if (lang1.contains("Turtle")) + ext = ".ttl"; + else if (!lang1.contains("RDF")) { + isRDF = false; + OWLDocumentFormat odf1 = new OWLXMLDocumentFormat(); + fos = new FileOutputStream(outputFile + ext); + ont.saveOntology(odf1, fos); + } + if (isRDF) { + fos = new FileOutputStream(outputFile + ext); + ont.saveOntology(fos); + } + } catch (OWLOntologyCreationException e) { + e.printStackTrace(); + } catch (IOException e) { + e.printStackTrace(); + } catch 
(OWLOntologyStorageException e) { + e.printStackTrace(); + } finally { + if (fos != null) + fos.close(); + if (is != null) + is.close(); + } + return ont; + } + private String replaceURLByProtocol(URLConnection con, String url) { + if (con instanceof HttpsURLConnection) { + url = url.replace("https:", "http:"); + } else if (con instanceof HttpURLConnection) { + url = url.replace("http:", "https:"); + } + return url; + } + + private String urlToFilename(String url) { + return url.replaceAll("[^a-zA-Z0-9\\.\\-]", "_"); + } + + + private boolean loadLocalFiles; + + String downloadedPath; + + + OntologyGraph(Map config, boolean loadLocalFiles, boolean noDates, String downloadedPath, boolean convertToRDF) { + + this.loadLocalFiles = loadLocalFiles; + this.downloadedPath = downloadedPath; + + long startTime = System.nanoTime(); + + this.config = config; + + languages.add("en"); + + String url = (String) config.get("ontology_purl"); + + if(url == null) { + + Collection> products = + (Collection>) config.get("products"); + + if(products != null) { + for(Map product : products) { + + String purl = (String) product.get("ontology_purl"); + + if(purl != null && purl.endsWith(".owl")) { + url = purl; + break; + } + + } + } + + } + + if(url == null) { + logger.error("Could not determine URL for ontology {}", (String)config.get("id")); + return; + } + + logger.debug("load ontology from: {}", url); + parseRDF(url, convertToRDF); + + // Before we evaluate imports, mark all the nodes so far as not imported + for(String id : nodes.keySet()) { + OntologyNode c = nodes.get(id); + if(c.uri != null) { + c.properties.addProperty(IMPORTED.getText(), PropertyValueLiteral.fromBoolean("false")); + } + } + + + while(importUrls.size() > 0) { + String importUrl = importUrls.get(0); + importUrls.remove(0); + + logger.debug("import: {}", importUrl); + parseRDF(importUrl, convertToRDF); + } + + // Now the imports are done, mark everything else as imported + for(String id : nodes.keySet()) { + OntologyNode c = nodes.get(id); + if(c.uri != null) { + if(!c.properties.hasProperty(IMPORTED.getText())) { + c.properties.addProperty(IMPORTED.getText(), PropertyValueLiteral.fromBoolean("true")); + } + } + } + + if(this.ontologyNode == null) { + + //// + //// There was no owl:Ontology. 
+ //// Could be an RDFS "ontology", or schema.org, or just some garbage file that didn't have any ontology in it + //// + + // Fallback 1: look for a single node without an rdf:type (fixes loading dcterms and dc elements rdf files) + + List nodesWithoutTypes = this.nodes.values().stream().filter( + node -> node.uri != null && !node.properties.hasProperty("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")) + .collect(Collectors.toList()); + + if(nodesWithoutTypes.size() == 1) { + this.ontologyNode = nodesWithoutTypes.get(0); + } + + if(this.ontologyNode == null) { + + // Fallback 2: fabricate an ontology node using the base_uri (fixes loading Schema.org rdf) + + List baseUris = (List) this.config.get("base_uri"); + + if(baseUris != null) { + this.ontologyNode = new OntologyNode(); + this.ontologyNode.uri = baseUris.get(0); + this.ontologyNode.types.add(OntologyNode.NodeType.ONTOLOGY); + this.nodes.put(baseUris.get(0), this.ontologyNode); + } + + if(this.ontologyNode == null) { + + // Fallback 3: fabricate an ontology node using the purl + + String purl = (String)this.config.get("ontology_purl"); + + if(purl != null) { + this.ontologyNode = new OntologyNode(); + this.ontologyNode.uri = purl; + this.ontologyNode.types.add(OntologyNode.NodeType.ONTOLOGY); + this.nodes.put(purl, this.ontologyNode); + } + } + } + } + + ontologyNode.properties.addProperty( + "numberOfEntities", PropertyValueLiteral.fromString(Integer.toString( + numberOfClasses + numberOfProperties + numberOfIndividuals))); + + ontologyNode.properties.addProperty( + "numberOfClasses", PropertyValueLiteral.fromString(Integer.toString(numberOfClasses))); + + ontologyNode.properties.addProperty( + "numberOfProperties", PropertyValueLiteral.fromString(Integer.toString(numberOfProperties))); + + ontologyNode.properties.addProperty( + "numberOfIndividuals", PropertyValueLiteral.fromString(Integer.toString(numberOfIndividuals))); + + + if(!noDates) { + String now = java.time.LocalDateTime.now().toString(); + + ontologyNode.properties.addProperty( + "loaded", PropertyValueLiteral.fromString(now)); + + ontologyNode.properties.addProperty( + "sourceFileTimestamp", PropertyValueLiteral.fromString(new Date(sourceFileTimestamp).toString())); + } + + for(String language : languages) { + ontologyNode.properties.addProperty("language", PropertyValueLiteral.fromString(language)); } + + + long endTime = System.nanoTime(); + logger.info("load ontology: {}", ((endTime - startTime) / 1000 / 1000 / 1000)); + + SearchableAnnotationValuesAnnotator.annotateSearchableAnnotationValues(this); + InverseOfAnnotator.annotateInverseOf(this); + NegativePropertyAssertionAnnotator.annotateNegativePropertyAssertions(this); + OboSynonymTypeNameAnnotator.annotateOboSynonymTypeNames(this); // n.b. 
this one labels axioms so must run before the ReifiedPropertyAnnotator + DirectParentsAnnotator.annotateDirectParents(this); + RelatedAnnotator.annotateRelated(this); + HierarchicalParentsAnnotator.annotateHierarchicalParents(this); // must run after RelatedAnnotator + AncestorsAnnotator.annotateAncestors(this); + HierarchyMetricsAnnotator.annotateHierarchyMetrics(this); // must run after HierarchicalParentsAnnotator + ShortFormAnnotator.annotateShortForms(this); + DefinitionAnnotator.annotateDefinitions(this); + SynonymAnnotator.annotateSynonyms(this); + ReifiedPropertyAnnotator.annotateReifiedProperties(this); + OntologyMetadataAnnotator.annotateOntologyMetadata(this); + HierarchyFlagsAnnotator.annotateHierarchyFlags(this); // must run after DirectParentsAnnotator and HierarchicalParentsAnnotator + IsObsoleteAnnotator.annotateIsObsolete(this); + LabelAnnotator.annotateLabels(this); // must run after ShortFormAnnotator + ConfigurablePropertyAnnotator.annotateConfigurableProperties(this); + PreferredRootsAnnotator.annotatePreferredRoots(this); + DisjointWithAnnotator.annotateDisjointWith(this); + HasIndividualsAnnotator.annotateHasIndividuals(this); + EquivalenceAnnotator.annotateEquivalance(this); } - private OWLOntology convertOntologyToRDF(String url, String outputFile) throws IOException { - OWLOntologyManager ontManager = OWLManager.createOWLOntologyManager(); - FileOutputStream fos = null; - OWLOntology ont = null; - InputStream is = null; - URLConnection con = null; - boolean isParserException = false; - try { - boolean isRDF = true; - boolean isDefaultURLFailed = false; - - try { - URL tempURL = new URL(url); - con = tempURL.openConnection(); - is = tempURL.openStream(); - } catch (IOException e) { - isDefaultURLFailed = true; - } - if (isDefaultURLFailed) { - url = replaceURLByProtocol(con, url); - try { - is = new URL(url).openStream(); - } catch (IOException e) { - e.printStackTrace(); - } - } - try { - ont = ontManager.loadOntologyFromOntologyDocument(is); - } catch (Exception e) { - isParserException = true; - } - - if (isParserException) { - url = replaceURLByProtocol(con, url); - try { - is = new URL(url).openStream(); - } catch (IOException e) { - e.printStackTrace(); - } - ont = ontManager.loadOntologyFromOntologyDocument(is); - } - - OWLDocumentFormat odf = ontManager.getOntologyFormat(ont); - - String lang1 = odf.getKey(); - String ext = ".owl"; - if (lang1.contains("Turtle")) - ext = ".ttl"; - else if (!lang1.contains("RDF")) { - isRDF = false; - OWLDocumentFormat odf1 = new OWLXMLDocumentFormat(); - fos = new FileOutputStream(outputFile + ext); - ont.saveOntology(odf1, fos); - } - if (isRDF) { - fos = new FileOutputStream(outputFile + ext); - ont.saveOntology(fos); - } - - } catch (OWLOntologyCreationException e) { - e.printStackTrace(); - } catch (IOException e) { - e.printStackTrace(); - } catch (OWLOntologyStorageException e) { - e.printStackTrace(); - } finally { - if (fos != null) - fos.close(); - if (is != null) - is.close(); - - } - return ont; - } - - private String replaceURLByProtocol(URLConnection con, String url) { - if (con instanceof HttpsURLConnection) { - url = url.replace("https:", "http:"); - } else if (con instanceof HttpURLConnection) { - url = url.replace("http:", "https:"); - } - return url; - - } - - - private String urlToFilename(String url) { - return url.replaceAll("[^a-zA-Z0-9\\.\\-]", "_"); - } - - - private boolean loadLocalFiles; - - String downloadedPath; - - - OntologyGraph(Map config, boolean loadLocalFiles, boolean noDates, String 
downloadedPath, boolean convertToRDF) { - - this.loadLocalFiles = loadLocalFiles; - this.downloadedPath = downloadedPath; - - long startTime = System.nanoTime(); - - this.config = config; - - languages.add("en"); - - String url = (String) config.get("ontology_purl"); - - if(url == null) { - - Collection> products = - (Collection>) config.get("products"); - - if(products != null) { - for(Map product : products) { - - String purl = (String) product.get("ontology_purl"); - - if(purl != null && purl.endsWith(".owl")) { - url = purl; - break; - } - } - } + static final Set classTypes = new TreeSet<>(Set.of(ENTITY, CLASS)); + static final Set dataPropertyTypes = new TreeSet<>(Set.of(ENTITY, PROPERTY, DATA_PROPERTY)); - } + static final Set objectPropertyTypes = new TreeSet<>(Set.of(ENTITY, PROPERTY, OBJECT_PROPERTY)); + static final Set annotationPropertyTypes = new TreeSet<>(Set.of(ENTITY, PROPERTY, ANNOTATION_PROPERTY)); - if(url == null) { - System.out.println("Could not determine URL for ontology " + (String)config.get("id")); - return; - } + static final Set propertyTypes = new TreeSet<>(Set.of(ENTITY, PROPERTY)); + static final Set individualTypes = new TreeSet<>(Set.of(ENTITY, INDIVIDUAL)); - System.out.println("load ontology from: " + url); - parseRDF(url, convertToRDF); + public void write(JsonWriter writer) throws Throwable { - // Before we evaluate imports, mark all the nodes so far as not imported - for(String id : nodes.keySet()) { - OntologyNode c = nodes.get(id); - if(c.uri != null) { - c.properties.addProperty(IMPORTED.getText(), PropertyValueLiteral.fromBoolean("false")); - } - } + String ontologyId = ((String) config.get("id")).toLowerCase(); + try { + writer.beginObject(); - while(importUrls.size() > 0) { - String importUrl = importUrls.get(0); - importUrls.remove(0); + writer.name("ontologyId"); + writer.value(ontologyId); - System.out.println("import: " + importUrl); - parseRDF(importUrl, convertToRDF); - } + writer.name("iri"); + writer.value(ontologyNode.uri); - // Now the imports are done, mark everything else as imported - for(String id : nodes.keySet()) { - OntologyNode c = nodes.get(id); - if(c.uri != null) { - if(!c.properties.hasProperty(IMPORTED.getText())) { - c.properties.addProperty(IMPORTED.getText(), PropertyValueLiteral.fromBoolean("true")); - } - } - } + for (String configKey : config.keySet()) { + Object configVal = config.get(configKey); - if(this.ontologyNode == null) { + // we include this (lowercased) as "ontologyId" rather than "id", + // so that the name "id" doesn't clash with downstream id fields in neo4j/solr + // + if (configKey.equals("id")) + continue; - //// - //// There was no owl:Ontology. - //// Could be an RDFS "ontology", or schema.org, or just some garbage file that didn't have any ontology in it - //// + // already included explicitly above + if (configKey.equals("ontologyId")) + continue; - // Fallback 1: look for a single node without an rdf:type (fixes loading dcterms and dc elements rdf files) + // don't print the iri from the config, we already printed the one from the OWL + // TODO: which one to keep, or should we keep both? 
+ if (configKey.equals("iri")) + continue; - List nodesWithoutTypes = this.nodes.values().stream().filter( - node -> node.uri != null && !node.properties.hasProperty("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")) - .collect(Collectors.toList()); + // annotated as hasPreferredRoot by PreferredRootsAnnotator, no need to duplicate + if (configKey.equals("preferred_root_term")) + continue; - if(nodesWithoutTypes.size() == 1) { - this.ontologyNode = nodesWithoutTypes.get(0); - } + // everything else from the config is stored as a normal property + writer.name(configKey); + writeGenericValue(writer, configVal); + } - if(this.ontologyNode == null) { + writeProperties(writer, ontologyNode.properties, Set.of("ontology")); - // Fallback 2: fabricate an ontology node using the base_uri (fixes loading Schema.org rdf) + writer.name("classes"); + writer.beginArray(); - List baseUris = (List) this.config.get("base_uri"); + for (String id : nodes.keySet()) { + OntologyNode c = nodes.get(id); + if (c.uri == null) { + // don't print bnodes at top level + continue; + } + if (c.types.contains(OntologyNode.NodeType.CLASS)) { + writeNode(writer, c, OntologyNode.NodeType.toString(classTypes)); + } + } - if(baseUris != null) { - this.ontologyNode = new OntologyNode(); - this.ontologyNode.uri = baseUris.get(0); - this.ontologyNode.types.add(OntologyNode.NodeType.ONTOLOGY); - this.nodes.put(baseUris.get(0), this.ontologyNode); - } + writer.endArray(); - if(this.ontologyNode == null) { - // Fallback 3: fabricate an ontology node using the purl + writer.name("properties"); + writer.beginArray(); - String purl = (String)this.config.get("ontology_purl"); + for (String id : nodes.keySet()) { + OntologyNode c = nodes.get(id); + if (c.uri == null) { + // don't print bnodes at top level + continue; + } + if (c.types.contains(OntologyNode.NodeType.OBJECT_PROPERTY)) { + writeNode(writer, c, OntologyNode.NodeType.toString(objectPropertyTypes)); + } else if (c.types.contains(OntologyNode.NodeType.ANNOTATION_PROPERTY)) { + writeNode(writer, c, OntologyNode.NodeType.toString(annotationPropertyTypes)); + } else if (c.types.contains(OntologyNode.NodeType.DATA_PROPERTY)) { + writeNode(writer, c, OntologyNode.NodeType.toString(dataPropertyTypes)); + } else if (c.types.contains(OntologyNode.NodeType.PROPERTY)) { + writeNode(writer, c, OntologyNode.NodeType.toString(propertyTypes)); + } + } - if(purl != null) { - this.ontologyNode = new OntologyNode(); - this.ontologyNode.uri = purl; - this.ontologyNode.types.add(OntologyNode.NodeType.ONTOLOGY); - this.nodes.put(purl, this.ontologyNode); - } - } - } - } + writer.endArray(); - ontologyNode.properties.addProperty( - "numberOfEntities", PropertyValueLiteral.fromString(Integer.toString( - numberOfClasses + numberOfProperties + numberOfIndividuals))); - ontologyNode.properties.addProperty( - "numberOfClasses", PropertyValueLiteral.fromString(Integer.toString(numberOfClasses))); + writer.name("individuals"); + writer.beginArray(); - ontologyNode.properties.addProperty( - "numberOfProperties", PropertyValueLiteral.fromString(Integer.toString(numberOfProperties))); + for (String id : nodes.keySet()) { + OntologyNode c = nodes.get(id); + if (c.uri == null) { + // don't print bnodes at top level + continue; + } + if (c.types.contains(OntologyNode.NodeType.INDIVIDUAL)) { + writeNode(writer, c, OntologyNode.NodeType.toString(individualTypes)); + } + } + + writer.endArray(); + + + writer.endObject(); + } catch (Throwable t) { + logger.error("Error in writing ontology with id = {}", 
ontologyId, t); + throw t; + } + } - ontologyNode.properties.addProperty( - "numberOfIndividuals", PropertyValueLiteral.fromString(Integer.toString(numberOfIndividuals))); + private void writeNode(JsonWriter writer, OntologyNode c, Set types) throws Throwable { - if(!noDates) { - String now = java.time.LocalDateTime.now().toString(); + if(c.types.contains(OntologyNode.NodeType.RDF_LIST)) { - ontologyNode.properties.addProperty( - "loaded", PropertyValueLiteral.fromString(now)); + writer.beginArray(); - ontologyNode.properties.addProperty( - "sourceFileTimestamp", PropertyValueLiteral.fromString(new Date(sourceFileTimestamp).toString())); - } + for(PropertyValue listEntry : RdfListEvaluator.evaluateRdfList(c, this)) { + writePropertyValue(writer, listEntry, null); + } - for(String language : languages) { - ontologyNode.properties.addProperty("language", PropertyValueLiteral.fromString(language)); - } + writer.endArray(); + } else { - long endTime = System.nanoTime(); - System.out.println("load ontology: " + ((endTime - startTime) / 1000 / 1000 / 1000)); + writer.beginObject(); - SearchableAnnotationValuesAnnotator.annotateSearchableAnnotationValues(this); - InverseOfAnnotator.annotateInverseOf(this); - NegativePropertyAssertionAnnotator.annotateNegativePropertyAssertions(this); - OboSynonymTypeNameAnnotator.annotateOboSynonymTypeNames(this); // n.b. this one labels axioms so must run before the ReifiedPropertyAnnotator - DirectParentsAnnotator.annotateDirectParents(this); - RelatedAnnotator.annotateRelated(this); - HierarchicalParentsAnnotator.annotateHierarchicalParents(this); // must run after RelatedAnnotator - AncestorsAnnotator.annotateAncestors(this); - HierarchyMetricsAnnotator.annotateHierarchyMetrics(this); // must run after HierarchicalParentsAnnotator - ShortFormAnnotator.annotateShortForms(this); - DefinitionAnnotator.annotateDefinitions(this); - SynonymAnnotator.annotateSynonyms(this); - ReifiedPropertyAnnotator.annotateReifiedProperties(this); - OntologyMetadataAnnotator.annotateOntologyMetadata(this); - HierarchyFlagsAnnotator.annotateHierarchyFlags(this); // must run after DirectParentsAnnotator and HierarchicalParentsAnnotator - IsObsoleteAnnotator.annotateIsObsolete(this); - LabelAnnotator.annotateLabels(this); // must run after ShortFormAnnotator - ConfigurablePropertyAnnotator.annotateConfigurableProperties(this); - PreferredRootsAnnotator.annotatePreferredRoots(this); - DisjointWithAnnotator.annotateDisjointWith(this); - HasIndividualsAnnotator.annotateHasIndividuals(this); - EquivalenceAnnotator.annotateEquivalance(this); + if (c.uri != null) { + writer.name("iri"); + writer.value(c.uri); + } - } - - - static final Set classTypes = new TreeSet<>(Set.of(ENTITY, CLASS)); - static final Set dataPropertyTypes = new TreeSet<>(Set.of(ENTITY, PROPERTY, DATA_PROPERTY)); + writeProperties(writer, c.properties, types); + writer.endObject(); + } + } - static final Set objectPropertyTypes = new TreeSet<>(Set.of(ENTITY, PROPERTY, OBJECT_PROPERTY)); - static final Set annotationPropertyTypes = new TreeSet<>(Set.of(ENTITY, PROPERTY, ANNOTATION_PROPERTY)); + private void writeProperties(JsonWriter writer, PropertySet properties, Set types) throws Throwable { - static final Set propertyTypes = new TreeSet<>(Set.of(ENTITY, PROPERTY)); - static final Set individualTypes = new TreeSet<>(Set.of(ENTITY, INDIVIDUAL)); + if(types != null) { + writer.name("type"); + writer.beginArray(); + for(String type : types) { + writer.value(type); + } + writer.endArray(); + } - public void 
write(JsonWriter writer) throws IOException { + // TODO: sort keys, rdf:type should be first ideally + for (String predicate : properties.getPropertyPredicates()) { + + if(types != null && types.contains("ontology") && predicate.equals("ontologyId")) { + // hack to workaround a punning issue. + // if the Ontology is also a Class it will have an ontologyId added by + // the OntologyMetadataAnnotator, but there is already an ontologyId field + // printed as part of the ontology object, so skip this one... + // TODO: fix this as part of the punning refactoring + // + continue; + } + + List values = properties.getPropertyValues(predicate); + + writer.name(predicate); + + if(values.size() == 1) { + writePropertyValue(writer, values.get(0), null); + } else { + writer.beginArray(); + for (PropertyValue value : values) { + writePropertyValue(writer, value, null); + } + writer.endArray(); + } + } + } - writer.beginObject(); - writer.name("ontologyId"); - writer.value(((String) config.get("id")).toLowerCase()); + public void writePropertyValue(JsonWriter writer, PropertyValue value, Set types) throws Throwable { + if (value.axioms.size() > 0) { + // reified + writer.beginObject(); + writer.name("type"); + writer.beginArray(); + writer.value("reification"); + writer.endArray(); + writer.name("value"); + writeValue(writer, value); + writer.name("axioms"); + writer.beginArray(); + for(PropertySet axiom : value.axioms) { + writer.beginObject(); + writeProperties(writer, axiom, null); + writer.endObject(); + } + writer.endArray(); + writer.endObject(); + } else { + // not reified + writeValue(writer, value); + } - writer.name("iri"); - writer.value(ontologyNode.uri); + } - for(String configKey : config.keySet()) { - Object configVal = config.get(configKey); + private boolean isXMLBuiltinDatatype(String uri) { + return uri.startsWith("http://www.w3.org/2001/XMLSchema#"); + } + public void writeValue(JsonWriter writer, PropertyValue value) throws Throwable { + assert (value.axioms == null); + + try { + switch (value.getType()) { + case BNODE: + OntologyNode c = nodes.get(((PropertyValueBNode) value).getId()); + if (c == null) { + // empty bnode values present in some ontologies, see issue #116 + writer.value(""); + } else { + writeNode(writer, c, null); + } + break; + case ID: + break; + case LITERAL: + PropertyValueLiteral literal = (PropertyValueLiteral) value; + if (literal.getDatatype() != null) { + if (literal.getDatatype().equals("http://www.w3.org/2001/XMLSchema#boolean")) { + writer.value(Boolean.valueOf(literal.getValue()).booleanValue()); + } else if (literal.getDatatype().equals("http://www.w3.org/2001/XMLSchema#double")) { + writer.value(Double.valueOf(literal.getValue()).doubleValue()); + } else if (literal.getDatatype().equals("http://www.w3.org/2001/XMLSchema#integer")) { + // Workaround for gsso using "integer" value of 9780304343010. 
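+                            // e.g. Integer.valueOf("9780304343010") throws NumberFormatException
+                            // (the value exceeds Integer.MAX_VALUE, 2147483647), so the catch
+                            // below falls back to writing the literal as a double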
+ try { + writer.value(Integer.valueOf(literal.getValue()).intValue()); + } catch (NumberFormatException e) { + writer.value(Double.valueOf(literal.getValue()).doubleValue()); + } + } else if (literal.getDatatype().equals("http://www.w3.org/2001/XMLSchema#string")) { + writer.beginObject(); + writer.name("type"); + writer.beginArray(); + writer.value("literal"); + writer.endArray(); + if (!literal.getDatatype().equals("http://www.w3.org/2001/XMLSchema#string")) { + writer.name("datatype"); + writer.value(literal.getDatatype()); + } + writer.name("value"); + writer.value(literal.getValue()); + if (!literal.getLang().equals("")) { + writer.name("lang"); + writer.value(literal.getLang()); + } + writer.endObject(); + } else { + writer.beginObject(); + writer.name("type"); + writer.beginArray(); + writer.value("literal"); + writer.endArray(); + writer.name("datatype"); + writer.value(literal.getDatatype()); + writer.name("value"); + writer.value(literal.getValue()); + if (!literal.getLang().equals("")) { + writer.name("lang"); + writer.value(literal.getLang()); + } + writer.endObject(); + } + } + break; + case URI: + String uri = ((PropertyValueURI) value).getUri(); + OntologyNode uriNode = nodes.get(uri); + if (uriNode != null && !isXMLBuiltinDatatype(uri) && uriNode.types.contains(OntologyNode.NodeType.DATATYPE)) { + // special case for rdfs:Datatype; nest it as with a bnode instead of referencing + writeNode(writer, uriNode, Set.of("datatype")); + } else { + writer.value(uri); + } + break; + case RELATED: + writer.beginObject(); + writer.name("property"); + writer.value(((PropertyValueRelated) value).getProperty()); + writer.name("value"); + writer.value(((PropertyValueRelated) value).getFiller().uri); + writeProperties(writer, ((PropertyValueRelated) value).getClassExpression().properties, Set.of("related")); + writer.endObject(); + break; + case ANCESTORS: + PropertyValueAncestors ancestors = (PropertyValueAncestors) value; + Set ancestorIris = ancestors.getAncestors(this); + if (ancestorIris.size() == 1) { + writer.value(ancestorIris.iterator().next()); + } else { + writer.beginArray(); + for (String ancestorIri : ancestorIris) { + writer.value(ancestorIri); + } + writer.endArray(); + } + break; + default: + writer.value("?"); + break; + } + } catch (Throwable t) { + logger.error("Error writing property value {}", value, t); + throw t; + } + } - // we include this (lowercased) as "ontologyId" rather than "id", - // so that the name "id" doesn't clash with downstream id fields in neo4j/solr - // - if(configKey.equals("id")) - continue; + public Map nodes = new TreeMap<>(); + public OntologyNode ontologyNode = null; - // already included explicitly above - if(configKey.equals("ontologyId")) - continue; + private OntologyNode getOrCreateNode(Node node) { + String id = nodeIdFromJenaNode(node); + OntologyNode entity = nodes.get(id); + if (entity != null) { + return entity; + } - // don't print the iri from the config, we already printed the one from the OWL - // TODO: which one to keep, or should we keep both? 
- if(configKey.equals("iri")) - continue; + entity = new OntologyNode(); - // annotated as hasPreferredRoot by PreferredRootsAnnotator, no need to duplicate - if(configKey.equals("preferred_root_term")) - continue; + if(!node.isBlank()) + entity.uri = id; - // everything else from the config is stored as a normal property - writer.name(configKey); - writeGenericValue(writer, configVal); - } + nodes.put(id, entity); + return entity; + } - writeProperties(writer, ontologyNode.properties, Set.of("ontology")); + @Override + public void start() { - writer.name("classes"); - writer.beginArray(); + } - for(String id : nodes.keySet()) { - OntologyNode c = nodes.get(id); - if (c.uri == null) { - // don't print bnodes at top level - continue; - } - if (c.types.contains(OntologyNode.NodeType.CLASS)) { - writeNode(writer, c, OntologyNode.NodeType.toString(classTypes)); - } - } + @Override + public void triple(Triple triple) { - writer.endArray(); + if(triple.getObject().isLiteral()) { + handleLiteralTriple(triple); + } else { + handleNamedNodeTriple(triple); + } + // TODO: BNodes? - writer.name("properties"); - writer.beginArray(); + } - for(String id : nodes.keySet()) { - OntologyNode c = nodes.get(id); - if (c.uri == null) { - // don't print bnodes at top level - continue; - } - if (c.types.contains(OntologyNode.NodeType.OBJECT_PROPERTY)) { - writeNode(writer, c, OntologyNode.NodeType.toString(objectPropertyTypes)); - } else if (c.types.contains(OntologyNode.NodeType.ANNOTATION_PROPERTY)) { - writeNode(writer, c, OntologyNode.NodeType.toString(annotationPropertyTypes)); - } else if (c.types.contains(OntologyNode.NodeType.DATA_PROPERTY)) { - writeNode(writer, c, OntologyNode.NodeType.toString(dataPropertyTypes)); - } else if (c.types.contains(OntologyNode.NodeType.PROPERTY)) { - writeNode(writer, c, OntologyNode.NodeType.toString(propertyTypes)); - } - } - - writer.endArray(); - - - writer.name("individuals"); - writer.beginArray(); - - for(String id : nodes.keySet()) { - OntologyNode c = nodes.get(id); - if (c.uri == null) { - // don't print bnodes at top level - continue; - } - if (c.types.contains(OntologyNode.NodeType.INDIVIDUAL)) { - writeNode(writer, c, OntologyNode.NodeType.toString(individualTypes)); - } - } - - writer.endArray(); - - - writer.endObject(); - - } - - - private void writeNode(JsonWriter writer, OntologyNode c, Set types) throws IOException { - - if(c.types.contains(OntologyNode.NodeType.RDF_LIST)) { - - writer.beginArray(); - - for(PropertyValue listEntry : RdfListEvaluator.evaluateRdfList(c, this)) { - writePropertyValue(writer, listEntry, null); - } - - writer.endArray(); - - } else { - - writer.beginObject(); - - if (c.uri != null) { - writer.name("iri"); - writer.value(c.uri); - } - - writeProperties(writer, c.properties, types); - writer.endObject(); - } - } - - private void writeProperties(JsonWriter writer, PropertySet properties, Set types) throws IOException { - - if(types != null) { - writer.name("type"); - writer.beginArray(); - for(String type : types) { - writer.value(type); - } - writer.endArray(); - } - - // TODO: sort keys, rdf:type should be first ideally - for (String predicate : properties.getPropertyPredicates()) { - - if(types != null && types.contains("ontology") && predicate.equals("ontologyId")) { - // hack to workaround a punning issue. - // if the Ontology is also a Class it will have an ontologyId added by - // the OntologyMetadataAnnotator, but there is already an ontologyId field - // printed as part of the ontology object, so skip this one... 
- // TODO: fix this as part of the punning refactoring - // - continue; - } - - List values = properties.getPropertyValues(predicate); - - writer.name(predicate); - - if(values.size() == 1) { - writePropertyValue(writer, values.get(0), null); - } else { - writer.beginArray(); - for (PropertyValue value : values) { - writePropertyValue(writer, value, null); - } - writer.endArray(); - } - } - } - - - public void writePropertyValue(JsonWriter writer, PropertyValue value, Set types) throws IOException { - if (value.axioms.size() > 0) { - // reified - writer.beginObject(); - writer.name("type"); - writer.beginArray(); - writer.value("reification"); - writer.endArray(); - writer.name("value"); - writeValue(writer, value); - writer.name("axioms"); - writer.beginArray(); - for(PropertySet axiom : value.axioms) { - writer.beginObject(); - writeProperties(writer, axiom, null); - writer.endObject(); - } - writer.endArray(); - writer.endObject(); - } else { - // not reified - writeValue(writer, value); - } - - } - - private boolean isXMLBuiltinDatatype(String uri) { - return uri.startsWith("http://www.w3.org/2001/XMLSchema#"); - } - public void writeValue(JsonWriter writer, PropertyValue value) throws IOException { - assert (value.axioms == null); - - switch(value.getType()) { - case BNODE: - OntologyNode c = nodes.get(((PropertyValueBNode) value).getId()); - if (c == null) { - // empty bnode values present in some ontologies, see issue #116 - writer.value(""); - } else { - writeNode(writer, c, null); - } - break; - case ID: - break; - case LITERAL: - PropertyValueLiteral literal = (PropertyValueLiteral) value; - if (literal.getDatatype() != null) { - if (literal.getDatatype().equals("http://www.w3.org/2001/XMLSchema#boolean")) { - writer.value(Boolean.valueOf(literal.getValue()).booleanValue()); - } else if (literal.getDatatype().equals("http://www.w3.org/2001/XMLSchema#double")) { - writer.value(Double.valueOf(literal.getValue()).doubleValue()); - } else if (literal.getDatatype().equals("http://www.w3.org/2001/XMLSchema#string")) { - writer.beginObject(); - writer.name("type"); - writer.beginArray(); - writer.value("literal"); - writer.endArray(); - if(!literal.getDatatype().equals("http://www.w3.org/2001/XMLSchema#string")) { - writer.name("datatype"); - writer.value(literal.getDatatype()); - } - writer.name("value"); - writer.value(literal.getValue()); - if(!literal.getLang().equals("")) { - writer.name("lang"); - writer.value(literal.getLang()); - } - writer.endObject(); - } else { - writer.beginObject(); - writer.name("type"); - writer.beginArray(); - writer.value("literal"); - writer.endArray(); - writer.name("datatype"); - writer.value(literal.getDatatype()); - writer.name("value"); - writer.value(literal.getValue()); - if(!literal.getLang().equals("")) { - writer.name("lang"); - writer.value(literal.getLang()); - } - writer.endObject(); - } - } - break; - case URI: - String uri = ((PropertyValueURI) value).getUri(); - OntologyNode uriNode = nodes.get(uri); - if(uriNode != null && !isXMLBuiltinDatatype(uri) && uriNode.types.contains(OntologyNode.NodeType.DATATYPE)) { - // special case for rdfs:Datatype; nest it as with a bnode instead of referencing - writeNode(writer, uriNode, Set.of("datatype")); - } else { - writer.value(uri); - } - break; - case RELATED: - writer.beginObject(); - writer.name("property"); - writer.value(((PropertyValueRelated) value).getProperty()); - writer.name("value"); - writer.value(((PropertyValueRelated) value).getFiller().uri); - writeProperties(writer, 
((PropertyValueRelated) value).getClassExpression().properties, Set.of("related")); - writer.endObject(); - break; - case ANCESTORS: - PropertyValueAncestors ancestors = (PropertyValueAncestors) value; - Set ancestorIris = ancestors.getAncestors(this); - if(ancestorIris.size() == 1) { - writer.value(ancestorIris.iterator().next()); - } else { - writer.beginArray(); - for(String ancestorIri : ancestorIris) { - writer.value(ancestorIri); - } - writer.endArray(); - } - break; - default: - writer.value("?"); - break; - } - } - - - - - - - public Map nodes = new TreeMap<>(); - public OntologyNode ontologyNode = null; - - private OntologyNode getOrCreateNode(Node node) { - String id = nodeIdFromJenaNode(node); - OntologyNode entity = nodes.get(id); - if (entity != null) { - return entity; - } - - entity = new OntologyNode(); - - if(!node.isBlank()) - entity.uri = id; - - nodes.put(id, entity); - return entity; - } - - @Override - public void start() { - - } - - @Override - public void triple(Triple triple) { - - if(triple.getObject().isLiteral()) { - handleLiteralTriple(triple); - } else { - handleNamedNodeTriple(triple); - } - - // TODO: BNodes? - - } - - - public void handleLiteralTriple(Triple triple) { - - String subjId = nodeIdFromJenaNode(triple.getSubject()); - OntologyNode subjNode = getOrCreateNode(triple.getSubject()); - - String lang = triple.getObject().getLiteralLanguage(); - if(lang != null && !lang.equals("")) { - languages.add(lang); - } - - subjNode.properties.addProperty(triple.getPredicate().getURI(), PropertyValue.fromJenaNode(triple.getObject())); - - } - - public void handleNamedNodeTriple(Triple triple) { - - OntologyNode subjNode = getOrCreateNode(triple.getSubject()); - - switch (triple.getPredicate().getURI()) { - case "http://www.w3.org/1999/02/22-rdf-syntax-ns#type": - handleType(subjNode, triple.getObject()); - break; - case "http://www.w3.org/1999/02/22-rdf-syntax-ns#rest": - case "http://www.w3.org/1999/02/22-rdf-syntax-ns#first": - subjNode.types.add(OntologyNode.NodeType.RDF_LIST); - break; - case "http://www.w3.org/2002/07/owl#imports": - importUrls.add(triple.getObject().getURI()); - break; - } + public void handleLiteralTriple(Triple triple) { - subjNode.properties.addProperty(triple.getPredicate().getURI(), PropertyValue.fromJenaNode(triple.getObject())); + String subjId = nodeIdFromJenaNode(triple.getSubject()); + OntologyNode subjNode = getOrCreateNode(triple.getSubject()); + String lang = triple.getObject().getLiteralLanguage(); + if(lang != null && !lang.equals("")) { + languages.add(lang); + } - } + subjNode.properties.addProperty(triple.getPredicate().getURI(), PropertyValue.fromJenaNode(triple.getObject())); - public void handleType(OntologyNode subjNode, Node type) { + } - if(!type.isURI()) - return; + public void handleNamedNodeTriple(Triple triple) { - switch (type.getURI()) { + OntologyNode subjNode = getOrCreateNode(triple.getSubject()); - case "http://www.w3.org/2002/07/owl#Ontology": + switch (triple.getPredicate().getURI()) { + case "http://www.w3.org/1999/02/22-rdf-syntax-ns#type": + handleType(subjNode, triple.getObject()); + break; + case "http://www.w3.org/1999/02/22-rdf-syntax-ns#rest": + case "http://www.w3.org/1999/02/22-rdf-syntax-ns#first": + subjNode.types.add(OntologyNode.NodeType.RDF_LIST); + break; - subjNode.types.add(OntologyNode.NodeType.ONTOLOGY); + case "http://www.w3.org/2002/07/owl#imports": + importUrls.add(triple.getObject().getURI()); + break; + } - if(ontologyNode == null) { - ontologyNode = subjNode; - } + 
subjNode.properties.addProperty(triple.getPredicate().getURI(), PropertyValue.fromJenaNode(triple.getObject())); + } - break; + public void handleType(OntologyNode subjNode, Node type) { + + if(!type.isURI()) + return; + + switch (type.getURI()) { + + case "http://www.w3.org/2002/07/owl#Ontology": + + subjNode.types.add(OntologyNode.NodeType.ONTOLOGY); + + if(ontologyNode == null) { + ontologyNode = subjNode; + } + + break; + + case "http://www.w3.org/2002/07/owl#Class": + case "http://www.w3.org/2000/01/rdf-schema#Class": + case "http://www.w3.org/2004/02/skos/core#Concept": + subjNode.types.add(OntologyNode.NodeType.CLASS); + if(subjNode.uri != null) { + ++ numberOfClasses; + } + + break; + + case "http://www.w3.org/2002/07/owl#AnnotationProperty": + subjNode.types.add(OntologyNode.NodeType.ANNOTATION_PROPERTY); + addAddAndCountProperties(subjNode); + break; + + case "http://www.w3.org/2002/07/owl#ObjectProperty": + subjNode.types.add(OntologyNode.NodeType.OBJECT_PROPERTY); + addAddAndCountProperties(subjNode); + break; + case "http://www.w3.org/2002/07/owl#DatatypeProperty": + subjNode.types.add(OntologyNode.NodeType.DATA_PROPERTY); + addAddAndCountProperties(subjNode); + break; + case "http://www.w3.org/1999/02/22-rdf-syntax-ns#Property": + addAddAndCountProperties(subjNode); + break; + + case "http://www.w3.org/2002/07/owl#NamedIndividual": + subjNode.types.add(OntologyNode.NodeType.INDIVIDUAL); + + if(subjNode.uri != null) { + ++ numberOfIndividuals; + } + + break; + + case "http://www.w3.org/2002/07/owl#Axiom": + subjNode.types.add(OntologyNode.NodeType.AXIOM); + break; + + case "http://www.w3.org/2002/07/owl#Restriction": + subjNode.types.add(OntologyNode.NodeType.RESTRICTION); + break; + + case "http://www.w3.org/2002/07/owl#AllDisjointClasses": + subjNode.types.add(OntologyNode.NodeType.ALL_DISJOINT_CLASSES); + break; + case "http://www.w3.org/2002/07/owl#AllDisjointProperties": + subjNode.types.add(OntologyNode.NodeType.ALL_DISJOINT_PROPERTIES); + break; + case "http://www.w3.org/2002/07/owl#AllDifferent": + subjNode.types.add(OntologyNode.NodeType.ALL_DIFFERENT); + break; + case "http://www.w3.org/2002/07/owl#NegativePropertyAssertion": + subjNode.types.add(OntologyNode.NodeType.NEGATIVE_PROPERTY_ASSERTION); + break; + + case "http://www.w3.org/2000/01/rdf-schema#Datatype": + subjNode.types.add(OntologyNode.NodeType.DATATYPE); + break; + } + } - case "http://www.w3.org/2002/07/owl#Class": - case "http://www.w3.org/2000/01/rdf-schema#Class": - case "http://www.w3.org/2004/02/skos/core#Concept": - subjNode.types.add(OntologyNode.NodeType.CLASS); - if(subjNode.uri != null) { - ++ numberOfClasses; - } + private void addAddAndCountProperties(OntologyNode subjNode) { + subjNode.types.add(OntologyNode.NodeType.PROPERTY); - break; + if (subjNode.uri != null) { + ++numberOfProperties; + } + } - case "http://www.w3.org/2002/07/owl#AnnotationProperty": - subjNode.types.add(OntologyNode.NodeType.ANNOTATION_PROPERTY); - addAddAndCountProperties(subjNode); - break; + @Override + public void quad(Quad quad) { - case "http://www.w3.org/2002/07/owl#ObjectProperty": - subjNode.types.add(OntologyNode.NodeType.OBJECT_PROPERTY); - addAddAndCountProperties(subjNode); - break; - case "http://www.w3.org/2002/07/owl#DatatypeProperty": - subjNode.types.add(OntologyNode.NodeType.DATA_PROPERTY); - addAddAndCountProperties(subjNode); - break; - case "http://www.w3.org/1999/02/22-rdf-syntax-ns#Property": - addAddAndCountProperties(subjNode); - break; + } - case 
"http://www.w3.org/2002/07/owl#NamedIndividual": - subjNode.types.add(OntologyNode.NodeType.INDIVIDUAL); + @Override + public void base(String s) { - if(subjNode.uri != null) { - ++ numberOfIndividuals; - } + } - break; + @Override + public void prefix(String s, String s1) { - case "http://www.w3.org/2002/07/owl#Axiom": - subjNode.types.add(OntologyNode.NodeType.AXIOM); - break; + } + + @Override + public void finish() { + + } + + + public String nodeIdFromJenaNode(Node node) { + if(node.isURI()) { + return node.getURI(); + } + if(node.isBlank()) { + return node.getBlankNodeId().toString(); + } + throw new RuntimeException("unknown node type"); + } + + public String nodeIdFromPropertyValue(PropertyValue node) { + if(node.getType() == PropertyValue.Type.URI) { + return ((PropertyValueURI) node).getUri(); + } + if(node.getType() == PropertyValue.Type.BNODE) { + return ((PropertyValueBNode) node).getId(); + } + throw new RuntimeException("unknown node type"); + } + + + + private static void writeGenericValue(JsonWriter writer, Object val) throws IOException { + + if(val instanceof Collection) { + writer.beginArray(); + for(Object entry : ((Collection) val)) { + writeGenericValue(writer, entry); + } + writer.endArray(); + } else if(val instanceof Map) { + Map map = new TreeMap ( (Map) val ); + writer.beginObject(); + for(String k : map.keySet()) { + writer.name(k); + writeGenericValue(writer, map.get(k)); + } + writer.endObject(); + } else if(val instanceof String) { + writer.value((String) val); + } else if(val instanceof Integer) { + writer.value((Integer) val); + } else if(val instanceof Double) { + writer.value((Double) val); + } else if(val instanceof Long) { + writer.value((Long) val); + } else if(val instanceof Boolean) { + writer.value((Boolean) val); + } else if(val == null) { + writer.nullValue(); + } else { + throw new RuntimeException("Unknown value type"); + } + + } - case "http://www.w3.org/2002/07/owl#Restriction": - subjNode.types.add(OntologyNode.NodeType.RESTRICTION); - break; - - case "http://www.w3.org/2002/07/owl#AllDisjointClasses": - subjNode.types.add(OntologyNode.NodeType.ALL_DISJOINT_CLASSES); - break; - case "http://www.w3.org/2002/07/owl#AllDisjointProperties": - subjNode.types.add(OntologyNode.NodeType.ALL_DISJOINT_PROPERTIES); - break; - case "http://www.w3.org/2002/07/owl#AllDifferent": - subjNode.types.add(OntologyNode.NodeType.ALL_DIFFERENT); - break; - case "http://www.w3.org/2002/07/owl#NegativePropertyAssertion": - subjNode.types.add(OntologyNode.NodeType.NEGATIVE_PROPERTY_ASSERTION); - break; - - case "http://www.w3.org/2000/01/rdf-schema#Datatype": - subjNode.types.add(OntologyNode.NodeType.DATATYPE); - break; - } - } - - private void addAddAndCountProperties(OntologyNode subjNode) { - subjNode.types.add(OntologyNode.NodeType.PROPERTY); - - if (subjNode.uri != null) { - ++numberOfProperties; - } - } - - @Override - public void quad(Quad quad) { - - } - - @Override - public void base(String s) { - - } - - @Override - public void prefix(String s, String s1) { - - } - - @Override - public void finish() { - - } - - - public String nodeIdFromJenaNode(Node node) { - if(node.isURI()) { - return node.getURI(); - } - if(node.isBlank()) { - return node.getBlankNodeId().toString(); - } - throw new RuntimeException("unknown node type"); - } - - public String nodeIdFromPropertyValue(PropertyValue node) { - if(node.getType() == PropertyValue.Type.URI) { - return ((PropertyValueURI) node).getUri(); - } - if(node.getType() == PropertyValue.Type.BNODE) { - return 
((PropertyValueBNode) node).getId(); - } - throw new RuntimeException("unknown node type"); - } - - - - private static void writeGenericValue(JsonWriter writer, Object val) throws IOException { - - if(val instanceof Collection) { - writer.beginArray(); - for(Object entry : ((Collection) val)) { - writeGenericValue(writer, entry); - } - writer.endArray(); - } else if(val instanceof Map) { - Map map = new TreeMap ( (Map) val ); - writer.beginObject(); - for(String k : map.keySet()) { - writer.name(k); - writeGenericValue(writer, map.get(k)); - } - writer.endObject(); - } else if(val instanceof String) { - writer.value((String) val); - } else if(val instanceof Integer) { - writer.value((Integer) val); - } else if(val instanceof Double) { - writer.value((Double) val); - } else if(val instanceof Long) { - writer.value((Long) val); - } else if(val instanceof Boolean) { - writer.value((Boolean) val); - } else if(val == null) { - writer.nullValue(); - } else { - throw new RuntimeException("Unknown value type"); - } - - } - - - public boolean areSubgraphsIsomorphic(PropertyValue rootNodeA, PropertyValue rootNodeB) { - - OntologyNode a = nodes.get(nodeIdFromPropertyValue(rootNodeA)); - OntologyNode b = nodes.get(nodeIdFromPropertyValue(rootNodeB)); - - if(! a.properties.getPropertyPredicates().equals( b.properties.getPropertyPredicates() )) { - return false; - } - - for(String predicate : a.properties.getPropertyPredicates()) { - List valuesA = a.properties.getPropertyValues(predicate); - List valuesB = b.properties.getPropertyValues(predicate); - - if(valuesA.size() != valuesB.size()) - return false; - - for(int n = 0; n < valuesA.size(); ++ n) { - PropertyValue valueA = valuesA.get(n); - PropertyValue valueB = valuesB.get(n); - - if(valueA.getType() != PropertyValue.Type.BNODE) { - // non bnode value, simple case - if(!valueA.equals(valueB)) { - return false; - } - } - - // bnode value - - if(valueB.getType() != PropertyValue.Type.BNODE) - return false; - - if(!areSubgraphsIsomorphic(valueA, valueB)) - return false; - } - } - - return true; - } - - - public OntologyNode getNodeForPropertyValue(PropertyValue value) { - - switch(value.getType()) { - case URI: - return nodes.get( ((PropertyValueURI) value).getUri() ); - case BNODE: - return nodes.get( ((PropertyValueBNode) value).getId() ); - default: - throw new RuntimeException("not a node"); - } - } + + public boolean areSubgraphsIsomorphic(PropertyValue rootNodeA, PropertyValue rootNodeB) { + + OntologyNode a = nodes.get(nodeIdFromPropertyValue(rootNodeA)); + OntologyNode b = nodes.get(nodeIdFromPropertyValue(rootNodeB)); + + if(! 
a.properties.getPropertyPredicates().equals( b.properties.getPropertyPredicates() )) {
+            return false;
+        }
+
+        for(String predicate : a.properties.getPropertyPredicates()) {
+            List valuesA = a.properties.getPropertyValues(predicate);
+            List valuesB = b.properties.getPropertyValues(predicate);
+
+            if(valuesA.size() != valuesB.size())
+                return false;
+
+            for(int n = 0; n < valuesA.size(); ++ n) {
+                PropertyValue valueA = valuesA.get(n);
+                PropertyValue valueB = valuesB.get(n);
+
+                if(valueA.getType() != PropertyValue.Type.BNODE) {
+                    // non bnode value, simple case; equal values need no further comparison
+                    if(!valueA.equals(valueB)) {
+                        return false;
+                    }
+                    continue;
+                }
+
+                // bnode value
+
+                if(valueB.getType() != PropertyValue.Type.BNODE)
+                    return false;
+
+                if(!areSubgraphsIsomorphic(valueA, valueB))
+                    return false;
+            }
+        }
+
+        return true;
+    }
+
+
+    public OntologyNode getNodeForPropertyValue(PropertyValue value) {
+
+        switch(value.getType()) {
+            case URI:
+                return nodes.get( ((PropertyValueURI) value).getUri() );
+            case BNODE:
+                return nodes.get( ((PropertyValueBNode) value).getId() );
+            default:
+                throw new RuntimeException("not a node");
+        }
+    }
 }
diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/DisjointWithAnnotator.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/DisjointWithAnnotator.java
index 6476954dd..56236f303 100644
--- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/DisjointWithAnnotator.java
+++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/DisjointWithAnnotator.java
@@ -1,86 +1,82 @@
 package uk.ac.ebi.rdf2json.annotators;

-import java.util.*;
-import java.util.stream.Collectors;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import uk.ac.ebi.rdf2json.OntologyGraph;
 import uk.ac.ebi.rdf2json.OntologyNode;
-import uk.ac.ebi.rdf2json.OntologyNode.NodeType;
-import uk.ac.ebi.rdf2json.annotators.helpers.PropertyCollator;
 import uk.ac.ebi.rdf2json.helpers.RdfListEvaluator;
 import uk.ac.ebi.rdf2json.properties.PropertyValue;
 import uk.ac.ebi.rdf2json.properties.PropertyValueURI;
+import java.util.List;
+import java.util.stream.Collectors;
+
 public class DisjointWithAnnotator {
+    private static final Logger logger = LoggerFactory.getLogger(DisjointWithAnnotator.class);

     public static void annotateDisjointWith(OntologyGraph graph) {
         long startTime3 = System.nanoTime();

-        for (String id : graph.nodes.keySet()) {
+        for(String id : graph.nodes.keySet()) {
             OntologyNode c = graph.nodes.get(id);
             if (c.types.contains(OntologyNode.NodeType.ALL_DISJOINT_CLASSES)) {
-                PropertyValue membersList = c.properties.getPropertyValue("http://www.w3.org/2002/07/owl#members");
-                List members = RdfListEvaluator
-                        .evaluateRdfList(graph.getNodeForPropertyValue(membersList), graph);
+                PropertyValue membersList = c.properties.getPropertyValue("http://www.w3.org/2002/07/owl#members");
+                List members = RdfListEvaluator.evaluateRdfList(graph.getNodeForPropertyValue(membersList), graph);

-                List classNodes = members.stream().map(val -> graph.getNodeForPropertyValue(val))
-                        .collect(Collectors.toList());
+                List classNodes = members.stream().map(val -> graph.getNodeForPropertyValue(val)).collect(Collectors.toList());

-                for (OntologyNode classNodeA : classNodes) {
-                    for (OntologyNode classNodeB : classNodes) {
-                        if (classNodeA != null && classNodeB != null && classNodeB.uri != classNodeA.uri) {
+                for(OntologyNode classNodeA : classNodes) {
+                    for(OntologyNode classNodeB : classNodes) {
+                        if(classNodeA != null && classNodeB != null && classNodeB.uri != classNodeA.uri) {
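+                            // members with the same URI resolve to the same OntologyNode instance
+                            // (the graph's node map is keyed by URI), so the reference comparison
+                            // above is enough to skip pairing a class with itself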
classNodeA.properties.addProperty("http://www.w3.org/2002/07/owl#disjointWith",
-                                    PropertyValueURI.fromUri(classNodeB.uri));
+                                PropertyValueURI.fromUri(classNodeB.uri));
                         }
                     }
                 }
             } else if (c.types.contains(OntologyNode.NodeType.ALL_DISJOINT_PROPERTIES)) {
-                PropertyValue membersList = c.properties.getPropertyValue("http://www.w3.org/2002/07/owl#members");
-                List members = RdfListEvaluator
-                        .evaluateRdfList(graph.getNodeForPropertyValue(membersList), graph);
+                PropertyValue membersList = c.properties.getPropertyValue("http://www.w3.org/2002/07/owl#members");
+                List members = RdfListEvaluator.evaluateRdfList(graph.getNodeForPropertyValue(membersList), graph);

-                List propertyNodes = members.stream().map(val -> graph.getNodeForPropertyValue(val))
-                        .collect(Collectors.toList());
+                List propertyNodes = members.stream().map(val -> graph.getNodeForPropertyValue(val)).collect(Collectors.toList());

-                for (OntologyNode propertyNodeA : propertyNodes) {
-                    for (OntologyNode propertyNodeB : propertyNodes) {
-                        if (propertyNodeB.uri != propertyNodeA.uri) {
+                for(OntologyNode propertyNodeA : propertyNodes) {
+                    for(OntologyNode propertyNodeB : propertyNodes) {
+                        if(propertyNodeB.uri != propertyNodeA.uri) {
                             propertyNodeA.properties.addProperty("http://www.w3.org/2002/07/owl#propertyDisjointWith",
-                                    PropertyValueURI.fromUri(propertyNodeB.uri));
+                                PropertyValueURI.fromUri(propertyNodeB.uri));
                         }
                     }
                 }
-
             } else if (c.types.contains(OntologyNode.NodeType.ALL_DIFFERENT)) {
-                PropertyValue membersList = c.properties
-                        .getPropertyValue("http://www.w3.org/2002/07/owl#distinctMembers");
+                PropertyValue membersList = c.properties.getPropertyValue("http://www.w3.org/2002/07/owl#distinctMembers");
                 if (membersList != null) {
-                    List members = RdfListEvaluator
-                            .evaluateRdfList(graph.getNodeForPropertyValue(membersList), graph);
-
-                    List individualNodes = members.stream().map(val -> graph.getNodeForPropertyValue(val))
-                            .filter(val -> val != null).collect(Collectors.toList());
-
-                    for (OntologyNode individualNodeA : individualNodes) {
-                        for (OntologyNode individualNodeB : individualNodes) {
-                            if (individualNodeB.uri != individualNodeA.uri) {
-                                individualNodeA.properties.addProperty("http://www.w3.org/2002/07/owl#differentFrom",
-                                        PropertyValueURI.fromUri(individualNodeB.uri));
-                            }
+                    List members = RdfListEvaluator.evaluateRdfList(graph.getNodeForPropertyValue(membersList), graph);
+                    List individualNodes = members.stream()
+                            .map(val -> graph.getNodeForPropertyValue(val))
+                            .filter(val -> val != null)
+                            .collect(Collectors.toList());
+
+                    for(OntologyNode individualNodeA : individualNodes) {
+                        for(OntologyNode individualNodeB : individualNodes) {
+                            if(individualNodeB.uri != individualNodeA.uri) {
+                                individualNodeA.properties.addProperty("http://www.w3.org/2002/07/owl#differentFrom",
+                                        PropertyValueURI.fromUri(individualNodeB.uri));
                             }
                         }
                     }
+                }
             }
         }
         long endTime3 = System.nanoTime();
-        System.out.println("annotate disjointWith: " + ((endTime3 - startTime3) / 1000 / 1000 / 1000));
+        logger.info("annotate disjointWith: {}", ((endTime3 - startTime3) / 1000 / 1000 / 1000));
     }
 }

From 841cb43742006bb5e1232078c35179772a61a6ef Mon Sep 17 00:00:00 2001
From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com>
Date: Thu, 5 Sep 2024 18:47:43 +0200
Subject: [PATCH 078/146] set convertToRDF option not to have argument for #46

---
 .../src/main/java/uk/ac/ebi/rdf2json/RDF2JSON.java | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/RDF2JSON.java
b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/RDF2JSON.java
index 8fbc4b97b..86765c5a3 100644
--- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/RDF2JSON.java
+++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/RDF2JSON.java
@@ -43,7 +43,7 @@ public static void main(String[] args) throws IOException {
         Option output = new Option(null, "output", true, "JSON output filename");
         output.setRequired(true);
         options.addOption(output);
-        
+
         Option loadLocalFiles = new Option(null, "loadLocalFiles", false, "Whether or not to load local files (unsafe, for testing)");
         loadLocalFiles.setRequired(false);
         options.addOption(loadLocalFiles);
@@ -51,8 +51,8 @@ public static void main(String[] args) throws IOException {
         Option noDates = new Option(null, "noDates", false, "Set to leave LOADED dates blank (for testing)");
         noDates.setRequired(false);
         options.addOption(noDates);
-        
-        Option rdfConvert = new Option(null, "convertToRDF", true, "Whether or not to convert the ontology to RDF/Xml format before parsing.");
+
+        Option rdfConvert = new Option(null, "convertToRDF", false, "Whether or not to convert the ontology to RDF/Xml format before parsing.");
         rdfConvert.setRequired(false);
         options.addOption(rdfConvert);
@@ -184,10 +184,10 @@ public static void main(String[] args) throws IOException {
             actualReader.beginObject();

             while (scanReader.peek() != JsonToken.END_OBJECT) {
-                
+
                 String name = scanReader.nextName();
                 actualReader.nextName();
-                
+
                 if (name.equals("ontologies")) {
                     scanReader.beginArray();

From e66ea8aafb3054a9b9f9108f815faccb322cc5c3 Mon Sep 17 00:00:00 2001
From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com>
Date: Thu, 5 Sep 2024 20:46:11 +0200
Subject: [PATCH 079/146] assigned purl as the iri to ontologies with no iri
 for #48

---
 .../src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java
index 97258d456..542574234 100644
--- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java
+++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java
@@ -400,7 +400,10 @@ public void write(JsonWriter writer) throws Throwable {
         writer.value(ontologyId);

         writer.name("iri");
-        writer.value(ontologyNode.uri);
+        if(ontologyNode.uri != null)
+            writer.value(ontologyNode.uri);
+        else
+            writer.value(config.get("ontology_purl").toString());

         for (String configKey : config.keySet()) {
             Object configVal = config.get(configKey);

From 62d25e025f6e24a15798654a85cb3b0ffc5380db Mon Sep 17 00:00:00 2001
From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com>
Date: Tue, 10 Sep 2024 16:30:15 +0200
Subject: [PATCH 080/146] made rdf2json independent of the execution directory
 and default input ontologies for #46

---
 .../uk/ac/ebi/rdf2json/OntologyGraph.java     |   23 +-
 .../rdf2json/src/main/resources/result.owl    |  854 ---------
 .../rdf2json/src/main/resources/result.ttl    | 1529 -----------------
 3 files changed, 19 insertions(+), 2387 deletions(-)
 delete mode 100644 dataload/rdf2json/src/main/resources/result.owl
 delete mode 100644 dataload/rdf2json/src/main/resources/result.ttl

diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java
index 97258d456..9bde2fd9d 100644
--- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java
+++
b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java @@ -19,7 +19,9 @@ import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; +import java.net.URISyntaxException; import java.nio.file.Files; +import java.nio.file.Path; import java.nio.file.Paths; import java.util.*; import java.util.stream.Collectors; @@ -102,15 +104,18 @@ private void parseRDF(String url, boolean convertToRDF) { } else { logger.debug("Downloading (no predownload path provided) {}", url); if (convertToRDF) { - String outputFile = "./src/main/resources/result"; + String outputFile = "result"; OWLOntology ont = convertOntologyToRDF(url, outputFile); OWLDocumentFormat odf = ont.getOWLOntologyManager().getOntologyFormat(ont); String lang1 = odf.getKey(); String ext = ".owl"; if (lang1.contains("Turtle")) ext = ".ttl"; - url = outputFile + ext; + String fileNameInUrl = outputFile + ext; + Path resourceDirectory = Paths.get(OntologyGraph.class.getProtectionDomain().getCodeSource().getLocation().toURI()).getParent(); + url = Paths.get(resourceDirectory.resolve(fileNameInUrl).toUri()).toString(); } + sourceFileTimestamp = System.currentTimeMillis(); createParser(null).source(url).parse(this); } @@ -119,6 +124,8 @@ private void parseRDF(String url, boolean convertToRDF) { throw new RuntimeException(e); } catch (IOException e) { throw new RuntimeException(e); + } catch (URISyntaxException e) { + throw new RuntimeException(e); } } @@ -169,11 +176,17 @@ private OWLOntology convertOntologyToRDF(String url, String outputFile) throws I else if (!lang1.contains("RDF")) { isRDF = false; OWLDocumentFormat odf1 = new OWLXMLDocumentFormat(); - fos = new FileOutputStream(outputFile + ext); + Path resourceDirectory = Paths.get(OntologyGraph.class.getProtectionDomain().getCodeSource().getLocation().toURI()).getParent(); + String filePath = resourceDirectory.resolve(outputFile+ext).toString(); + fos = new FileOutputStream(filePath); ont.saveOntology(odf1, fos); } if (isRDF) { - fos = new FileOutputStream(outputFile + ext); + OntologyGraph.class.getProtectionDomain().getCodeSource().getLocation() + .toURI().getPath(); + Path resourceDirectory = Paths.get(OntologyGraph.class.getProtectionDomain().getCodeSource().getLocation().toURI()).getParent(); + String filePath = resourceDirectory.resolve(outputFile+ext).toString(); + fos = new FileOutputStream(filePath); ont.saveOntology(fos); } } catch (OWLOntologyCreationException e) { @@ -182,6 +195,8 @@ else if (!lang1.contains("RDF")) { e.printStackTrace(); } catch (OWLOntologyStorageException e) { e.printStackTrace(); + } catch (URISyntaxException e) { + throw new RuntimeException(e); } finally { if (fos != null) fos.close(); diff --git a/dataload/rdf2json/src/main/resources/result.owl b/dataload/rdf2json/src/main/resources/result.owl deleted file mode 100644 index d205a6a80..000000000 --- a/dataload/rdf2json/src/main/resources/result.owl +++ /dev/null @@ -1,854 +0,0 @@ - - - - - - - This ontology is designed to represent many of the relations (i.e. object properties) that hold between entities at the level of the mid-level Common Core Ontologies. - Extended Relation Ontology - Version 1.5 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - An Alternative Label that consists of a shortened or abbreviated form of the rdfs:label and is used to denote the entity. 
- http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - acronym - - - - - - - - - A term or phrase that may be used in place of the stated rdfs:label to denote the entity in question. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - alternative label - - - - - - - - The name and description of the license under which the .owl file is released. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - code license - - - - - - - - The name and description of the license under which the ideas, concepts and other informational content expressed in the .owl file are released. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - content license - - - - - - - - An assertion of copyright - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - copyright - - - - - - - - A natural language explication of the meaning of the term. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - definition - - - - - - - - A citation of where all or some of the information used to create the term's Definition was acquired from. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - definition source - - - - - - - - A name or other identifier that is used to designate an individual. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - designator annotation - - - - - - - - An Acronym that is used by a Doctrinal Source to denote the entity. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - doctrinal acronym - - - - - - - - - A Definition that is taken directly from a Doctrinal Source. - There is only one definition for any given term in an ontology; however, a Doctrinal Definition may be provided in addition to the asserted Definition if the preservation of this information is important. When both a Definition and a Doctrinal Definition are provided for a term, the Definition takes precedence. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - doctrinal definition - - - - - - - - - An Alternative Label that consists of the preferred term or phrase used by a Doctrinal Source to denote the entity. - When the cco:doctrinal_label is identical to the rdfs:label, the cco:doctrinal_label annotation is superfluous. As a subclass of 'alternative label', 'doctrinal label' is intended to be used to provide additional information about the entity when its preferred doctrinal designator is ambiguous or otherwise inappropriate for use as the rdfs:label. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - doctrinal label - - - - - - - - - A Definition Source that consists of a formalized doctrine in which the term is authoritatively defined. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - doctrinal source - - - - - - - - - A clarification or further explanation of a term beyond what is included in the Definition or which is used when the term is primitive such that no non-circular definition can be given for it. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - elucidation - - - - - - - - A phrase, sentence or set of terms intended to convey the conventional usage of the term. 
- http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - example of usage - - - - - - - - A relation between an information content entity and a widely used measurement unit of the token used to express it. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - has token unit - - - - - - - - The text of an HTTP request that can be sent to a SPARQL Protocol service. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - http query string - - - - - - - - A interval measurement value of an instance of a quality, realizable or process profile - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - interval measurement annotation - - - - - - - - - An annotation property that links a class, property, or named individual to the URI of the ontology where it is located. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - is curated in ontology - - - - - - - - A relation between an information content entity and a widely used token used to express it. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - is tokenized by - - - - - - - - A measurement value of an instance of a quality, reazlizable or process profile - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - measurement annotation - - - - - - - - A nominal measurement value of an instance of a quality, realizable or process profile - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - nominal measurement annotation - - - - - - - - - An ordinal measurement value of an instance of a quality, realizable or process profile - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - ordinal measurement annotation - - - - - - - - - The text of a query that is associated with a class - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - query text - - - - - - - - A ratio measurement value of an instance of a quality, realizable or process profile - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - ratio measurement annotation - - - - - - - - - The name of the Term Editor who added the term to the ontology. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - term creator - - - - - - - - - The name of a person who contributed to the development or enhancement of the term. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - term editor - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - y is_accessory_in x iff x is an instance of Process and y is an instance of Agent, such that y assists another agent in the commission of x, and y was not located at the location of x when x occurred, and y was not an agent_in x. - http://en.wikipedia.org/wiki/Accessory_(legal_term) - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - accessory in - - - - - - - - - - - - An agent a1 is accomplice_in some Processual Entity p1 iff a1 assists in the commission of p1, is located at the location of p1, but is not agent_in p1. 
- https://en.wikipedia.org/w/index.php?title=Accomplice&oldid=1002047204 - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - accomplice in - - - - - - - - - - - - p affects c iff p is an instance of a Process and c is an instance of a Continuant, such that p influences c in some manner, most often by producing a change in c. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - affects - - - - - - - - - - - - - - - - - - x aggregate_bearer_of y iff x is an instance of Object Aggregate and y is an instance of Specifically Dependent Continuant and z is an instance of Object, such that z bearer of y, and all other members of x are bearers of a unique instance of the same type as y. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - aggregate bearer of - - - - - - - - - - - - x aggregate_has_disposition y iff x is an instance of Object Aggregate and y is an instance of Disposition, such that x aggregate_bearer_of y. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - aggregate has disposition - - - - - - - - - - - x aggregate_has_quality y iff x is an instance of Object Aggregate and y is an instance of Quality, such that x aggregate_bearer_of y. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - aggregate has quality - - - - - - - - - - - x aggregate_has_role y iff x is an instance of Object Aggregate and y is an instance of Role, such that x aggregate_bearer_of y. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - aggregate has role - - - - - - - - - - - x caused_by y iff x and y are instances of occurrents, and x is a consequence of y. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - caused by - - - - - - - - - - - x disposition_of_aggregate y iff y is an instance of Object Aggregate and x is an instance of Disposition, such that x disposition_of_aggregate y. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - disposition of aggregate - - - - - - - - - - - 2022-12-30T21:32:27-05:00 - https://cubrc.org - A relation where one process disrupts another process from occurring as it would have. - A process can disrupt another process from occurring as it would have by 1) preventing a disposition or role from being realized by that process, 2) lowering the grade of the process, or 3) stopping the process from continuing to occur. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/AgentOntology - disrupts - To lower the grade of a process is to lower the quality of a process according to some standard, for example when realizing a capability or a function. - disrupts - - - - - - - - - - - x has_accessory y iff x is an instance of Process and y is an instance of Agent, such that y assists another agent in the commission of x, and y was not located at the location of x when x occurred, and y was not an agent_in x. - http://en.wikipedia.org/wiki/Accessory_(legal_term) - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - has accessory - - - - - - - - - - - A Processual Entity p1 has_accomplice some agent a1 iff a1 assists in the commission of p1, is located at the location of p1, but is not agent_in p1. 
- https://en.wikipedia.org/w/index.php?title=Accomplice&oldid=1002047204 - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - has accomplice - - - - - - - - - - - - y has_input x iff x is an instance of Continuant and y is an instance of Process, such that the presence of x at the beginning of y is a necessary condition for the start of y. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - has input - - - - - - - - - - An instance of an Object Aggregate 'has member of located in' an instance of some material entity if and only if every member of that Aggregate is located in the same instance of that material entity. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - has member of located in - - - - - - - - - - If p is a process and c is a continuant, then p has object c if and only if the c is part of the projected state that the agent intends to achieve by performing p. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - has object - - - - - - - - - - - - y has_output x iff x is an instance of Continuant and y is an instance of Process, such that the presence of x at the end of y is a necessary condition for the completion of y. - https://en.wikipedia.org/w/index.php?title=IPO_model&oldid=1024398398 - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - has output - - - - - - - - - - - - x has_process_part y iff x and y are instances of Process, such that y occurs during the temporal interval of x, and y either provides an input to x or receives an output of x, or both. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - has process part - - - - - - - - - - - - - - - - - x inheres_in_aggregate y iff x is an instance of Specifically Dependent Continuant and y is an instance of Object Aggregate and z is an instance of Object, such that z bearer_of x, and all other members of y are bearers of a unique instance of the same type as x. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - inheres in aggregate - - - - - - - - - - - c is_affected_by p iff p is an instance of a Process and c is an instance of a Continuant, such that p influences c in some manner, most often by producing a change in c. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - is affected by - - - - - - - - - - x is_cause_of y iff x and y are instances of occurrents, and y is a consequence of x. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - is cause of - - - - - - - - - - 2022-12-30T21:32:27-05:00 - https://cubrc.org - Inverse of disrupts. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - is disrupted by - is disrupted by - - - - - - - - - - - x is_input_of y iff x is an instance of Continuant and y is an instance of Process, such that the presence of x at the beginning of y is a necessary condition for the start of y. 
- http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - is input of - - - - - - - - - - - - An object o is made of an object m when m is the material that o consists of and that material does not undergo a change of kind during the creation of o - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - is made of - - - - - - - - - - An object m is material of an object o when m is the material of which o consists and that material does not undergo a change of kind during the creation of o - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - is material of - - - - - - - - - If p is a process and c is a continuant, then c is object of p if and only if the c is part of the projected state that the agent intends to achieve by performing p. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - is object of - - - - - - - - - - - x is_output_of y iff x is an instance of Continuant and y is an instance of Process, such that the presence of x at the end of y is a necessary condition for the completion of y. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - is output of - - - - - - - - - - - x is_part_of_process y iff x and y are instances of Process, such that x occurs during the temporal interval of y, and x either provides an input to y or receives an output of y. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - is part of process - - - - - - - - - - - A continuant c1 is a predecessor of some continuant c2 iff there is some process p1 and c1 is an input to p1 and c2 is an output of p1. - More informally, c1 is a predecessor of c2 iff c1 has been followed or replaced by c2. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - is predecessor of - - - - - - - - - - - - x is_site_of y iff x is an instance of Site and y is an instance of Process, such that y occurs in x. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - is site of - - - - - - - - - - A continuant c2 is a successor of some continuant c1 iff there is some process p1 and c1 is an input to p1 and c2 is an output of p1. Inverse of is predecessor. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - is successor of - - - - - - - - - - - - - - - - - x is_temporal_region_of y iff y is an instance of a process or process boundary and x is an instance of a temporal region, such that the duration of x temporally projects on y. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - is temporal region of - Leaving this is in ERO for now since BFO2020 has no inverse of occupies-temporal-region yet. - - - - - - - - - - - x occurs_at y iff x is an instance of Process and y is an instance of Site, such that x occurs in y. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - occurs at - - - - - - - - - - x process_started_by y iff x and y are instances of processes, and x is caused_by y, and i is an instance of a temporal instant, and r is an instant of a temporal interval, and x has starting instance i, and y occurs on r, and r interval contains i. - A process x is started by another process y when y causes x while y is still occurring. 
- http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - process started by - - - - - - - - - x process_starts y iff x and y are instances of processes, and x is_cause_of y, and i is an instance of a temporal instant, and r is an instant of a temporal interval, and y has starting instance i, and x occurs on r, and r interval contains i. - A process x starts another process y when x causes y while x is still occurring. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - process starts - - - - - - - - - - - x quality_of_aggregate y iff y is an instance of Object Aggregate and x is an instance of Quality, such that x disposition_of_aggregate y. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - quality of aggregate - - - - - - - - - - - x role_of_aggregate y iff y is an instance of Object Aggregate and x is an instance of Role, such that x disposition_of_aggregate y. - http://www.ontologyrepository.com/CommonCoreOntologies/Mid/ExtendedRelationOntology - role of aggregate - - - - - - - diff --git a/dataload/rdf2json/src/main/resources/result.ttl b/dataload/rdf2json/src/main/resources/result.ttl deleted file mode 100644 index 76c66efb0..000000000 --- a/dataload/rdf2json/src/main/resources/result.ttl +++ /dev/null @@ -1,1529 +0,0 @@ -@base . -@prefix : . -@prefix dc: . -@prefix owl: . -@prefix rdf: . -@prefix xml: . -@prefix xsd: . -@prefix dc11: . -@prefix rdfs: . -@prefix skos: . -# -# -# ################################################################# -# # -# # Annotation properties -# # -# ################################################################# -# -# -# http://purl.org/dc/elements/1.1/contributor -# -# http://purl.org/dc/elements/1.1/identifier -# -# http://purl.org/dc/elements/1.1/license -# -# http://purl.org/dc/terms/description -# -# http://purl.org/dc/terms/license -# -# http://purl.org/dc/terms/title -# -# http://www.w3.org/2004/02/skos/core#altLabel -# -# http://www.w3.org/2004/02/skos/core#definition -# -# http://www.w3.org/2004/02/skos/core#example -# -# http://www.w3.org/2004/02/skos/core#prefLabel -# -# http://www.w3.org/2004/02/skos/core#scopeNote -# -# -# -# ################################################################# -# # -# # Object Properties -# # -# ################################################################# -# -# -# http://purl.obolibrary.org/obo/BFO_0000054 -# -# http://purl.obolibrary.org/obo/BFO_0000055 -# -# http://purl.obolibrary.org/obo/BFO_0000056 -# -# http://purl.obolibrary.org/obo/BFO_0000057 -# -# http://purl.obolibrary.org/obo/BFO_0000058 -# -# http://purl.obolibrary.org/obo/BFO_0000059 -# -# http://purl.obolibrary.org/obo/BFO_0000062 -# -# http://purl.obolibrary.org/obo/BFO_0000063 -# -# http://purl.obolibrary.org/obo/BFO_0000066 -# -# http://purl.obolibrary.org/obo/BFO_0000084 -# -# http://purl.obolibrary.org/obo/BFO_0000101 -# -# http://purl.obolibrary.org/obo/BFO_0000108 -# -# http://purl.obolibrary.org/obo/BFO_0000115 -# -# http://purl.obolibrary.org/obo/BFO_0000117 -# -# http://purl.obolibrary.org/obo/BFO_0000121 -# -# http://purl.obolibrary.org/obo/BFO_0000124 -# -# http://purl.obolibrary.org/obo/BFO_0000127 -# -# http://purl.obolibrary.org/obo/BFO_0000129 -# -# http://purl.obolibrary.org/obo/BFO_0000132 -# -# http://purl.obolibrary.org/obo/BFO_0000139 -# -# http://purl.obolibrary.org/obo/BFO_0000153 -# -# http://purl.obolibrary.org/obo/BFO_0000171 -# -# http://purl.obolibrary.org/obo/BFO_0000176 -# -# 
http://purl.obolibrary.org/obo/BFO_0000178 -# -# http://purl.obolibrary.org/obo/BFO_0000183 -# -# http://purl.obolibrary.org/obo/BFO_0000184 -# -# http://purl.obolibrary.org/obo/BFO_0000185 -# -# http://purl.obolibrary.org/obo/BFO_0000194 -# -# http://purl.obolibrary.org/obo/BFO_0000195 -# -# http://purl.obolibrary.org/obo/BFO_0000196 -# -# http://purl.obolibrary.org/obo/BFO_0000197 -# -# http://purl.obolibrary.org/obo/BFO_0000199 -# -# http://purl.obolibrary.org/obo/BFO_0000200 -# -# http://purl.obolibrary.org/obo/BFO_0000210 -# -# http://purl.obolibrary.org/obo/BFO_0000216 -# -# http://purl.obolibrary.org/obo/BFO_0000218 -# -# http://purl.obolibrary.org/obo/BFO_0000221 -# -# http://purl.obolibrary.org/obo/BFO_0000222 -# -# http://purl.obolibrary.org/obo/BFO_0000223 -# -# http://purl.obolibrary.org/obo/BFO_0000224 -# -# -# -# ################################################################# -# # -# # Classes -# # -# ################################################################# -# -# -# http://purl.obolibrary.org/obo/BFO_0000001 -# -# http://purl.obolibrary.org/obo/BFO_0000002 -# -# http://purl.obolibrary.org/obo/BFO_0000003 -# -# http://purl.obolibrary.org/obo/BFO_0000004 -# -# http://purl.obolibrary.org/obo/BFO_0000006 -# -# http://purl.obolibrary.org/obo/BFO_0000008 -# -# http://purl.obolibrary.org/obo/BFO_0000009 -# -# http://purl.obolibrary.org/obo/BFO_0000011 -# -# http://purl.obolibrary.org/obo/BFO_0000015 -# -# http://purl.obolibrary.org/obo/BFO_0000016 -# -# http://purl.obolibrary.org/obo/BFO_0000017 -# -# http://purl.obolibrary.org/obo/BFO_0000018 -# -# http://purl.obolibrary.org/obo/BFO_0000019 -# -# http://purl.obolibrary.org/obo/BFO_0000020 -# -# http://purl.obolibrary.org/obo/BFO_0000023 -# -# http://purl.obolibrary.org/obo/BFO_0000024 -# -# http://purl.obolibrary.org/obo/BFO_0000026 -# -# http://purl.obolibrary.org/obo/BFO_0000027 -# -# http://purl.obolibrary.org/obo/BFO_0000028 -# -# http://purl.obolibrary.org/obo/BFO_0000029 -# -# http://purl.obolibrary.org/obo/BFO_0000030 -# -# http://purl.obolibrary.org/obo/BFO_0000031 -# -# http://purl.obolibrary.org/obo/BFO_0000034 -# -# http://purl.obolibrary.org/obo/BFO_0000035 -# -# http://purl.obolibrary.org/obo/BFO_0000038 -# -# http://purl.obolibrary.org/obo/BFO_0000040 -# -# http://purl.obolibrary.org/obo/BFO_0000140 -# -# http://purl.obolibrary.org/obo/BFO_0000141 -# -# http://purl.obolibrary.org/obo/BFO_0000142 -# -# http://purl.obolibrary.org/obo/BFO_0000145 -# -# http://purl.obolibrary.org/obo/BFO_0000146 - - a owl:Ontology; - owl:versionIRI ; - dc11:contributor "Alan Ruttenberg", "Albert Goldfain", "Barry Smith", "Bill Duncan", - "Bjoern Peters", "Chris Mungall", "David Osumi-Sutherland", "Fabian Neuhaus", "James A. Overton", - "Janna Hastings", "Jie Zheng", "John Beverley", "Jonathan Bona", "Larry Hunter", "Leonard Jacuzzo", - "Ludger Jansen", "Mark Jensen", "Mark Ressler", "Mathias Brochhausen", "Mauricio Almeida", - "Melanie Courtot", "Neil Otte", "Pierre Grenon", "Randall Dipert", "Robert Rovetto", - "Ron Rudnicki", "Stefan Schulz", "Thomas Bittner", "Werner Ceusters", "Yongqun \"Oliver\" He"; - dc:description "Basic Formal Ontology implemented in the Web Ontology Language (OWL 2) with direct semantics."@en; - dc:license ; - dc:title "BFO 2020"; - rdfs:comment "The most recent version of this file will always be in the GitHub repository https://github.com/bfo-ontology/bfo-2020" . - -dc11:contributor a owl:AnnotationProperty . - -dc11:identifier a owl:AnnotationProperty . - -dc11:license a owl:AnnotationProperty . 
- -dc:description a owl:AnnotationProperty . - -dc:license a owl:AnnotationProperty . - -dc:title a owl:AnnotationProperty . - -skos:altLabel a owl:AnnotationProperty . - -skos:definition a owl:AnnotationProperty . - -skos:example a owl:AnnotationProperty . - -skos:prefLabel a owl:AnnotationProperty . - -skos:scopeNote a owl:AnnotationProperty . - - a owl:ObjectProperty; - owl:inverseOf ; - rdfs:domain ; - rdfs:range ; - dc11:identifier "206-BFO"; - rdfs:label "has realization"@en; - skos:altLabel "realized in"@en; - skos:definition "b has realization c =Def c realizes b"@en; - skos:example "As for realizes"@en . - - a owl:ObjectProperty; - rdfs:domain ; - rdfs:range ; - dc11:identifier "059-BFO"; - rdfs:label "realizes"@en; - skos:definition "(Elucidation) realizes is a relation between a process b and realizable entity c such that c inheres in some d & for all t, if b has participant d then c exists & the type instantiated by b is correlated with the type instantiated by c"@en; - skos:example "A balding process realizes a disposition to go bald; a studying process realizes a student role; a process of pumping blood realizes the pumping function of a heart"@en; - skos:scopeNote "Users that require more sophisticated representations of time are encouraged to import a temporal extension of BFO-Core provided by the BFO development team. See documentation for guidance: "@en . - - a owl:ObjectProperty; - owl:inverseOf ; - rdfs:domain _:genid1; - rdfs:range ; - dc11:identifier "250-BFO"; - rdfs:label "participates in"@en; - skos:definition "(Elucidation) participates in holds between some b that is either a specifically dependent continuant or generically dependent continuant or independent continuant that is not a spatial region & some process p such that b participates in p some way"@en; - skos:scopeNote "Users that require more sophisticated representations of time are encouraged to import a temporal extension of BFO-Core provided by the BFO development team. See documentation for guidance: "@en . - -_:genid1 a owl:Class; - owl:unionOf _:genid8 . - -_:genid8 a rdf:List; - rdf:first ; - rdf:rest _:genid7 . - -_:genid7 a rdf:List; - rdf:first ; - rdf:rest _:genid2 . - -_:genid2 a rdf:List; - rdf:first _:genid3; - rdf:rest rdf:nil . - -_:genid3 a owl:Class; - owl:intersectionOf _:genid6 . - -_:genid6 a rdf:List; - rdf:first ; - rdf:rest _:genid4 . - -_:genid4 a rdf:List; - rdf:first _:genid5; - rdf:rest rdf:nil . - -_:genid5 a owl:Class; - owl:complementOf . - - a owl:ObjectProperty; - rdfs:domain ; - rdfs:range _:genid9; - dc11:identifier "248-BFO"; - rdfs:label "has participant"@en; - skos:definition "p has participant c =Def c participates in p"@en; - skos:scopeNote "Users that require more sophisticated representations of time are encouraged to import a temporal extension of BFO-Core provided by the BFO development team. See documentation for guidance: "@en . - -_:genid9 a owl:Class; - owl:unionOf _:genid16 . - -_:genid16 a rdf:List; - rdf:first ; - rdf:rest _:genid15 . - -_:genid15 a rdf:List; - rdf:first ; - rdf:rest _:genid10 . - -_:genid10 a rdf:List; - rdf:first _:genid11; - rdf:rest rdf:nil . - -_:genid11 a owl:Class; - owl:intersectionOf _:genid14 . - -_:genid14 a rdf:List; - rdf:first ; - rdf:rest _:genid12 . - -_:genid12 a rdf:List; - rdf:first _:genid13; - rdf:rest rdf:nil . - -_:genid13 a owl:Class; - owl:complementOf . 
- - a owl:ObjectProperty; - owl:inverseOf ; - rdfs:domain ; - rdfs:range _:genid17; - dc11:identifier "258-BFO"; - rdfs:label "is concretized by"@en; - skos:definition "c is concretized by b =Def b concretizes c"@en; - skos:scopeNote "Users that require more sophisticated representations of time are encouraged to import a temporal extension of BFO-Core provided by the BFO development team. See documentation for guidance: "@en . - -_:genid17 a owl:Class; - owl:unionOf _:genid19 . - -_:genid19 a rdf:List; - rdf:first ; - rdf:rest _:genid18 . - -_:genid18 a rdf:List; - rdf:first ; - rdf:rest rdf:nil . - - a owl:ObjectProperty; - rdfs:domain _:genid20; - rdfs:range ; - dc11:identifier "256-BFO"; - rdfs:label "concretizes"@en; - skos:definition "b concretizes c =Def b is a process or a specifically dependent continuant & c is a generically dependent continuant & there is some time t such that c is the pattern or content which b shares at t with actual or potential copies"@en; - skos:scopeNote "Users that require more sophisticated representations of time are encouraged to import a temporal extension of BFO-Core provided by the BFO development team. See documentation for guidance: "@en . - -_:genid20 a owl:Class; - owl:unionOf _:genid22 . - -_:genid22 a rdf:List; - rdf:first ; - rdf:rest _:genid21 . - -_:genid21 a rdf:List; - rdf:first ; - rdf:rest rdf:nil . - - a owl:ObjectProperty, owl:TransitiveProperty; - owl:inverseOf ; - rdfs:domain ; - rdfs:range ; - dc11:identifier "213-BFO"; - rdfs:label "preceded by"@en; - skos:definition "b preceded by c =Def b precedes c"@en; - skos:example "The temporal region occupied by the second half of the match is preceded by the temporal region occupied by the first half of the match"@en . - - a owl:ObjectProperty, owl:TransitiveProperty; - rdfs:domain ; - rdfs:range ; - dc11:identifier "270-BFO"; - rdfs:label "precedes"@en; - skos:definition "(Elucidation) precedes is a relation between occurrents o, o' such that if t is the temporal extent of o & t' is the temporal extent of o' then either the last instant of o is before the first instant of o' or the last instant of o is the first instant of o' & neither o nor o' are temporal instants"@en; - skos:example "The temporal region occupied by Mary's birth precedes the temporal region occupied by Mary's death."@en; - skos:scopeNote "Each temporal region is its own temporal extent. The temporal extent of a spatiotemporal region is the temporal region it temporally projects onto. The temporal extent of a process or process boundary that occupies temporal region t is t.", - "Precedes defines a strict partial order on occurrents." . - - a owl:ObjectProperty; - owl:inverseOf ; - rdfs:domain _:genid23; - rdfs:range _:genid26; - dc11:identifier "143-BFO"; - rdfs:label "occurs in"@en; - skos:definition "b occurs in c =Def b is a process or a process boundary & c is a material entity or site & there exists a spatiotemporal region r & b occupies spatiotemporal region r & for all time t, if b exists at t then c exists at t & there exist spatial regions s and s' where b spatially projects onto s at t & c occupies spatial region s' at t & s is a continuant part of s' at t"@en; - skos:example "A process of digestion occurs in the interior of an organism; a process of loading artillery rounds into a tank cannon occurs in the interior of the tank"@en . - -_:genid23 a owl:Class; - owl:unionOf _:genid25 . - -_:genid25 a rdf:List; - rdf:first ; - rdf:rest _:genid24 . - -_:genid24 a rdf:List; - rdf:first ; - rdf:rest rdf:nil . 
- -_:genid26 a owl:Class; - owl:unionOf _:genid28 . - -_:genid28 a rdf:List; - rdf:first ; - rdf:rest _:genid27 . - -_:genid27 a rdf:List; - rdf:first ; - rdf:rest rdf:nil . - - a owl:ObjectProperty; - owl:inverseOf ; - rdfs:domain ; - rdfs:range _:genid29; - dc11:identifier "252-BFO"; - rdfs:label "generically depends on"@en; - skos:altLabel "g-depends on"@en; - skos:definition "b generically depends on c =Def b is a generically dependent continuant & c is an independent continuant that is not a spatial region & at some time t there inheres in c a specifically dependent continuant which concretizes b at t"@en; - skos:scopeNote "Users that require more sophisticated representations of time are encouraged to import a temporal extension of BFO-Core provided by the BFO development team. See documentation for guidance: "@en . - -_:genid29 a owl:Class; - owl:intersectionOf _:genid32 . - -_:genid32 a rdf:List; - rdf:first ; - rdf:rest _:genid30 . - -_:genid30 a rdf:List; - rdf:first _:genid31; - rdf:rest rdf:nil . - -_:genid31 a owl:Class; - owl:complementOf . - - a owl:ObjectProperty; - rdfs:domain _:genid33; - rdfs:range ; - dc11:identifier "254-BFO"; - rdfs:label "is carrier of"@en; - skos:definition "b is carrier of c =Def there is some time t such that c generically depends on b at t"@en; - skos:scopeNote "Users that require more sophisticated representations of time are encouraged to import a temporal extension of BFO-Core provided by the BFO development team. See documentation for guidance: "@en . - -_:genid33 a owl:Class; - owl:intersectionOf _:genid36 . - -_:genid36 a rdf:List; - rdf:first ; - rdf:rest _:genid34 . - -_:genid34 a rdf:List; - rdf:first _:genid35; - rdf:rest rdf:nil . - -_:genid35 a owl:Class; - owl:complementOf . - - a owl:ObjectProperty; - rdfs:domain ; - rdfs:range ; - dc11:identifier "118-BFO"; - rdfs:label "exists at"@en; - skos:definition "(Elucidation) exists at is a relation between a particular and some temporal region at which the particular exists"@en; - skos:example "First World War exists at 1914-1916; Mexico exists at January 1, 2000"@en . - - a owl:ObjectProperty; - rdfs:subPropertyOf ; - owl:inverseOf ; - rdfs:domain ; - rdfs:range ; - dc11:identifier "230-BFO"; - rdfs:label "has member part"@en; - skos:definition "b has member part c =Def c member part of b"@en; - skos:scopeNote "Users that require more sophisticated representations of time are encouraged to import a temporal extension of BFO-Core provided by the BFO development team. See documentation for guidance: "@en . - - a owl:ObjectProperty, owl:TransitiveProperty; - owl:inverseOf ; - rdfs:domain ; - rdfs:range ; - dc11:identifier "202-BFO"; - rdfs:label "has occurrent part"@en; - skos:definition "b has occurrent part c =Def c occurrent part of b"@en; - skos:example "Mary's life has occurrent part Mary's 5th birthday"@en . - - a owl:ObjectProperty, owl:TransitiveProperty; - rdfs:subPropertyOf ; - owl:inverseOf ; - rdfs:domain ; - rdfs:range ; - dc11:identifier "211-BFO"; - rdfs:label "has temporal part"@en; - skos:definition "b has temporal part c =Def c temporal part of b"@en; - skos:example "Your life has temporal part the first year of your life"@en . 
- - a owl:ObjectProperty; - owl:inverseOf ; - rdfs:domain _:genid37; - rdfs:range _:genid41; - dc11:identifier "236-BFO"; - rdfs:label "location of"@en; - skos:definition "b location of c =Def c located in b"@en; - skos:scopeNote "Users that require more sophisticated representations of time are encouraged to import a temporal extension of BFO-Core provided by the BFO development team. See documentation for guidance: "@en . - -_:genid37 a owl:Class; - owl:intersectionOf _:genid40 . - -_:genid40 a rdf:List; - rdf:first ; - rdf:rest _:genid38 . - -_:genid38 a rdf:List; - rdf:first _:genid39; - rdf:rest rdf:nil . - -_:genid39 a owl:Class; - owl:complementOf . - -_:genid41 a owl:Class; - owl:intersectionOf _:genid44 . - -_:genid44 a rdf:List; - rdf:first ; - rdf:rest _:genid42 . - -_:genid42 a rdf:List; - rdf:first _:genid43; - rdf:rest rdf:nil . - -_:genid43 a owl:Class; - owl:complementOf . - - a owl:ObjectProperty; - owl:inverseOf ; - rdfs:domain ; - rdfs:range ; - dc11:identifier "244-BFO"; - rdfs:label "material basis of"@en; - skos:definition "b material basis of c =Def c has material basis b"@en; - skos:scopeNote "Users that require more sophisticated representations of time are encouraged to import a temporal extension of BFO-Core provided by the BFO development team. See documentation for guidance: "@en . - - a owl:ObjectProperty; - rdfs:subPropertyOf ; - rdfs:domain ; - rdfs:range ; - dc11:identifier "228-BFO"; - rdfs:label "member part of"@en; - skos:definition "b member part of c =Def b is an object & c is a material entity & there is some time t such that b continuant part of c at t & there is a mutually exhaustive and pairwise disjoint partition of c into objects x1, ..., xn (for some n ≠ 1) with b = xi (for some 1 <= i <= n)"@en; - skos:scopeNote "Users that require more sophisticated representations of time are encouraged to import a temporal extension of BFO-Core provided by the BFO development team. See documentation for guidance: "@en . - - a owl:ObjectProperty, owl:TransitiveProperty; - rdfs:domain ; - rdfs:range ; - dc11:identifier "003-BFO"; - rdfs:label "occurrent part of"@en; - skos:definition "(Elucidation) occurrent part of is a relation between occurrents b and c when b is part of c"@en; - skos:example "Mary's 5th birthday is an occurrent part of Mary's life; the first set of the tennis match is an occurrent part of the tennis match"@en . - - a owl:ObjectProperty, owl:TransitiveProperty; - rdfs:subPropertyOf ; - rdfs:domain ; - rdfs:range ; - dc11:identifier "078-BFO"; - rdfs:label "temporal part of"@en; - skos:definition "b temporal part of c =Def b occurrent part of c & (b and c are temporal regions) or (b and c are spatiotemporal regions & b temporally projects onto an occurrent part of the temporal region that c temporally projects onto) or (b and c are processes or process boundaries & b occupies a temporal region that is an occurrent part of the temporal region that c occupies)"@en; - skos:example "Your heart beating from 4pm to 5pm today is a temporal part of the process of your heart beating; the 4th year of your life is a temporal part of your life, as is the process boundary which separates the 3rd and 4th years of your life; the first quarter of a game of football is a temporal part of the whole game"@en . 
- - a owl:ObjectProperty, owl:FunctionalProperty; - rdfs:domain ; - rdfs:range ; - dc11:identifier "080-BFO"; - rdfs:label "temporally projects onto"@en; - skos:definition "(Elucidation) temporally projects onto is a relation between a spatiotemporal region s and some temporal region which is the temporal extent of s"@en; - skos:example "The world line of a particle temporally projects onto the temporal region extending from the beginning to the end of the existence of the particle"@en . - - a owl:ObjectProperty; - rdfs:domain _:genid45; - rdfs:range _:genid49; - dc11:identifier "234-BFO"; - rdfs:label "located in"@en; - skos:definition "b located in c =Def b is an independent continuant & c is an independent & neither is a spatial region & there is some time t such that the spatial region which b occupies at t is continuant part of the spatial region which c occupies at t"@en; - skos:scopeNote "Users that require more sophisticated representations of time are encouraged to import a temporal extension of BFO-Core provided by the BFO development team. See documentation for guidance: "@en . - -_:genid45 a owl:Class; - owl:intersectionOf _:genid48 . - -_:genid48 a rdf:List; - rdf:first ; - rdf:rest _:genid46 . - -_:genid46 a rdf:List; - rdf:first _:genid47; - rdf:rest rdf:nil . - -_:genid47 a owl:Class; - owl:complementOf . - -_:genid49 a owl:Class; - owl:intersectionOf _:genid52 . - -_:genid52 a rdf:List; - rdf:first ; - rdf:rest _:genid50 . - -_:genid50 a rdf:List; - rdf:first _:genid51; - rdf:rest rdf:nil . - -_:genid51 a owl:Class; - owl:complementOf . - - a owl:ObjectProperty; - owl:inverseOf ; - rdfs:domain ; - rdfs:range ; - dc11:identifier "221-BFO"; - rdfs:label "continuant part of"@en; - skos:definition "b continuant part of c =Def b and c are continuants & there is some time t such that b and c exist at t & b continuant part of c at t"@en; - skos:example "Milk teeth continuant part of human; surgically removed tumour continuant part of organism"@en; - skos:scopeNote "Users that require more sophisticated representations of time are encouraged to import a temporal extension of BFO-Core provided by the BFO development team. See documentation for guidance: "@en . - - a owl:ObjectProperty; - rdfs:domain ; - rdfs:range ; - dc11:identifier "271-BFO"; - rdfs:label "has continuant part"@en; - skos:definition "b has continuant part c =Def c continuant part of b"@en; - skos:scopeNote "Users that require more sophisticated representations of time are encouraged to import a temporal extension of BFO-Core provided by the BFO development team. See documentation for guidance: "@en . - - a owl:ObjectProperty; - rdfs:domain _:genid53; - rdfs:range _:genid56; - dc11:identifier "267-BFO"; - rdfs:label "environs"@en; - skos:altLabel "contains process"@en; - skos:definition "b environs c =Def c occurs in b"@en; - skos:example "Mouth environs process of mastication; city environs traffic"@en . - -_:genid53 a owl:Class; - owl:unionOf _:genid55 . - -_:genid55 a rdf:List; - rdf:first ; - rdf:rest _:genid54 . - -_:genid54 a rdf:List; - rdf:first ; - rdf:rest rdf:nil . - -_:genid56 a owl:Class; - owl:unionOf _:genid58 . - -_:genid58 a rdf:List; - rdf:first ; - rdf:rest _:genid57 . - -_:genid57 a rdf:List; - rdf:first ; - rdf:rest rdf:nil . 
- - a owl:ObjectProperty, owl:FunctionalProperty, owl:InverseFunctionalProperty; - owl:inverseOf ; - rdfs:domain ; - rdfs:range ; - dc11:identifier "144-BFO"; - rdfs:label "history of"@en; - skos:definition "(Elucidation) history of is a relation between history b and material entity c such that b is the unique history of c"@en; - skos:example "This life is the history of this organism"@en . - - a owl:ObjectProperty; - rdfs:domain ; - rdfs:range ; - dc11:identifier "145-BFO"; - rdfs:label "has history"@en; - skos:definition "b has history c =Def c history of b"@en; - skos:example "This organism has history this life"@en . - - a owl:ObjectProperty; - owl:inverseOf ; - rdfs:domain _:genid59; - rdfs:range ; - dc11:identifier "260-BFO"; - rdfs:label "specifically depended on by"@en; - skos:altLabel "s-depended on by"@en; - skos:definition "b specifically depended on by c =Def c specifically depends on b"@en; - skos:example "Coloured object specifically depended on by colour"@en . - -_:genid59 a owl:Class; - owl:unionOf _:genid65 . - -_:genid65 a rdf:List; - rdf:first ; - rdf:rest _:genid60 . - -_:genid60 a rdf:List; - rdf:first _:genid61; - rdf:rest rdf:nil . - -_:genid61 a owl:Class; - owl:intersectionOf _:genid64 . - -_:genid64 a rdf:List; - rdf:first ; - rdf:rest _:genid62 . - -_:genid62 a rdf:List; - rdf:first _:genid63; - rdf:rest rdf:nil . - -_:genid63 a owl:Class; - owl:complementOf . - - a owl:ObjectProperty; - rdfs:domain ; - rdfs:range _:genid66; - dc11:identifier "012-BFO"; - rdfs:label "specifically depends on"@en; - skos:altLabel "s-depends on"@en; - skos:definition "(Elucidation) specifically depends on is a relation between a specifically dependent continuant b and specifically dependent continuant or independent continuant that is not a spatial region c such that b and c share no parts in common & b is of a nature such that at all times t it cannot exist unless c exists & b is not a boundary of c"@en; - skos:example "A shape specifically depends on the shaped object; hue, saturation and brightness of a colour sample specifically depends on each other"@en; - skos:scopeNote "The analogue of specifically depends on for occurrents is has participant."@en . - -_:genid66 a owl:Class; - owl:unionOf _:genid72 . - -_:genid72 a rdf:List; - rdf:first ; - rdf:rest _:genid67 . - -_:genid67 a rdf:List; - rdf:first _:genid68; - rdf:rest rdf:nil . - -_:genid68 a owl:Class; - owl:intersectionOf _:genid71 . - -_:genid71 a rdf:List; - rdf:first ; - rdf:rest _:genid69 . - -_:genid69 a rdf:List; - rdf:first _:genid70; - rdf:rest rdf:nil . - -_:genid70 a owl:Class; - owl:complementOf . - - a owl:ObjectProperty; - rdfs:subPropertyOf ; - owl:inverseOf ; - rdfs:domain _:genid73; - rdfs:range ; - dc11:identifier "053-BFO"; - rdfs:label "bearer of"@en; - skos:definition "b bearer of c =Def c inheres in b"@en; - skos:example "A patch of ink is the bearer of a colour quality; an organism is the bearer of a temperature quality"@en . - -_:genid73 a owl:Class; - owl:intersectionOf _:genid76 . - -_:genid76 a rdf:List; - rdf:first ; - rdf:rest _:genid74 . - -_:genid74 a rdf:List; - rdf:first _:genid75; - rdf:rest rdf:nil . - -_:genid75 a owl:Class; - owl:complementOf . 
- - a owl:ObjectProperty; - rdfs:subPropertyOf ; - rdfs:domain ; - rdfs:range _:genid77; - dc11:identifier "051-BFO"; - rdfs:label "inheres in"@en; - skos:definition "b inheres in c =Def b is a specifically dependent continuant & c is an independent continuant that is not a spatial region & b specifically depends on c"@en; - skos:example "A shape inheres in a shaped object; a mass inheres in a material entity"@en . - -_:genid77 a owl:Class; - owl:intersectionOf _:genid80 . - -_:genid80 a rdf:List; - rdf:first ; - rdf:rest _:genid78 . - -_:genid78 a rdf:List; - rdf:first _:genid79; - rdf:rest rdf:nil . - -_:genid79 a owl:Class; - owl:complementOf . - - a owl:ObjectProperty, owl:FunctionalProperty; - rdfs:domain _:genid81; - rdfs:range ; - dc11:identifier "132-BFO"; - rdfs:label "occupies temporal region"@en; - skos:definition "p occupies temporal region t =Def p is a process or process boundary & the spatiotemporal region occupied by p temporally projects onto t"@en; - skos:example "The Second World War occupies the temporal region September 1, 1939 - September 2, 1945"@en . - -_:genid81 a owl:Class; - owl:unionOf _:genid83 . - -_:genid83 a rdf:List; - rdf:first ; - rdf:rest _:genid82 . - -_:genid82 a rdf:List; - rdf:first ; - rdf:rest rdf:nil . - - a owl:ObjectProperty, owl:FunctionalProperty; - rdfs:domain _:genid84; - rdfs:range ; - dc11:identifier "082-BFO"; - rdfs:label "occupies spatiotemporal region"@en; - skos:definition "(Elucidation) occupies spatiotemporal region is a relation between a process or process boundary p and the spatiotemporal region s which is its spatiotemporal extent"@en; - skos:example "A particle emitted by a nuclear reactor occupies the spatiotemporal region which is its trajectory"@en . - -_:genid84 a owl:Class; - owl:unionOf _:genid86 . - -_:genid86 a rdf:List; - rdf:first ; - rdf:rest _:genid85 . - -_:genid85 a rdf:List; - rdf:first ; - rdf:rest rdf:nil . - - a owl:ObjectProperty; - rdfs:domain _:genid87; - rdfs:range ; - dc11:identifier "232-BFO"; - rdfs:label "occupies spatial region"@en; - skos:definition "b occupies spatial region r =Def b is an independent continuant that is not a spatial region & r is a spatial region & there is some time t such that every continuant part of b occupies some continuant part of r at t and no continuant part of b occupies any spatial region that is not a continuant part of r at t"@en; - skos:scopeNote "Users that require more sophisticated representations of time are encouraged to import a temporal extension of BFO-Core provided by the BFO development team. See documentation for guidance: "@en . - -_:genid87 a owl:Class; - owl:intersectionOf _:genid90 . - -_:genid90 a rdf:List; - rdf:first ; - rdf:rest _:genid88 . - -_:genid88 a rdf:List; - rdf:first _:genid89; - rdf:rest rdf:nil . - -_:genid89 a owl:Class; - owl:complementOf . - - a owl:ObjectProperty; - rdfs:domain ; - rdfs:range ; - dc11:identifier "246-BFO"; - rdfs:label "spatially projects onto"@en; - skos:definition "(Elucidation) spatially projects onto is a relation between some spatiotemporal region b and spatial region c such that at some time t, c is the spatial extent of b at t"@en; - skos:scopeNote "Users that require more sophisticated representations of time are encouraged to import a temporal extension of BFO-Core provided by the BFO development team. See documentation for guidance: "@en . 
- - a owl:ObjectProperty; - rdfs:domain ; - rdfs:range ; - dc11:identifier "242-BFO"; - rdfs:label "has material basis"@en; - skos:definition "b has material basis c =Def b is a disposition & c is a material entity & there is some d bearer of b & there is some time t such that c is a continuant part of d at t & d has disposition b because c is a continuant part of d at t"@en; - skos:scopeNote "Users that require more sophisticated representations of time are encouraged to import a temporal extension of BFO-Core provided by the BFO development team. See documentation for guidance: "@en . - - a owl:ObjectProperty; - owl:inverseOf ; - rdfs:domain ; - rdfs:range ; - dc11:identifier "268-BFO"; - rdfs:label "first instant of"@en; - skos:definition "t first instant of t' =Def t is a temporal instant & t' is a temporal region t' & t precedes all temporal parts of t' other than t"@en; - skos:example "An hour starting at midnight yesterday has first instant midnight yesterday"@en . - - a owl:ObjectProperty, owl:FunctionalProperty; - rdfs:domain ; - rdfs:range ; - dc11:identifier "261-BFO"; - rdfs:label "has first instant"@en; - skos:definition "t has first instant t' =Def t' first instant of t"@en; - skos:example "The first hour of a year has first instant midnight on December 31"@en . - - a owl:ObjectProperty; - owl:inverseOf ; - rdfs:domain ; - rdfs:range ; - dc11:identifier "269-BFO"; - rdfs:label "last instant of"@en; - skos:definition "t last instant of t' =Def t is a temporal instant & t' is a temporal region & all temporal parts of t' other than t precede t"@en; - skos:example "Last midnight is the last instant of yesterday"@en . - - a owl:ObjectProperty, owl:FunctionalProperty; - rdfs:domain ; - rdfs:range ; - dc11:identifier "215-BFO"; - rdfs:label "has last instant"@en; - skos:definition "t has last instant t' =Def t' last instant of t"@en; - skos:example "The last hour of a year has last instant midnight December 31"@en . - - a owl:Class; - dc11:identifier "001-BFO"; - rdfs:label "entity"@en; - skos:definition "(Elucidation) An entity is anything that exists or has existed or will exist"@en; - skos:example "Julius Caesar; the Second World War; your body mass index; Verdi's Requiem"@en . - - a owl:Class; - rdfs:subClassOf , _:genid91; - owl:disjointWith ; - dc11:identifier "008-BFO"; - rdfs:label "continuant"@en; - skos:definition "(Elucidation) A continuant is an entity that persists, endures, or continues to exist through time while maintaining its identity"@en; - skos:example "A human being; a tennis ball; a cave; a region of space; someone's temperature"@en . - -_:genid91 a owl:Restriction; - owl:allValuesFrom ; - owl:onProperty . - - a owl:Class; - rdfs:subClassOf ; - dc11:identifier "077-BFO"; - rdfs:label "occurrent"@en; - skos:definition "(Elucidation) An occurrent is an entity that unfolds itself in time or it is the start or end of such an entity or it is a temporal or spatiotemporal region"@en; - skos:example "As for process, history, process boundary, spatiotemporal region, zero-dimensional temporal region, one-dimensional temporal region, temporal interval, temporal instant."@en . 
- - a owl:Class; - rdfs:subClassOf , _:genid92; - dc11:identifier "017-BFO"; - rdfs:label "independent continuant"@en; - skos:definition "b is an independent continuant =Def b is a continuant & there is no c such that b specifically depends on c or b generically depends on c"@en; - skos:example "An atom; a molecule; an organism; a heart; a chair; the bottom right portion of a human torso; a leg; the interior of your mouth; a spatial region; an orchestra"@en . - -_:genid92 a owl:Restriction; - owl:allValuesFrom ; - owl:onProperty . - - a owl:Class; - rdfs:subClassOf , _:genid93; - dc11:identifier "035-BFO"; - rdfs:label "spatial region"@en; - skos:definition "(Elucidation) A spatial region is a continuant entity that is a continuant part of the spatial projection of a portion of spacetime at a given time"@en; - skos:example "As for zero-dimensional spatial region, one-dimensional spatial region, two-dimensional spatial region, three-dimensional spatial region"@en . - -_:genid93 a owl:Restriction; - owl:allValuesFrom ; - owl:onProperty . - - a owl:Class; - rdfs:subClassOf , _:genid94, _:genid95; - dc11:identifier "100-BFO"; - rdfs:label "temporal region"@en; - skos:definition "(Elucidation) A temporal region is an occurrent over which processes can unfold"@en; - skos:example "As for zero-dimensional temporal region and one-dimensional temporal region"@en . - -_:genid94 a owl:Restriction; - owl:allValuesFrom ; - owl:onProperty . - -_:genid95 a owl:Restriction; - owl:allValuesFrom ; - owl:onProperty . - - a owl:Class; - rdfs:subClassOf , _:genid96; - dc11:identifier "039-BFO"; - rdfs:label "two-dimensional spatial region"@en; - skos:definition "(Elucidation) A two-dimensional spatial region is a spatial region that is a whole consisting of a surface together with zero or more surfaces which may have spatial regions of lower dimension as parts"@en; - skos:example "The surface of a sphere-shaped part of space; an infinitely thin plane in space"@en . - -_:genid96 a owl:Restriction; - owl:allValuesFrom _:genid97; - owl:onProperty . - -_:genid97 a owl:Class; - owl:unionOf _:genid100 . - -_:genid100 a rdf:List; - rdf:first ; - rdf:rest _:genid99 . - -_:genid99 a rdf:List; - rdf:first ; - rdf:rest _:genid98 . - -_:genid98 a rdf:List; - rdf:first ; - rdf:rest rdf:nil . - - a owl:Class; - rdfs:subClassOf , _:genid101, _:genid102; - dc11:identifier "095-BFO"; - rdfs:label "spatiotemporal region"@en; - skos:definition "(Elucidation) A spatiotemporal region is an occurrent that is an occurrent part of spacetime"@en; - skos:example "The spatiotemporal region occupied by the development of a cancer tumour; the spatiotemporal region occupied by an orbiting satellite"@en; - skos:scopeNote "'Spacetime' here refers to the maximal instance of the universal spatiotemporal region."@en . - -_:genid101 a owl:Restriction; - owl:allValuesFrom ; - owl:onProperty . - -_:genid102 a owl:Restriction; - owl:allValuesFrom ; - owl:onProperty . - - a owl:Class; - rdfs:subClassOf , _:genid103, _:genid107, _:genid108; - dc11:identifier "083-BFO"; - rdfs:label "process"@en; - skos:altLabel "event"@en; - skos:definition "(Elucidation) p is a process means p is an occurrent that has some temporal proper part and for some time t, p has some material entity as participant"@en; - skos:example "An act of selling; the life of an organism; a process of sleeping; a process of cell-division; a beating of the heart; a process of meiosis; the taxiing of an aircraft; the programming of a computer"@en . 
- -_:genid103 a owl:Restriction; - owl:allValuesFrom _:genid104; - owl:onProperty . - -_:genid104 a owl:Class; - owl:unionOf _:genid106 . - -_:genid106 a rdf:List; - rdf:first ; - rdf:rest _:genid105 . - -_:genid105 a rdf:List; - rdf:first ; - rdf:rest rdf:nil . - -_:genid107 a owl:Restriction; - owl:allValuesFrom ; - owl:onProperty . - -_:genid108 a owl:Restriction; - owl:allValuesFrom ; - owl:onProperty . - - a owl:Class; - rdfs:subClassOf ; - owl:disjointWith ; - dc11:identifier "062-BFO"; - rdfs:label "disposition"@en; - skos:altLabel "internally-grounded realizable entity"@en; - skos:definition "(Elucidation) A disposition b is a realizable entity such that if b ceases to exist then its bearer is physically changed & b's realization occurs when and because this bearer is in some special physical circumstances & this realization occurs in virtue of the bearer's physical make-up"@en; - skos:example "An atom of element X has the disposition to decay to an atom of element Y; the cell wall is disposed to transport cellular material through endocytosis and exocytosis; certain people have a predisposition to colon cancer; children are innately disposed to categorize objects in certain ways"@en . - - a owl:Class; - rdfs:subClassOf ; - owl:disjointWith ; - dc11:identifier "058-BFO"; - rdfs:label "realizable entity"@en; - skos:definition "(Elucidation) A realizable entity is a specifically dependent continuant that inheres in some independent continuant which is not a spatial region & which is of a type some instances of which are realized in processes of a correlated type"@en; - skos:example "The role of being a doctor; the role of this boundary to delineate where Utah and Colorado meet; the function of your reproductive organs; the disposition of your blood to coagulate; the disposition of this piece of metal to conduct electricity"@en . - - a owl:Class; - rdfs:subClassOf , _:genid109; - dc11:identifier "037-BFO"; - rdfs:label "zero-dimensional spatial region"@en; - skos:definition "(Elucidation) A zero-dimensional spatial region is one or a collection of more than one spatially disjoint points in space"@en; - skos:example "The spatial region occupied at some time instant by the North Pole"@en . - -_:genid109 a owl:Restriction; - owl:allValuesFrom ; - owl:onProperty . - - a owl:Class; - rdfs:subClassOf ; - dc11:identifier "055-BFO"; - rdfs:label "quality"@en; - skos:definition "(Elucidation) A quality is a specifically dependent continuant that, in contrast to roles and dispositions, does not require any further process in order to be realized"@en; - skos:example "The colour of a tomato; the ambient temperature of this portion of air; the length of the circumference of your waist; the shape of your nose; the shape of your nostril; the mass of this piece of gold"@en . 
- - a owl:Class; - rdfs:subClassOf ; - dc11:identifier "050-BFO"; - rdfs:label "specifically dependent continuant"@en; - skos:definition "b is a specifically dependent continuant =Def b is a continuant & there is some independent continuant c which is not a spatial region & which is such that b specifically depends on c"@en; - skos:example "(with multiple bearers) John's love for Mary; the ownership relation between John and this statue; the relation of authority between John and his subordinates"@en, - "(with one bearer) The mass of this tomato; the pink colour of a medium rare piece of grilled filet mignon at its centre; the smell of this portion of mozzarella; the disposition of this fish to decay; the role of being a doctor; the function of this heart to pump blood; the shape of this hole"@en . - - a owl:Class; - rdfs:subClassOf ; - dc11:identifier "061-BFO"; - rdfs:label "role"@en; - skos:altLabel "externally-grounded realizable entity"@en; - skos:definition "(Elucidation) A role b is a realizable entity such that b exists because there is some single bearer that is in some special physical, social, or institutional set of circumstances in which this bearer does not have to be & b is not such that, if it ceases to exist, then the physical make-up of the bearer is thereby changed"@en; - skos:example "The priest role; the student role; the role of subject in a clinical trial; the role of a stone in marking a property boundary; the role of a boundary to demarcate two neighbouring administrative territories; the role of a building in serving as a military target"@en . - - a owl:Class; - rdfs:subClassOf ; - dc11:identifier "027-BFO"; - rdfs:label "fiat object part"@en; - skos:definition "(Elucidation) A fiat object part b is a material entity & such that if b exists then it is continuant part of some object c & demarcated from the remainder of c by one or more fiat surfaces"@en; - skos:example "The upper and lower lobes of the left lung; the dorsal and ventral surfaces of the body; the Western hemisphere of the Earth; the FMA:regional parts of an intact human body"@en . - - a owl:Class; - rdfs:subClassOf , _:genid110; - dc11:identifier "038-BFO"; - rdfs:label "one-dimensional spatial region"@en; - skos:definition "(Elucidation) A one-dimensional spatial region is a whole consisting of a line together with zero or more lines which may have points as parts"@en; - skos:example "An edge of a cube-shaped portion of space; a line connecting two points; two parallel lines extended in space"@en . - -_:genid110 a owl:Restriction; - owl:allValuesFrom _:genid111; - owl:onProperty . - -_:genid111 a owl:Class; - owl:unionOf _:genid113 . - -_:genid113 a rdf:List; - rdf:first ; - rdf:rest _:genid112 . - -_:genid112 a rdf:List; - rdf:first ; - rdf:rest rdf:nil . 
- - a owl:Class; - rdfs:subClassOf ; - dc11:identifier "025-BFO"; - rdfs:label "object aggregate"@en; - skos:definition "(Elucidation) An object aggregate is a material entity consisting exactly of a plurality (≥1) of objects as member parts which together form a unit"@en; - skos:example "The aggregate of the musicians in a symphony orchestra and their instruments; the aggregate of bearings in a constant velocity axle joint; the nitrogen atoms in the atmosphere; a collection of cells in a blood biobank"@en; - skos:scopeNote "'Exactly' means that there are no parts of the object aggregate other than its member parts.", - "The unit can, at certain times, consist of exactly one object, for example, when a wolf litter loses all but one of its pups, but it must at some time have a plurality of member parts." . - - a owl:Class; - rdfs:subClassOf , _:genid114; - dc11:identifier "040-BFO"; - rdfs:label "three-dimensional spatial region"@en; - skos:definition "(Elucidation) A three-dimensional spatial region is a whole consisting of a spatial volume together with zero or more spatial volumes which may have spatial regions of lower dimension as parts"@en; - skos:example "A cube-shaped region of space; a sphere-shaped region of space; the region of space occupied by all and only the planets in the solar system at some point in time"@en . - -_:genid114 a owl:Restriction; - owl:allValuesFrom ; - owl:onProperty . - - a owl:Class; - rdfs:subClassOf , _:genid115, _:genid119, _:genid123; - dc11:identifier "034-BFO"; - rdfs:label "site"@en; - skos:definition "(Elucidation) A site is a three-dimensional immaterial entity whose boundaries either (partially or wholly) coincide with the boundaries of one or more material entities or have locations determined in relation to some material entity"@en; - skos:example "A hole in a portion of cheese; a rabbit hole; the Grand Canyon; the Piazza San Marco; the kangaroo-joey-containing hole of a kangaroo pouch; your left nostril (a fiat part - the opening - of your left nasal cavity); the lumen of your gut; the hold of a ship; the interior of the trunk of your car; hole in an engineered floor joist"@en . - -_:genid115 a owl:Restriction; - owl:allValuesFrom _:genid116; - owl:onProperty . - -_:genid116 a owl:Class; - owl:unionOf _:genid118 . - -_:genid118 a rdf:List; - rdf:first ; - rdf:rest _:genid117 . - -_:genid117 a rdf:List; - rdf:first ; - rdf:rest rdf:nil . - -_:genid119 a owl:Restriction; - owl:allValuesFrom _:genid120; - owl:onProperty . - -_:genid120 a owl:Class; - owl:unionOf _:genid122 . - -_:genid122 a rdf:List; - rdf:first ; - rdf:rest _:genid121 . - -_:genid121 a rdf:List; - rdf:first ; - rdf:rest rdf:nil . - -_:genid123 a owl:Restriction; - owl:allValuesFrom ; - owl:onProperty . - - a owl:Class; - rdfs:subClassOf ; - dc11:identifier "024-BFO"; - rdfs:label "object"@en; - skos:definition "(Elucidation) An object is a material entity which manifests causal unity & is of a type instances of which are maximal relative to the sort of causal unity manifested"@en; - skos:example "An organism; a fish tank; a planet; a laptop; a valve; a block of marble; an ice cube"@en; - skos:scopeNote "A description of three primary sorts of causal unity is provided in Basic Formal Ontology 2.0. Specification and User Guide"@en . 
- - a owl:Class; - rdfs:subClassOf ; - dc11:identifier "074-BFO"; - rdfs:label "generically dependent continuant"@en; - skos:altLabel "g-dependent continuant"@en; - skos:definition "(Elucidation) A generically dependent continuant is an entity that exists in virtue of the fact that there is at least one of what may be multiple copies which is the content or the pattern that multiple copies would share"@en; - skos:example "The pdf file on your laptop; the pdf file that is a copy thereof on my laptop; the sequence of this protein molecule; the sequence that is a copy thereof in that protein molecule; the content that is shared by a string of dots and dashes written on a page and the transmitted Morse code signal; the content of a sentence; an engineering blueprint"@en . - - a owl:Class; - rdfs:subClassOf ; - dc11:identifier "064-BFO"; - rdfs:label "function"@en; - skos:definition "(Elucidation) A function is a disposition that exists in virtue of its bearer's physical make-up & this physical make-up is something the bearer possesses because it came into being either through evolution (in the case of natural biological entities) or through intentional design (in the case of artefacts) in order to realize processes of a certain sort"@en; - skos:example "The function of a hammer to drive in nails; the function of a heart pacemaker to regulate the beating of a heart through electricity"@en . - - a owl:Class; - rdfs:subClassOf , _:genid124, _:genid125, _:genid126, _:genid130; - dc11:identifier "084-BFO"; - rdfs:label "process boundary"@en; - skos:definition "p is a process boundary =Def p is a temporal part of a process & p has no proper temporal parts"@en; - skos:example "The boundary between the 2nd and 3rd year of your life"@en . - -_:genid124 a owl:Restriction; - owl:allValuesFrom ; - owl:onProperty . - -_:genid125 a owl:Restriction; - owl:allValuesFrom ; - owl:onProperty . - -_:genid126 a owl:Restriction; - owl:allValuesFrom _:genid127; - owl:onProperty . - -_:genid127 a owl:Class; - owl:unionOf _:genid129 . - -_:genid129 a rdf:List; - rdf:first ; - rdf:rest _:genid128 . - -_:genid128 a rdf:List; - rdf:first ; - rdf:rest rdf:nil . - -_:genid130 a owl:Restriction; - owl:allValuesFrom _:genid131; - owl:onProperty . - -_:genid131 a owl:Class; - owl:unionOf _:genid133 . - -_:genid133 a rdf:List; - rdf:first ; - rdf:rest _:genid132 . - -_:genid132 a rdf:List; - rdf:first ; - rdf:rest rdf:nil . - - a owl:Class; - rdfs:subClassOf , _:genid134, _:genid138; - owl:disjointWith ; - dc11:identifier "103-BFO"; - rdfs:label "one-dimensional temporal region"@en; - skos:definition "(Elucidation) A one-dimensional temporal region is a temporal region that is a whole that has a temporal interval and zero or more temporal intervals and temporal instants as parts"@en; - skos:example "The temporal region during which a process occurs"@en . - -_:genid134 a owl:Restriction; - owl:allValuesFrom _:genid135; - owl:onProperty . - -_:genid135 a owl:Class; - owl:unionOf _:genid137 . - -_:genid137 a rdf:List; - rdf:first ; - rdf:rest _:genid136 . - -_:genid136 a rdf:List; - rdf:first ; - rdf:rest rdf:nil . - -_:genid138 a owl:Restriction; - owl:allValuesFrom ; - owl:onProperty . 
-
- a owl:Class;
- rdfs:subClassOf , _:genid139, _:genid140;
- owl:disjointWith ;
- dc11:identifier "019-BFO";
- rdfs:label "material entity"@en;
- skos:definition "(Elucidation) A material entity is an independent continuant has some portion of matter as continuant part"@en;
- skos:example "A human being; the undetached arm of a human being; an aggregate of human beings"@en .
-
-_:genid139 a owl:Restriction;
- owl:allValuesFrom ;
- owl:onProperty .
-
-_:genid140 a owl:Restriction;
- owl:allValuesFrom _:genid141;
- owl:onProperty .
-
-_:genid141 a owl:Class;
- owl:unionOf _:genid144 .
-
-_:genid144 a rdf:List;
- rdf:first ;
- rdf:rest _:genid143 .
-
-_:genid143 a rdf:List;
- rdf:first ;
- rdf:rest _:genid142 .
-
-_:genid142 a rdf:List;
- rdf:first ;
- rdf:rest rdf:nil .
-
- a owl:Class;
- rdfs:subClassOf , _:genid145, _:genid146;
- dc11:identifier "029-BFO";
- rdfs:label "continuant fiat boundary"@en;
- skos:definition "(Elucidation) A continuant fiat boundary b is an immaterial entity that is of zero, one or two dimensions & such that there is no time t when b has a spatial region as continuant part & whose location is determined in relation to some material entity"@en;
- skos:example "As for fiat point, fiat line, fiat surface"@en .
-
-_:genid145 a owl:Restriction;
- owl:allValuesFrom ;
- owl:onProperty .
-
-_:genid146 a owl:Restriction;
- owl:allValuesFrom ;
- owl:onProperty .
-
- a owl:Class;
- rdfs:subClassOf ;
- dc11:identifier "028-BFO";
- rdfs:label "immaterial entity"@en;
- skos:definition "b is an immaterial entity =Def b is an independent continuant which is such that there is no time t when it has a material entity as continuant part"@en;
- skos:example "As for fiat point, fiat line, fiat surface, site"@en .
-
- a owl:Class;
- rdfs:subClassOf , _:genid147;
- dc11:identifier "032-BFO";
- rdfs:label "fiat line"@en;
- skos:definition "(Elucidation) A fiat line is a one-dimensional continuant fiat boundary that is continuous"@en;
- skos:example "The Equator; all geopolitical boundaries; all lines of latitude and longitude; the median sulcus of your tongue; the line separating the outer surface of the mucosa of the lower lip from the outer surface of the skin of the chin"@en .
-
-_:genid147 a owl:Restriction;
- owl:allValuesFrom _:genid148;
- owl:onProperty .
-
-_:genid148 a owl:Class;
- owl:unionOf _:genid150 .
-
-_:genid150 a rdf:List;
- rdf:first ;
- rdf:rest _:genid149 .
-
-_:genid149 a rdf:List;
- rdf:first ;
- rdf:rest rdf:nil .
-
- a owl:Class;
- rdfs:subClassOf ;
- dc11:identifier "057-BFO";
- rdfs:label "relational quality"@en;
- skos:definition "b is a relational quality =Def b is a quality & there exists c and d such that c and d are not identical & b specifically depends on c & b specifically depends on d"@en;
- skos:example "A marriage bond; an instance of love; an obligation between one person and another"@en .
-
- a owl:Class;
- rdfs:subClassOf , _:genid151 .
-
-_:genid151 owl:allValuesFrom .
-#
-# http://purl.obolibrary.org/obo/BFO_0000147
-#
-# http://purl.obolibrary.org/obo/BFO_0000148
-#
-# http://purl.obolibrary.org/obo/BFO_0000182
-#
-# http://purl.obolibrary.org/obo/BFO_0000202
-#
-# http://purl.obolibrary.org/obo/BFO_0000203
-#
-#
-#
-# #################################################################
-# #
-# # General axioms
-# #
-# #################################################################
-#
-#
-#
-#
-#
-#
-# Generated by the OWL API (version 4.5.29) https://github.com/owlcs/owlapi
-
-_:genid151 a owl:Restriction;
- owl:onProperty .
-
- dc11:identifier "033-BFO";
- rdfs:label "fiat surface"@en;
- skos:definition "(Elucidation) A fiat surface is a two-dimensional continuant fiat boundary that is self-connected"@en;
- skos:example "The surface of the Earth; the plane separating the smoking from the non-smoking zone in a restaurant"@en .
-
- a owl:Class;
- rdfs:subClassOf , _:genid152;
- dc11:identifier "031-BFO";
- rdfs:label "fiat point"@en;
- skos:definition "(Elucidation) A fiat point is a zero-dimensional continuant fiat boundary that consists of a single point"@en;
- skos:example "The geographic North Pole; the quadripoint where the boundaries of Colorado, Utah, New Mexico and Arizona meet; the point of origin of some spatial coordinate system"@en .
-
-_:genid152 a owl:Restriction;
- owl:allValuesFrom ;
- owl:onProperty .
-
- a owl:Class;
- rdfs:subClassOf , _:genid153;
- dc11:identifier "102-BFO";
- rdfs:label "zero-dimensional temporal region"@en;
- skos:definition "(Elucidation) A zero-dimensional temporal region is a temporal region that is a whole consisting of one or more separated temporal instants as parts"@en;
- skos:example "A temporal region that is occupied by a process boundary; the moment at which a finger is detached in an industrial accident"@en .
-
-_:genid153 a owl:Restriction;
- owl:allValuesFrom ;
- owl:onProperty .
-
- a owl:Class;
- rdfs:subClassOf ;
- dc11:identifier "138-BFO";
- rdfs:label "history"@en;
- skos:definition "(Elucidation) A history is a process that is the sum of the totality of processes taking place in the spatiotemporal region occupied by the material part of a material entity"@en;
- skos:example "The life of an organism from the beginning to the end of its existence"@en .
-
- a owl:Class;
- rdfs:subClassOf ;
- dc11:identifier "155-BFO";
- rdfs:label "temporal interval"@en;
- skos:definition "(Elucidation) A temporal interval is a one-dimensional temporal region that is continuous, thus without gaps or breaks"@en;
- skos:example "The year 2018."@en;
- skos:scopeNote "A one-dimensional temporal region can include as parts not only temporal intervals but also temporal instants separated from other parts by gaps."@en .
-
- a owl:Class;
- rdfs:subClassOf ;
- dc11:identifier "209-BFO";
- rdfs:label "temporal instant"@en;
- skos:definition "(Elucidation) A temporal instant is a zero-dimensional temporal region that has no proper temporal part"@en;
- skos:example "The millennium"@en .
-
-_:genid154 a owl:AllDisjointClasses;
- owl:members _:genid157 .
-
-_:genid157 a rdf:List;
- rdf:first ;
- rdf:rest _:genid156 .
-
-_:genid156 a rdf:List;
- rdf:first ;
- rdf:rest _:genid155 .
-
-_:genid155 a rdf:List;
- rdf:first ;
- rdf:rest rdf:nil .
-
-_:genid158 a owl:AllDisjointClasses;
- owl:members _:genid161 .
-
-_:genid161 a rdf:List;
- rdf:first ;
- rdf:rest _:genid160 .
-
-_:genid160 a rdf:List;
- rdf:first ;
- rdf:rest _:genid159 .
-
-_:genid159 a rdf:List;
- rdf:first ;
- rdf:rest rdf:nil .
-
-_:genid162 a owl:AllDisjointClasses;
- owl:members _:genid166 .
-
-_:genid166 a rdf:List;
- rdf:first ;
- rdf:rest _:genid165 .
-
-_:genid165 a rdf:List;
- rdf:first ;
- rdf:rest _:genid164 .
-
-_:genid164 a rdf:List;
- rdf:first ;
- rdf:rest _:genid163 .
-
-_:genid163 a rdf:List;
- rdf:first ;
- rdf:rest rdf:nil .
-
-_:genid167 a owl:AllDisjointClasses;
- owl:members _:genid171 .
-
-_:genid171 a rdf:List;
- rdf:first ;
- rdf:rest _:genid170 .
-
-_:genid170 a rdf:List;
- rdf:first ;
- rdf:rest _:genid169 .
-
-_:genid169 a rdf:List;
- rdf:first ;
- rdf:rest _:genid168 .
-
-_:genid168 a rdf:List;
- rdf:first ;
- rdf:rest rdf:nil .
-
-_:genid172 a owl:AllDisjointClasses;
- owl:members _:genid175 .
-
-_:genid175 a rdf:List;
- rdf:first ;
- rdf:rest _:genid174 .
-
-_:genid174 a rdf:List;
- rdf:first ;
- rdf:rest _:genid173 .
-
-_:genid173 a rdf:List;
- rdf:first ;
- rdf:rest rdf:nil .

From a2fe9e67f72bf93c49458af6ec154ebcdd657c22 Mon Sep 17 00:00:00 2001
From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com>
Date: Wed, 11 Sep 2024 16:49:00 +0200
Subject: [PATCH 081/146] handled the rdf conversion exception when the
 ontology isn't available as a raw file but only as an IRI for #46

---
 .../java/uk/ac/ebi/rdf2json/OntologyGraph.java | 15 +++++++++++----
 1 file changed, 11 insertions(+), 4 deletions(-)

diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java
index 9bde2fd9d..b93626eee 100644
--- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java
+++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java
@@ -35,12 +35,9 @@
 import org.semanticweb.owlapi.formats.OWLXMLDocumentFormat;
 import org.semanticweb.owlapi.model.*;
 import javax.net.ssl.HttpsURLConnection;
-import java.io.File;
 import java.io.*;
 import java.net.HttpURLConnection;
-import java.io.FileInputStream;
 import java.net.URL;
-import java.io.FileNotFoundException;
 import java.net.URLConnection;
 
 
@@ -135,6 +132,7 @@ private OWLOntology convertOntologyToRDF(String url, String outputFile) throws I
         OWLOntology ont = null;
         InputStream is = null;
         URLConnection con = null;
+        String originalUrl = url;
         boolean isParserException = false;
         try {
             boolean isRDF = true;
@@ -166,8 +164,17 @@ private OWLOntology convertOntologyToRDF(String url, String outputFile) throws I
                 } catch (IOException e) {
                     e.printStackTrace();
                 }
-                ont = ontManager.loadOntologyFromOntologyDocument(is);
+                try {
+                    ont = ontManager.loadOntologyFromOntologyDocument(is);
+                } catch (Exception e) {
+                    isParserException = true;
+                }
+            }
+
+            if(isParserException){
+                ont = ontManager.loadOntologyFromOntologyDocument(IRI.create(originalUrl));
             }
+
             OWLDocumentFormat odf = ontManager.getOntologyFormat(ont);
             String lang1 = odf.getKey();
             String ext = ".owl";

From ade66cf56af572764fc70a0a53ca9b5d87aa2528 Mon Sep 17 00:00:00 2001
From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com>
Date: Wed, 11 Sep 2024 18:15:32 +0200
Subject: [PATCH 082/146] prevented double ontology assignment for #46

---
 .../rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java | 1 +
 1 file changed, 1 insertion(+)

diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java
index b93626eee..5dea1da83 100644
--- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java
+++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java
@@ -166,6 +166,7 @@ private OWLOntology convertOntologyToRDF(String url, String outputFile) throws I
             }
             try {
                 ont = ontManager.loadOntologyFromOntologyDocument(is);
+                isParserException = false;
             } catch (Exception e) {
                 isParserException = true;
             }
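Patches 081 and 082 settle on a two-step loading strategy: parse the downloaded stream first, and only when that parse throws fall back to letting the OWL API dereference the IRI itself. A minimal standalone sketch of that pattern follows; the class name and URL are invented for illustration, this is not code from the patch.

import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.*;

import java.io.InputStream;
import java.net.URL;

public class FallbackLoaderSketch {
    public static void main(String[] args) throws Exception {
        String url = "https://example.org/ontology.owl"; // assumed placeholder
        OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
        OWLOntology ontology;
        try (InputStream is = new URL(url).openStream()) {
            // First attempt: parse whatever the server returns as a raw document.
            ontology = manager.loadOntologyFromOntologyDocument(is);
        } catch (Exception parseFailure) {
            // Second attempt: a fresh manager avoids clashes with any partial
            // load, and the OWL API resolves the IRI itself, following
            // redirects and content negotiation.
            manager = OWLManager.createOWLOntologyManager();
            ontology = manager.loadOntologyFromOntologyDocument(IRI.create(url));
        }
        System.out.println("Loaded: " + ontology.getOntologyID());
    }
}

Loading by IRI is presumably what makes ontologies work that are published only as a resolvable IRI rather than as a raw file, which is exactly the case #46 describes.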
 insertion(+), 2 deletions(-)

diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java
index 5dea1da83..f514d4817 100644
--- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java
+++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java
@@ -132,7 +132,6 @@ private OWLOntology convertOntologyToRDF(String url, String outputFile) throws I
         OWLOntology ont = null;
         InputStream is = null;
         URLConnection con = null;
-        String originalUrl = url;
         boolean isParserException = false;
         try {
             boolean isRDF = true;
@@ -173,7 +172,7 @@ private OWLOntology convertOntologyToRDF(String url, String outputFile) throws I
             }
 
             if(isParserException){
-                ont = ontManager.loadOntologyFromOntologyDocument(IRI.create(originalUrl));
+                ont = ontManager.loadOntologyFromOntologyDocument(IRI.create(url));
             }
 
             OWLDocumentFormat odf = ontManager.getOntologyFormat(ont);

From 4b1409cc97337d16a50c5ec97134472d98d2e336 Mon Sep 17 00:00:00 2001
From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com>
Date: Fri, 13 Sep 2024 14:12:44 +0200
Subject: [PATCH 084/146] added an obo to owl conversion routine to rdf2json
 for #47

---
 .../uk/ac/ebi/rdf2json/OntologyGraph.java | 64 +++++++++++++++----
 1 file changed, 50 insertions(+), 14 deletions(-)

diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java
index 29404c110..4385a86b0 100644
--- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java
+++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java
@@ -3,6 +3,8 @@
 import com.google.gson.stream.JsonWriter;
 
 import org.apache.jena.riot.RDFLanguages;
+import org.obolibrary.robot.IOHelper;
+import org.semanticweb.owlapi.formats.RDFXMLDocumentFormat;
 import uk.ac.ebi.rdf2json.annotators.*;
 import uk.ac.ebi.rdf2json.helpers.RdfListEvaluator;
 import uk.ac.ebi.rdf2json.properties.*;
@@ -73,7 +75,7 @@ private void parseRDF(String url, boolean convertToRDF) {
         try {
             if (loadLocalFiles && !url.contains("://")) {
                 logger.debug("Using local file for {}", url);
-                sourceFileTimestamp = new File(url).lastModified();
+                sourceFileTimestamp = new File(url).lastModified();
                 createParser(RDFLanguages.filenameToLang(url, Lang.RDFXML))
                         .source(new FileInputStream(url)).parse(this);
             } else {
@@ -82,7 +84,7 @@ private void parseRDF(String url, boolean convertToRDF) {
                     try {
                         FileInputStream is = new FileInputStream(existingDownload);
                         logger.debug("Using predownloaded file for {}", url);
-                        sourceFileTimestamp = new File(existingDownload).lastModified();
+                        sourceFileTimestamp = new File(existingDownload).lastModified();
                         Lang lang = null;
                         try {
                             String existingDownloadMimeType = Files.readString(Paths.get(existingDownload + ".mimetype"));
@@ -95,7 +97,7 @@ private void parseRDF(String url, boolean convertToRDF) {
                         createParser(lang).source(is).parse(this);
                     } catch (Exception e) {
                         logger.error("Downloading (not predownloaded) {}", url);
-                        sourceFileTimestamp = System.currentTimeMillis();
+                        sourceFileTimestamp = System.currentTimeMillis();
                         createParser(null).source(url).parse(this);
                     }
                 } else {
@@ -105,15 +107,18 @@ private void parseRDF(String url, boolean convertToRDF) {
                         OWLOntology ont = convertOntologyToRDF(url, outputFile);
                         OWLDocumentFormat odf = ont.getOWLOntologyManager().getOntologyFormat(ont);
                         String lang1 = odf.getKey();
+                        logger.info("language: "+lang1);
                         String ext = ".owl";
                         if (lang1.contains("Turtle"))
                             ext = ".ttl";
+                        else if (lang1.contains("OBO Format"))
+                            ext = ".owl";
                         String fileNameInUrl = outputFile + ext;
                         Path resourceDirectory = Paths.get(OntologyGraph.class.getProtectionDomain().getCodeSource().getLocation().toURI()).getParent();
                         url = Paths.get(resourceDirectory.resolve(fileNameInUrl).toUri()).toString();
                     }
-
-                    sourceFileTimestamp = System.currentTimeMillis();
+                    logger.info("url: "+url);
+                    sourceFileTimestamp = System.currentTimeMillis();
                     createParser(null).source(url).parse(this);
                 }
             }
@@ -126,15 +131,13 @@ private void parseRDF(String url, boolean convertToRDF) {
         }
     }
 
-    private OWLOntology convertOntologyToRDF(String url, String outputFile) throws IOException {
+    private OWLOntology loadOntology(String url) throws IOException {
         OWLOntologyManager ontManager = OWLManager.createOWLOntologyManager();
-        FileOutputStream fos = null;
         OWLOntology ont = null;
         InputStream is = null;
         URLConnection con = null;
         boolean isParserException = false;
         try {
-            boolean isRDF = true;
             boolean isDefaultURLFailed = false;
             try {
                 URL tempURL = new URL(url);
@@ -174,14 +177,48 @@ private OWLOntology convertOntologyToRDF(String url, String outputFile) throws I
             if(isParserException){
                 ont = ontManager.loadOntologyFromOntologyDocument(IRI.create(url));
             }
+        } catch (OWLOntologyCreationException e) {
+            throw new RuntimeException(e);
+        } finally {
+            if (is != null)
+                is.close();
+        }
+        return ont;
+    }
 
-            OWLDocumentFormat odf = ontManager.getOntologyFormat(ont);
+    private OWLOntology convertOntologyToRDF(String url, String outputFile) throws IOException {
+        FileOutputStream fos = null;
+        OWLOntology ont = loadOntology(url);
+        try {
+            boolean isRDF = true;
+            OWLDocumentFormat odf = ont.getOWLOntologyManager().getOntologyFormat(ont);
             String lang1 = odf.getKey();
             String ext = ".owl";
             if (lang1.contains("Turtle"))
                 ext = ".ttl";
+            else if (lang1.contains("OBO Format")){
+                ext = ".owl";
+                Path resourceDirectory = Paths.get(OntologyGraph.class.getProtectionDomain().getCodeSource().getLocation().toURI()).getParent();
+                String filePath = resourceDirectory.resolve(outputFile+ext).toString();
+
+                IOHelper iohelper = new IOHelper();
+                iohelper.saveOntology(ont,new RDFXMLDocumentFormat(),IRI.create(new File(filePath)),true);
+                // below is the procedure to do this without robot and with net.sourceforge.owlapi only.
+                /*OWLAPIOwl2Obo converter = new OWLAPIOwl2Obo(ont.getOWLOntologyManager());
+                OBODoc oboDoc = converter.convert(ont);
+
+                OBOFormatWriter writer = new OBOFormatWriter();
+                writer.setCheckStructure(true);
+                writer.write(oboDoc, new File(filePath));
+                OWLDocumentFormat format = new RDFXMLDocumentFormat();
+                ont.getOWLOntologyManager().saveOntology(ont, format, fos);*/
+                logger.info("initial format: "+ont.getOWLOntologyManager().getOntologyFormat(ont));
+                ont = loadOntology("file:"+filePath);
+
+                logger.info("converted to: "+ont.getOWLOntologyManager().getOntologyFormat(ont));
+            }
+
             else if (!lang1.contains("RDF")) {
-                isRDF = false;
                 OWLDocumentFormat odf1 = new OWLXMLDocumentFormat();
                 Path resourceDirectory = Paths.get(OntologyGraph.class.getProtectionDomain().getCodeSource().getLocation().toURI()).getParent();
                 String filePath = resourceDirectory.resolve(outputFile+ext).toString();
@@ -196,22 +233,21 @@ else if (!lang1.contains("RDF")) {
                 fos = new FileOutputStream(filePath);
                 ont.saveOntology(fos);
             }
-        } catch (OWLOntologyCreationException e) {
-            e.printStackTrace();
         } catch (IOException e) {
            e.printStackTrace();
        } catch (OWLOntologyStorageException e) {
            e.printStackTrace();
        } catch (URISyntaxException e) {
            throw new RuntimeException(e);
+       } catch (Exception e) {
+           throw new RuntimeException(e);
        } finally {
            if (fos != null)
                fos.close();
-           if (is != null)
-               is.close();
        }
        return ont;
    }
+
    private String replaceURLByProtocol(URLConnection con, String url) {
        if (con instanceof HttpsURLConnection) {
            url = url.replace("https:", "http:");
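Patch 084 routes OBO input through ROBOT's IOHelper and keeps a commented-out pure OWL API variant for reference. For orientation, a minimal OBO to RDF/XML conversion using only the OWL API could look like the sketch below; the file names are assumptions, not paths from the patch.

import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.formats.RDFXMLDocumentFormat;
import org.semanticweb.owlapi.model.*;

import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStream;

public class OboToRdfXmlSketch {
    public static void main(String[] args) throws Exception {
        OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
        // The OWL API picks the OBO parser from the document content.
        OWLOntology ontology = manager.loadOntologyFromOntologyDocument(new File("input.obo"));
        try (OutputStream out = new FileOutputStream("output.owl")) {
            // Saving with a different document format performs the conversion.
            manager.saveOntology(ontology, new RDFXMLDocumentFormat(), out);
        }
    }
}

The IOHelper.saveOntology call used in the patch additionally runs ROBOT's output checking (the trailing boolean appears to toggle it), which is presumably why ROBOT was preferred for OBO input over the plain saveOntology path shown here.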
From 6f0a27f936272a5f310fb2bf9ebb9eed688ab004 Mon Sep 17 00:00:00 2001
From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com>
Date: Fri, 13 Sep 2024 15:43:41 +0200
Subject: [PATCH 085/146] ensured structure of rdf ontologies and refactored
 for #47

---
 .../uk/ac/ebi/rdf2json/OntologyGraph.java | 52 +++++++++----------
 1 file changed, 25 insertions(+), 27 deletions(-)

diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java
index 4385a86b0..06d26fcdd 100644
--- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java
+++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java
@@ -194,51 +194,35 @@ private OWLOntology convertOntologyToRDF(String url, String outputFile) throws I
             OWLDocumentFormat odf = ont.getOWLOntologyManager().getOntologyFormat(ont);
             String lang1 = odf.getKey();
             String ext = ".owl";
-            if (lang1.contains("Turtle"))
+            if (lang1.contains("Turtle")){
+                isRDF = false;
                 ext = ".ttl";
-            else if (lang1.contains("OBO Format")){
+                fos = fileOutPutStreamForExecutionPath(outputFile+ext);
+                ont.saveOntology(fos);
+            } else if (lang1.contains("OBO Format")){
+                isRDF = false;
                 ext = ".owl";
                 Path resourceDirectory = Paths.get(OntologyGraph.class.getProtectionDomain().getCodeSource().getLocation().toURI()).getParent();
                 String filePath = resourceDirectory.resolve(outputFile+ext).toString();
-
                 IOHelper iohelper = new IOHelper();
                 iohelper.saveOntology(ont,new RDFXMLDocumentFormat(),IRI.create(new File(filePath)),true);
-                // below is the procedure to do this without robot and with net.sourceforge.owlapi only.
-                /*OWLAPIOwl2Obo converter = new OWLAPIOwl2Obo(ont.getOWLOntologyManager());
-                OBODoc oboDoc = converter.convert(ont);
-
-                OBOFormatWriter writer = new OBOFormatWriter();
-                writer.setCheckStructure(true);
-                writer.write(oboDoc, new File(filePath));
-                OWLDocumentFormat format = new RDFXMLDocumentFormat();
-                ont.getOWLOntologyManager().saveOntology(ont, format, fos);*/
                 logger.info("initial format: "+ont.getOWLOntologyManager().getOntologyFormat(ont));
                 ont = loadOntology("file:"+filePath);
                 logger.info("converted to: "+ont.getOWLOntologyManager().getOntologyFormat(ont));
-            }
-
-            else if (!lang1.contains("RDF")) {
+            } else if (!lang1.contains("RDF")) {
                 OWLDocumentFormat odf1 = new OWLXMLDocumentFormat();
-                Path resourceDirectory = Paths.get(OntologyGraph.class.getProtectionDomain().getCodeSource().getLocation().toURI()).getParent();
-                String filePath = resourceDirectory.resolve(outputFile+ext).toString();
-                fos = new FileOutputStream(filePath);
+                fos = fileOutPutStreamForExecutionPath(outputFile+ext);
                 ont.saveOntology(odf1, fos);
             }
+
             if (isRDF) {
-                OntologyGraph.class.getProtectionDomain().getCodeSource().getLocation()
-                        .toURI().getPath();
-                Path resourceDirectory = Paths.get(OntologyGraph.class.getProtectionDomain().getCodeSource().getLocation().toURI()).getParent();
-                String filePath = resourceDirectory.resolve(outputFile+ext).toString();
-                fos = new FileOutputStream(filePath);
-                ont.saveOntology(fos);
+                fos = fileOutPutStreamForExecutionPath(outputFile+ext);
+                ont.saveOntology(new RDFXMLDocumentFormat(),fos);
             }
         } catch (IOException e) {
             e.printStackTrace();
         } catch (OWLOntologyStorageException e) {
             e.printStackTrace();
-        } catch (URISyntaxException e) {
-            throw new RuntimeException(e);
         } catch (Exception e) {
             throw new RuntimeException(e);
         } finally {
@@ -248,6 +232,20 @@ else if (!lang1.contains("RDF")) {
         return ont;
     }
 
+    private FileOutputStream fileOutPutStreamForExecutionPath(String outputFile) {
+        FileOutputStream fos;
+        try {
+            Path resourceDirectory = Paths.get(OntologyGraph.class.getProtectionDomain().getCodeSource().getLocation().toURI()).getParent();
+            String filePath = resourceDirectory.resolve(outputFile).toString();
+            fos = new FileOutputStream(filePath);
+        } catch (FileNotFoundException e) {
+            throw new RuntimeException(e);
+        } catch (URISyntaxException e) {
+            throw new RuntimeException(e);
+        }
+        return fos;
+    }
+
     private String replaceURLByProtocol(URLConnection con, String url) {
         if (con instanceof HttpsURLConnection) {
             url = url.replace("https:", "http:");

From cd5b8c1e18a82715bf8a264ff36192b32f4474aa Mon Sep 17 00:00:00 2001
From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com>
Date: Fri, 13 Sep 2024 20:26:43 +0200
Subject: [PATCH 086/146] implemented save functionality of original and
 converted ontologies and refactored for #47

---
 .../uk/ac/ebi/rdf2json/OntologyGraph.java | 67 +++++++------------
 1 file changed, 26 insertions(+), 41 deletions(-)

diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java
index 06d26fcdd..6d3ba37f3 100644
--- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java
+++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java
@@ -4,7 +4,7 @@
 import com.google.gson.stream.JsonWriter;
 
 import org.apache.jena.riot.RDFLanguages;
 import org.obolibrary.robot.IOHelper;
-import org.semanticweb.owlapi.formats.RDFXMLDocumentFormat;
+import org.semanticweb.owlapi.formats.*;
 import uk.ac.ebi.rdf2json.annotators.*;
 import uk.ac.ebi.rdf2json.helpers.RdfListEvaluator;
 import uk.ac.ebi.rdf2json.properties.*;
@@ -34,7 +34,6 @@
 import org.slf4j.LoggerFactory;
 
 import org.semanticweb.owlapi.apibinding.OWLManager;
-import org.semanticweb.owlapi.formats.OWLXMLDocumentFormat;
 import org.semanticweb.owlapi.model.*;
 import javax.net.ssl.HttpsURLConnection;
 import java.io.*;
@@ -70,7 +69,7 @@ private RDFParserBuilder createParser(Lang lang) {
         }
     }
 
-    private void parseRDF(String url, boolean convertToRDF) {
+    private void parseRDF(String url, boolean convertToRDF, String id) {
 
         try {
             if (loadLocalFiles && !url.contains("://")) {
@@ -103,19 +102,14 @@ private void parseRDF(String url, boolean convertToRDF, String id) {
                 } else {
                     logger.debug("Downloading (no predownload path provided) {}", url);
                     if (convertToRDF) {
-                        String outputFile = "result";
-                        OWLOntology ont = convertOntologyToRDF(url, outputFile);
-                        OWLDocumentFormat odf = ont.getOWLOntologyManager().getOntologyFormat(ont);
-                        String lang1 = odf.getKey();
-                        logger.info("language: "+lang1);
-                        String ext = ".owl";
-                        if (lang1.contains("Turtle"))
+                        OWLOntology ont = convertOntologyToRDF(url, id);
+                        OWLDocumentFormat format = ont.getOWLOntologyManager().getOntologyFormat(ont);
+                        logger.info("language: "+format.getKey());
+                        String ext = ".rdf";
+                        if (format instanceof TurtleDocumentFormat)
                             ext = ".ttl";
-                        else if (lang1.contains("OBO Format"))
-                            ext = ".owl";
-                        String fileNameInUrl = outputFile + ext;
                         Path resourceDirectory = Paths.get(OntologyGraph.class.getProtectionDomain().getCodeSource().getLocation().toURI()).getParent();
-                        url = Paths.get(resourceDirectory.resolve(fileNameInUrl).toUri()).toString();
+                        url = Paths.get(resourceDirectory.resolve(id+ext).toUri()).toString();
                     }
                     logger.info("url: "+url);
                     sourceFileTimestamp = System.currentTimeMillis();
@@ -190,34 +184,25 @@ private OWLOntology convertOntologyToRDF(String url, String outputFile) throws I
         FileOutputStream fos = null;
         OWLOntology ont = loadOntology(url);
         try {
-            boolean isRDF = true;
-            OWLDocumentFormat odf = ont.getOWLOntologyManager().getOntologyFormat(ont);
-            String lang1 = odf.getKey();
-            String ext = ".owl";
-            if (lang1.contains("Turtle")){
-                isRDF = false;
-                ext = ".ttl";
-                fos = fileOutPutStreamForExecutionPath(outputFile+ext);
-                ont.saveOntology(fos);
-            } else if (lang1.contains("OBO Format")){
-                isRDF = false;
-                ext = ".owl";
-                Path resourceDirectory = Paths.get(OntologyGraph.class.getProtectionDomain().getCodeSource().getLocation().toURI()).getParent();
-                String filePath = resourceDirectory.resolve(outputFile+ext).toString();
-                IOHelper iohelper = new IOHelper();
-                iohelper.saveOntology(ont,new RDFXMLDocumentFormat(),IRI.create(new File(filePath)),true);
-                logger.info("initial format: "+ont.getOWLOntologyManager().getOntologyFormat(ont));
-                ont = loadOntology("file:"+filePath);
-                logger.info("converted to: "+ont.getOWLOntologyManager().getOntologyFormat(ont));
-            } else if (!lang1.contains("RDF")) {
-                OWLDocumentFormat odf1 = new OWLXMLDocumentFormat();
-                fos = fileOutPutStreamForExecutionPath(outputFile+ext);
-                ont.saveOntology(odf1, fos);
+            OWLDocumentFormat format = ont.getOWLOntologyManager().getOntologyFormat(ont);
+            if (format instanceof OBODocumentFormat){
+                Path resourceDirectory = Paths.get(OntologyGraph.class.getProtectionDomain().getCodeSource().getLocation().toURI()).getParent();
+                logger.info("Saving the original "+format.getKey()+" format ontology to "+outputFile+".obo");
+                fos = getFileOutPutStreamForExecutionPath(outputFile+".obo");
+                ont.saveOntology(format, fos);
+                logger.info("Saving the converted RDF/XML Syntax format ontology to "+outputFile+".rdf");
+                String filePath = resourceDirectory.resolve(outputFile+".rdf").toString();
+                IOHelper iohelper = new IOHelper();
+                iohelper.saveOntology(ont,new RDFXMLDocumentFormat(),IRI.create(new File(filePath)),true);
+                ont = loadOntology("file:"+filePath);
+            } else {
+                String ext = (format instanceof TurtleDocumentFormat) ? ".ttl" : ".owl";
+                logger.info("Saving the original "+format.getKey()+" format ontology to "+outputFile+ext);
+                fos = getFileOutPutStreamForExecutionPath(outputFile+ext);
+                ont.saveOntology(format, fos);
+                logger.info("Saving the converted RDF/XML Syntax format ontology to "+outputFile+".rdf");
+                fos = getFileOutPutStreamForExecutionPath(outputFile+".rdf");
+                ont.saveOntology(new RDFXMLDocumentFormat(), fos);
-            }
-
-            if (isRDF) {
-                fos = fileOutPutStreamForExecutionPath(outputFile+ext);
-                ont.saveOntology(new RDFXMLDocumentFormat(),fos);
             }
         } catch (IOException e) {
             e.printStackTrace();
@@ -232,7 +217,7 @@ else if (!lang1.contains("RDF")) {
         return ont;
     }
 
-    private FileOutputStream fileOutPutStreamForExecutionPath(String outputFile) {
+    private FileOutputStream getFileOutPutStreamForExecutionPath(String outputFile) {
         FileOutputStream fos;
         try {
             Path resourceDirectory = Paths.get(OntologyGraph.class.getProtectionDomain().getCodeSource().getLocation().toURI()).getParent();
@@ -304,7 +289,7 @@ private String urlToFilename(String url) {
         }
 
         logger.debug("load ontology from: {}", url);
-        parseRDF(url, convertToRDF);
+        parseRDF(url, convertToRDF, config.getOrDefault("id","result").toString());
 
         // Before we evaluate imports, mark all the nodes so far as not imported
         for(String id : nodes.keySet()) {
@@ -320,7 +305,7 @@ private String urlToFilename(String url) {
             importUrls.remove(0);
 
             logger.debug("import: {}", importUrl);
-            parseRDF(importUrl, convertToRDF);
+            parseRDF(importUrl, convertToRDF,config.getOrDefault("id","result").toString());
         }
 
         // Now the imports are done, mark everything else as imported
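The net effect of patches 085 and 086 is that format handling no longer matches strings in the format key but dispatches on the class of the parsed OWLDocumentFormat, and both the original and the RDF/XML copy are written out. A tiny sketch of that instanceof dispatch follows; the OBO and Turtle mappings mirror the patch, while treating everything else as .owl is an assumption.

import org.semanticweb.owlapi.formats.OBODocumentFormat;
import org.semanticweb.owlapi.formats.RDFXMLDocumentFormat;
import org.semanticweb.owlapi.formats.TurtleDocumentFormat;
import org.semanticweb.owlapi.model.OWLDocumentFormat;

public class FormatExtensionSketch {
    // The parsed format object, not a string key, decides the extension.
    static String extensionFor(OWLDocumentFormat format) {
        if (format instanceof OBODocumentFormat) return ".obo";
        if (format instanceof TurtleDocumentFormat) return ".ttl";
        return ".owl"; // assumed default for all other formats
    }

    public static void main(String[] args) {
        System.out.println(extensionFor(new TurtleDocumentFormat())); // .ttl
        System.out.println(extensionFor(new RDFXMLDocumentFormat())); // .owl
    }
}

Dispatching on the format class is less fragile than substring matching on format keys, which is presumably why the later commits keep moving in this direction.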
From 6560381adffc1d9cda00297fe87d15dda602239a Mon Sep 17 00:00:00 2001
From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com>
Date: Fri, 13 Sep 2024 21:10:17 +0200
Subject: [PATCH 087/146] ensured the converted rdf is always parsed and
 refactored the exceptions for #47

---
 .../uk/ac/ebi/rdf2json/OntologyGraph.java | 42 ++++++-------------
 1 file changed, 13 insertions(+), 29 deletions(-)

diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java
index 6d3ba37f3..947ca2e22 100644
--- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java
+++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java
@@ -104,12 +104,9 @@ private void parseRDF(String url, boolean convertToRDF, String id) {
                     if (convertToRDF) {
                         OWLOntology ont = convertOntologyToRDF(url, id);
                         OWLDocumentFormat format = ont.getOWLOntologyManager().getOntologyFormat(ont);
-                        logger.info("language: "+format.getKey());
-                        String ext = ".rdf";
-                        if (format instanceof TurtleDocumentFormat)
-                            ext = ".ttl";
+                        logger.info("parsing "+id+" ontology in format: "+format.getKey());
                         Path resourceDirectory = Paths.get(OntologyGraph.class.getProtectionDomain().getCodeSource().getLocation().toURI()).getParent();
-                        url = Paths.get(resourceDirectory.resolve(id+ext).toUri()).toString();
+                        url = Paths.get(resourceDirectory.resolve(id+".rdf").toUri()).toString();
                     }
                     logger.info("url: "+url);
                     sourceFileTimestamp = System.currentTimeMillis();
@@ -127,50 +124,34 @@ private void parseRDF(String url, boolean convertToRDF, String id) {
 
     private OWLOntology loadOntology(String url) throws IOException {
         OWLOntologyManager ontManager = OWLManager.createOWLOntologyManager();
-        OWLOntology ont = null;
+        OWLOntology ont;
         InputStream is = null;
         URLConnection con = null;
-        boolean isParserException = false;
         try {
-            boolean isDefaultURLFailed = false;
             try {
                 URL tempURL = new URL(url);
                 con = tempURL.openConnection();
                 is = tempURL.openStream();
             } catch (IOException e) {
-                isDefaultURLFailed = true;
-            }
-            if (isDefaultURLFailed) {
                 url = replaceURLByProtocol(con, url);
                 try {
                     is = new URL(url).openStream();
-                } catch (IOException e) {
-                    e.printStackTrace();
+                } catch (IOException ioe) {
+                    ioe.printStackTrace();
                 }
             }
+
             try {
                 ont = ontManager.loadOntologyFromOntologyDocument(is);
             } catch (Exception e) {
-                isParserException = true;
-            }
-            if (isParserException) {
                 url = replaceURLByProtocol(con, url);
                 try {
                     is = new URL(url).openStream();
-                } catch (IOException e) {
-                    e.printStackTrace();
-                }
-                try {
                     ont = ontManager.loadOntologyFromOntologyDocument(is);
-                    isParserException = false;
-                } catch (Exception e) {
-                    isParserException = true;
+                } catch (IOException ioe) {
+                    ont = ontManager.loadOntologyFromOntologyDocument(IRI.create(url));
                 }
             }
-
-            if(isParserException){
-                ont = ontManager.loadOntologyFromOntologyDocument(IRI.create(url));
-            }
         } catch (OWLOntologyCreationException e) {
             throw new RuntimeException(e);
         } finally {
@@ -203,11 +184,14 @@ private OWLOntology convertOntologyToRDF(String url, String outputFile) throws I
                 logger.info("Saving the converted RDF/XML Syntax format ontology to "+outputFile+".rdf");
                 fos = getFileOutPutStreamForExecutionPath(outputFile+".rdf");
                 ont.saveOntology(new RDFXMLDocumentFormat(), fos);
+                Path resourceDirectory = Paths.get(OntologyGraph.class.getProtectionDomain().getCodeSource().getLocation().toURI()).getParent();
+                String filePath = resourceDirectory.resolve(outputFile+".rdf").toString();
+                ont = loadOntology("file:"+filePath);
             }
         } catch (IOException e) {
-            e.printStackTrace();
+            throw new RuntimeException(e);
        } catch (OWLOntologyStorageException e) {
-            e.printStackTrace();
+            throw new RuntimeException(e);
        } catch (Exception e) {
            throw new RuntimeException(e);
        } finally {
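The replaceURLByProtocol helper that these commits keep exercising simply flips the URL scheme after a failed attempt. Sketched in isolation below; the helper mirrors the patch in spirit, while the main flow and URL are invented scaffolding.

import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLConnection;

import javax.net.ssl.HttpsURLConnection;

public class ProtocolSwapSketch {
    // If an https connection failed, retry over http, and vice versa.
    static String swapProtocol(URLConnection con, String url) {
        if (con instanceof HttpsURLConnection) {
            return url.replace("https:", "http:");
        } else if (con instanceof HttpURLConnection) {
            return url.replace("http:", "https:");
        }
        return url;
    }

    public static void main(String[] args) throws Exception {
        String url = "https://example.org/ontology.owl"; // assumed placeholder
        URLConnection con = new URL(url).openConnection(); // not yet connected
        System.out.println(swapProtocol(con, url)); // http://example.org/ontology.owl
    }
}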
Science and Technology + */ +public class OntologyConversion { + private static final Logger logger = LoggerFactory.getLogger(OntologyConversion.class); + + private OWLOntology ontology; + + private String extOriginal; + + private String extConverted; + + public OntologyConversion(String url, String id, OWLDocumentFormat convertedFormat) throws IOException { + convertOntologyToRDF(url,id,convertedFormat); + } + + public OWLOntology getOntology() { + return ontology; + } + + public String getExtOriginal() { + return extOriginal; + } + + public String getExtConverted() { + return extConverted; + } + + private void convertOntologyToRDF(String url, String outputFile, OWLDocumentFormat convertedFormat) throws IOException { + FileOutputStream fos = null; + OWLOntology ont = loadOntology(url); + try { + OWLDocumentFormat format = ont.getOWLOntologyManager().getOntologyFormat(ont); + extOriginal = getExtension(format); + extConverted = getExtension(convertedFormat); + if (extOriginal.equals(extConverted)){ + extOriginal = extOriginal+"1"; + extConverted = extConverted+"2"; + } + if (format instanceof OBODocumentFormat){ + Path resourceDirectory = Paths.get(OntologyGraph.class.getProtectionDomain().getCodeSource().getLocation().toURI()).getParent(); + logger.info("Saving the original "+format.getKey()+" format ontology to "+outputFile+extOriginal); + fos = getFileOutPutStreamForExecutionPath(outputFile+extOriginal); + ont.saveOntology(format, fos); + logger.info("Saving the converted RDF/XML Syntax format ontology to "+outputFile+extConverted); + String filePath = resourceDirectory.resolve(outputFile+extConverted).toString(); + IOHelper iohelper = new IOHelper(); + iohelper.saveOntology(ont,convertedFormat, IRI.create(new File(filePath)),true); + ont = loadOntology("file:"+filePath); + } else { + logger.info("Saving the original "+format.getKey()+" format ontology to "+outputFile+extOriginal); + fos = getFileOutPutStreamForExecutionPath(outputFile+extOriginal); + ont.saveOntology(format, fos); + logger.info("Saving the converted RDF/XML Syntax format ontology to "+outputFile+extConverted); + fos = getFileOutPutStreamForExecutionPath(outputFile+extConverted); + ont.saveOntology(new RDFXMLDocumentFormat(), fos); + Path resourceDirectory = Paths.get(OntologyGraph.class.getProtectionDomain().getCodeSource().getLocation().toURI()).getParent(); + String filePath = resourceDirectory.resolve(outputFile+extConverted).toString(); + ont = loadOntology("file:"+filePath); + } + } catch (IOException e) { + throw new RuntimeException(e); + } catch (OWLOntologyStorageException e) { + throw new RuntimeException(e); + } catch (Exception e) { + throw new RuntimeException(e); + } finally { + if (fos != null) + fos.close(); + } + ontology = ont; + } + + private OWLOntology loadOntology(String url) throws IOException { + OWLOntologyManager ontManager = OWLManager.createOWLOntologyManager(); + OWLOntology ont; + InputStream is = null; + URLConnection con = null; + try { + try { + URL tempURL = new URL(url); + con = tempURL.openConnection(); + is = tempURL.openStream(); + } catch (IOException e) { + url = replaceURLByProtocol(con, url); + try { + is = new URL(url).openStream(); + } catch (IOException ioe) { + ioe.printStackTrace(); + } + } + + try { + ont = ontManager.loadOntologyFromOntologyDocument(is); + } catch (Exception e) { + url = replaceURLByProtocol(con, url); + try { + is = new URL(url).openStream(); + ont = ontManager.loadOntologyFromOntologyDocument(is); + } catch (IOException ioe) { + ont = 
ontManager.loadOntologyFromOntologyDocument(IRI.create(url)); + } + } + } catch (OWLOntologyCreationException e) { + throw new RuntimeException(e); + } finally { + if (is != null) + is.close(); + } + return ont; + } + + private FileOutputStream getFileOutPutStreamForExecutionPath(String outputFile) { + FileOutputStream fos; + try { + Path resourceDirectory = Paths.get(OntologyGraph.class.getProtectionDomain().getCodeSource().getLocation().toURI()).getParent(); + String filePath = resourceDirectory.resolve(outputFile).toString(); + fos = new FileOutputStream(filePath); + } catch (FileNotFoundException e) { + throw new RuntimeException(e); + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + return fos; + } + + private String replaceURLByProtocol(URLConnection con, String url) { + if (con instanceof HttpsURLConnection) { + url = url.replace("https:", "http:"); + } else if (con instanceof HttpURLConnection) { + url = url.replace("http:", "https:"); + } + return url; + } + + private String getExtension(OWLDocumentFormat format) throws IllegalArgumentException { + String ext = ".txt"; + if (format instanceof OBODocumentFormat) + ext = ".obo"; + else if (format instanceof RDFXMLDocumentFormat) + ext = ".owl"; + else if (format instanceof TurtleDocumentFormat) + ext = ".ttl"; + else if (format instanceof OWLXMLDocumentFormat) + ext = ".owx"; + else if (format instanceof ManchesterSyntaxDocumentFormat) + ext = ".omn"; + else if (format instanceof FunctionalSyntaxDocumentFormat) + ext = ".ofn"; + return ext; + } +} diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java index 947ca2e22..cccbe9847 100644 --- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java +++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java @@ -102,11 +102,12 @@ private void parseRDF(String url, boolean convertToRDF, String id) { } else { logger.debug("Downloading (no predownload path provided) {}", url); if (convertToRDF) { - OWLOntology ont = convertOntologyToRDF(url, id); + OntologyConversion conversion = new OntologyConversion(url, id, new RDFXMLDocumentFormat()); + OWLOntology ont = conversion.getOntology(); OWLDocumentFormat format = ont.getOWLOntologyManager().getOntologyFormat(ont); logger.info("parsing "+id+" ontology in format: "+format.getKey()); Path resourceDirectory = Paths.get(OntologyGraph.class.getProtectionDomain().getCodeSource().getLocation().toURI()).getParent(); - url = Paths.get(resourceDirectory.resolve(id+".rdf").toUri()).toString(); + url = Paths.get(resourceDirectory.resolve(id+conversion.getExtConverted()).toUri()).toString(); } logger.info("url: "+url); sourceFileTimestamp = System.currentTimeMillis(); @@ -122,108 +123,6 @@ private void parseRDF(String url, boolean convertToRDF, String id) { } } - private OWLOntology loadOntology(String url) throws IOException { - OWLOntologyManager ontManager = OWLManager.createOWLOntologyManager(); - OWLOntology ont; - InputStream is = null; - URLConnection con = null; - try { - try { - URL tempURL = new URL(url); - con = tempURL.openConnection(); - is = tempURL.openStream(); - } catch (IOException e) { - url = replaceURLByProtocol(con, url); - try { - is = new URL(url).openStream(); - } catch (IOException ioe) { - ioe.printStackTrace(); - } - } - - try { - ont = ontManager.loadOntologyFromOntologyDocument(is); - } catch (Exception e) { - url = replaceURLByProtocol(con, url); - try { - is = new 
URL(url).openStream(); - ont = ontManager.loadOntologyFromOntologyDocument(is); - } catch (IOException ioe) { - ont = ontManager.loadOntologyFromOntologyDocument(IRI.create(url)); - } - } - } catch (OWLOntologyCreationException e) { - throw new RuntimeException(e); - } finally { - if (is != null) - is.close(); - } - return ont; - } - - private OWLOntology convertOntologyToRDF(String url, String outputFile) throws IOException { - FileOutputStream fos = null; - OWLOntology ont = loadOntology(url); - try { - OWLDocumentFormat format = ont.getOWLOntologyManager().getOntologyFormat(ont); - if (format instanceof OBODocumentFormat){ - Path resourceDirectory = Paths.get(OntologyGraph.class.getProtectionDomain().getCodeSource().getLocation().toURI()).getParent(); - logger.info("Saving the original "+format.getKey()+" format ontology to "+outputFile+".obo"); - fos = getFileOutPutStreamForExecutionPath(outputFile+".obo"); - ont.saveOntology(format, fos); - logger.info("Saving the converted RDF/XML Syntax format ontology to "+outputFile+".rdf"); - String filePath = resourceDirectory.resolve(outputFile+".rdf").toString(); - IOHelper iohelper = new IOHelper(); - iohelper.saveOntology(ont,new RDFXMLDocumentFormat(),IRI.create(new File(filePath)),true); - ont = loadOntology("file:"+filePath); - } else { - String ext = (format instanceof TurtleDocumentFormat) ? ".ttl" : ".owl"; - logger.info("Saving the original "+format.getKey()+" format ontology to "+outputFile+ext); - fos = getFileOutPutStreamForExecutionPath(outputFile+ext); - ont.saveOntology(format, fos); - logger.info("Saving the converted RDF/XML Syntax format ontology to "+outputFile+".rdf"); - fos = getFileOutPutStreamForExecutionPath(outputFile+".rdf"); - ont.saveOntology(new RDFXMLDocumentFormat(), fos); - Path resourceDirectory = Paths.get(OntologyGraph.class.getProtectionDomain().getCodeSource().getLocation().toURI()).getParent(); - String filePath = resourceDirectory.resolve(outputFile+".rdf").toString(); - ont = loadOntology("file:"+filePath); - } - } catch (IOException e) { - throw new RuntimeException(e); - } catch (OWLOntologyStorageException e) { - throw new RuntimeException(e); - } catch (Exception e) { - throw new RuntimeException(e); - } finally { - if (fos != null) - fos.close(); - } - return ont; - } - - private FileOutputStream getFileOutPutStreamForExecutionPath(String outputFile) { - FileOutputStream fos; - try { - Path resourceDirectory = Paths.get(OntologyGraph.class.getProtectionDomain().getCodeSource().getLocation().toURI()).getParent(); - String filePath = resourceDirectory.resolve(outputFile).toString(); - fos = new FileOutputStream(filePath); - } catch (FileNotFoundException e) { - throw new RuntimeException(e); - } catch (URISyntaxException e) { - throw new RuntimeException(e); - } - return fos; - } - - private String replaceURLByProtocol(URLConnection con, String url) { - if (con instanceof HttpsURLConnection) { - url = url.replace("https:", "http:"); - } else if (con instanceof HttpURLConnection) { - url = url.replace("http:", "https:"); - } - return url; - } - private String urlToFilename(String url) { return url.replaceAll("[^a-zA-Z0-9\\.\\-]", "_"); } From 4bef7858c9b250113721b07960c591d4652ade01 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Mon, 16 Sep 2024 13:40:34 +0200 Subject: [PATCH 089/146] implemented automatic conversion to RDF option upon ontology parsing error for #56 --- .../uk/ac/ebi/rdf2json/OntologyGraph.java | 32 +++++++++++++------ 1 file 
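Assuming the constructor and getters shown in the new OntologyConversion class above, client code might look like this sketch; the URL and ontology id are invented for illustration.

import org.semanticweb.owlapi.formats.RDFXMLDocumentFormat;
import org.semanticweb.owlapi.model.OWLOntology;

import uk.ac.ebi.rdf2json.OntologyConversion;

public class ConversionUsageSketch {
    public static void main(String[] args) throws Exception {
        OntologyConversion conversion =
                new OntologyConversion("https://example.org/my.obo", "myont", new RDFXMLDocumentFormat());
        OWLOntology converted = conversion.getOntology();
        // Per the class above, the converted copy is written next to the
        // executable as id + getExtConverted(), here "myont.owl".
        System.out.println(conversion.getExtOriginal() + " -> " + conversion.getExtConverted());
    }
}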
 changed, 22 insertions(+), 10 deletions(-)

diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java
index cccbe9847..820147baf 100644
--- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java
+++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java
@@ -101,17 +101,29 @@ private void parseRDF(String url, boolean convertToRDF, String id) {
                     }
                 } else {
                     logger.debug("Downloading (no predownload path provided) {}", url);
-                    if (convertToRDF) {
-                        OntologyConversion conversion = new OntologyConversion(url, id, new RDFXMLDocumentFormat());
-                        OWLOntology ont = conversion.getOntology();
-                        OWLDocumentFormat format = ont.getOWLOntologyManager().getOntologyFormat(ont);
-                        logger.info("parsing "+id+" ontology in format: "+format.getKey());
-                        Path resourceDirectory = Paths.get(OntologyGraph.class.getProtectionDomain().getCodeSource().getLocation().toURI()).getParent();
-                        url = Paths.get(resourceDirectory.resolve(id+conversion.getExtConverted()).toUri()).toString();
+                    try {
+                        logger.info("url: "+url);
+                        sourceFileTimestamp = System.currentTimeMillis();
+                        createParser(null).source(url).parse(this);
+                    } catch (Exception e){
+                        logger.error("Parsing exception: {}",e.getMessage());
+                        if(convertToRDF){
+                            logger.info("converting the ontology to RDF alternatively");
+                            OntologyConversion conversion = new OntologyConversion(url, id, new RDFXMLDocumentFormat());
+                            OWLOntology ont = conversion.getOntology();
+                            OWLDocumentFormat format = ont.getOWLOntologyManager().getOntologyFormat(ont);
+                            logger.info("parsing "+id+" ontology in format: "+format.getKey());
+                            Path resourceDirectory = Paths.get(OntologyGraph.class.getProtectionDomain().getCodeSource().getLocation().toURI()).getParent();
+                            url = Paths.get(resourceDirectory.resolve(id+conversion.getExtConverted()).toUri()).toString();
+                            logger.info("url of the converted ontology: "+url);
+                            sourceFileTimestamp = System.currentTimeMillis();
+                            createParser(null).source(url).parse(this);
+                        } else {
+                            logger.debug("You may alternatively try to use convertToRDF mode to parse your ontology");
+                            e.printStackTrace();
+                        }
                     }
-                    logger.info("url: "+url);
-                    sourceFileTimestamp = System.currentTimeMillis();
-                    createParser(null).source(url).parse(this);
                 }
             }
         } catch (FileNotFoundException e) {

From 3c78d8df8417141dcb93d80b3ad1af080acb475b Mon Sep 17 00:00:00 2001
From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com>
Date: Mon, 23 Sep 2024 18:01:23 +0200
Subject: [PATCH 090/146] stored converted imports in different files and
 handled exceptions for #56

---
 .../ac/ebi/rdf2json/OntologyConversion.java |  2 +-
 .../uk/ac/ebi/rdf2json/OntologyGraph.java   | 20 +++++++++++--------
 2 files changed, 13 insertions(+), 9 deletions(-)

diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyConversion.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyConversion.java
index 91632ae9d..98ce26444 100644
--- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyConversion.java
+++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyConversion.java
@@ -117,7 +117,7 @@ private OWLOntology loadOntology(String url) throws IOException {
                 try {
                     is = new URL(url).openStream();
                     ont = ontManager.loadOntologyFromOntologyDocument(is);
-                } catch (IOException ioe) {
+                } catch (Exception e2) {
                     ont = ontManager.loadOntologyFromOntologyDocument(IRI.create(url));
                 }
             }
diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java
index 820147baf..367970b48 100644
--- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java
+++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java
@@ -3,7 +3,6 @@
 import com.google.gson.stream.JsonWriter;
 
 import org.apache.jena.riot.RDFLanguages;
-import org.obolibrary.robot.IOHelper;
 import org.semanticweb.owlapi.formats.*;
 import uk.ac.ebi.rdf2json.annotators.*;
 import uk.ac.ebi.rdf2json.helpers.RdfListEvaluator;
@@ -33,13 +32,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import org.semanticweb.owlapi.apibinding.OWLManager;
 import org.semanticweb.owlapi.model.*;
-import javax.net.ssl.HttpsURLConnection;
-import java.io.*;
-import java.net.HttpURLConnection;
-import java.net.URL;
-import java.net.URLConnection;
 
 public class OntologyGraph implements StreamRDF {
 
@@ -139,6 +132,17 @@ private String urlToFilename(String url) {
         return url.replaceAll("[^a-zA-Z0-9\\.\\-]", "_");
     }
 
+    public static String removeExtension(String fileName) {
+        if (fileName == null || fileName.isEmpty()) {
+            return fileName;
+        }
+        int lastDotIndex = fileName.lastIndexOf('.');
+        if (lastDotIndex == -1) {
+            return fileName; // No extension found
+        }
+        return fileName.substring(0, lastDotIndex);
+    }
+
 
     private boolean loadLocalFiles;
 
@@ -200,7 +204,7 @@ private String urlToFilename(String url) {
             importUrls.remove(0);
 
             logger.debug("import: {}", importUrl);
-            parseRDF(importUrl, convertToRDF,config.getOrDefault("id","result").toString());
+            parseRDF(importUrl, convertToRDF,config.getOrDefault("id","result").toString()+"_"+removeExtension((importUrl.substring(importUrl.lastIndexOf('/') + 1))));
         }
 
         // Now the imports are done, mark everything else as imported

From c839786dcc6a20f3e5e16759b677c77a83973661 Mon Sep 17 00:00:00 2001
From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com>
Date: Tue, 24 Sep 2024 22:40:53 +0200
Subject: [PATCH 091/146] made corrections in displayCSV numbers for #44

---
 .../src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java
index 8ff783cf1..b7fc957fc 100644
--- a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java
+++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java
@@ -114,7 +114,9 @@ public static void displayCSV(List files) throws IOException {
             if (file.getName().endsWith("_edges.csv")){
                 try {
                     Path path = Paths.get(file.getAbsolutePath());
-                    noofRelationships = Files.lines(path).count() -1;
+                    int noofRecords = (int) Files.lines(path).count() - 1;
+                    noofRelationships += noofRecords;
+                    System.out.println(noofRecords+" records have been identified in "+file.getName());
                 } catch (Exception e) {
                     e.printStackTrace();
                 }
@@ -127,6 +129,8 @@ public static void displayCSV(List files) throws IOException {
                 noofNodes += noofRecords;
                 if (noofRecords != noofNewLines)
                     System.out.println("Warning: "+noofRecords+" records have been identified in contrast to "+noofNewLines+" new lines in "+file.getName());
+                else
+                    System.out.println(noofRecords+" records have been identified in "+file.getName());
             }
         }
         System.out.println("Total number of nodes that will be ingested in csv: " + noofNodes);
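The reason patch 091 counts parsed records instead of raw newlines: a quoted CSV field may legally contain line breaks, so Files.lines() can overcount rows. A small self-contained comparison follows; the file name is an assumption, and the PostgreSQL CSV dialect mirrors the project's parser configuration.

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;

import java.io.Reader;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.stream.Stream;

public class CsvCountSketch {
    public static void main(String[] args) throws Exception {
        Path path = Paths.get("example_classes.csv"); // assumed file name
        long newlineCount;
        try (Stream<String> lines = Files.lines(path)) {
            newlineCount = lines.count() - 1; // minus the header line
        }
        try (Reader reader = Files.newBufferedReader(path);
             CSVParser parser = new CSVParser(reader,
                     CSVFormat.POSTGRESQL_CSV.withFirstRecordAsHeader().withTrim())) {
            long recordCount = parser.getRecords().size();
            // The two counts differ whenever a field contains embedded newlines.
            System.out.println(recordCount + " records vs " + newlineCount + " newlines");
        }
    }
}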
From 04b7dcd4f6f51905ba1dc633e6346311fa998a74 Mon Sep 17 00:00:00 2001
From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com>
Date: Wed, 25 Sep 2024 12:19:51 +0200
Subject: [PATCH 092/146] provided more accurate data and put latch countdown
 at the final reachable place in runners for #44

---
 .../src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java  | 6 +++++-
 .../uk/ac/ebi/spot/csv2neo/NodeCreationQueryTask.java    | 9 +++++++--
 .../ebi/spot/csv2neo/RelationShipCreationQueryTask.java  | 7 ++++++-
 3 files changed, 18 insertions(+), 4 deletions(-)

diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java
index b7fc957fc..af297d113 100644
--- a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java
+++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java
@@ -106,7 +106,8 @@ public static void displayIngested(File file, Driver driver) throws IOException
         System.out.println("Total number of ingested nodes is "+noofRelationships);
     }
 
-    public static void displayCSV(List files) throws IOException {
+    public static Map displayCSV(List files) throws IOException {
+        Map records = new HashMap();
         System.out.println("---Ingestion Plan---");
         long noofRelationships = 0;
         long noofNodes = 0;
@@ -115,6 +116,7 @@ public static void displayCSV(List files) throws IOException {
                 try {
                     Path path = Paths.get(file.getAbsolutePath());
                     int noofRecords = (int) Files.lines(path).count() - 1;
+                    records.put(file.getName(),noofRecords);
                     noofRelationships += noofRecords;
                     System.out.println(noofRecords+" records have been identified in "+file.getName());
                 } catch (Exception e) {
@@ -126,6 +128,7 @@ public static void displayCSV(List files) throws IOException {
                 org.apache.commons.csv.CSVParser csvParser = new org.apache.commons.csv.CSVParser(reader, CSVFormat.POSTGRESQL_CSV.withFirstRecordAsHeader().withTrim());
                 int noofRecords = csvParser.getRecords().size();
                 int noofNewLines = (int) Files.lines(path).count() -1;
+                records.put(file.getName(),noofRecords);
                 noofNodes += noofRecords;
                 if (noofRecords != noofNewLines)
@@ -135,6 +138,7 @@ public static void displayCSV(List files) throws IOException {
         }
         System.out.println("Total number of nodes that will be ingested in csv: " + noofNodes);
         System.out.println("Total Number of relationships that will be ingested in csv: " + noofRelationships);
+        return records;
     }
 
     public static List> splitList(List list, int batchSize) {
diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/NodeCreationQueryTask.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/NodeCreationQueryTask.java
index aebd96451..fad060887 100644
--- a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/NodeCreationQueryTask.java
+++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/NodeCreationQueryTask.java
@@ -20,7 +20,7 @@ public class NodeCreationQueryTask implements Runnable {
 
     private final Driver driver;
-    private final CountDownLatch latch;
+    private CountDownLatch latch;
     private final List records;
     private final String[] headers;
     private final File file;
@@ -50,11 +50,16 @@ public void run() {
                         }
                         return true;
                     });
-                    latch.countDown();
                 }
             } catch(Exception e) {
                 System.out.println("Attempt "+i+" error: "+e.getMessage());
             }
         }
+        latch.countDown();
+        System.out.println("There are "+latch.getCount()+" remaining node batches.");
+        if (success)
+            System.out.println(records.size()+" nodes have been successfully added from "+file.getName());
+        else
+            System.out.println("Warning: "+records.size()+" nodes failed to be added from "+file.getName());
     }
 }
diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/RelationShipCreationQueryTask.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/RelationShipCreationQueryTask.java
index 96b07d1be..4b69cae1c 100644
--- a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/RelationShipCreationQueryTask.java
+++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/RelationShipCreationQueryTask.java
@@ -47,11 +47,16 @@ public void run() {
                         }
                         return true;
                     });
-                    latch.countDown();
                 }
             } catch(Exception e){
                 System.out.println("Attempt "+i+" error: "+e.getMessage());
             }
         }
+        latch.countDown();
+        System.out.println("There are "+latch.getCount()+" remaining relationship batches.");
+        if (success)
+            System.out.println(records.size()+" relationships have been successfully added from "+file.getName());
+        else
+            System.out.println("Warning: "+records.size()+" relationships failed to be added from "+file.getName());
     }
 }
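The point of moving the countdown in patch 092, restated outside the patch: every worker must decrement the latch exactly once regardless of outcome, otherwise a failing batch leaves latch.await() blocked forever. The sketch below enforces the same invariant with a finally block; it is schematic, not the patch's code, which places the countdown after its retry loop instead.

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class LatchSketch {
    public static void main(String[] args) throws InterruptedException {
        int batches = 4;
        CountDownLatch latch = new CountDownLatch(batches);
        ExecutorService pool = Executors.newFixedThreadPool(2);
        for (int i = 0; i < batches; i++) {
            final int batch = i;
            pool.submit(() -> {
                try {
                    if (batch == 2) throw new RuntimeException("simulated failure");
                    System.out.println("batch " + batch + " done");
                } catch (Exception e) {
                    System.out.println("batch " + batch + " failed: " + e.getMessage());
                } finally {
                    latch.countDown(); // always reached, success or failure
                }
            });
        }
        latch.await(); // returns even though one batch failed
        pool.shutdown();
        System.out.println("all batches accounted for");
    }
}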
From a3299712d34c75b8e05a0b4c6f8260fbfda7045f Mon Sep 17 00:00:00 2001
From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com>
Date: Thu, 26 Sep 2024 00:23:43 +0200
Subject: [PATCH 093/146] displayed differences between planned and ingested
 for #44

---
 .../uk/ac/ebi/spot/csv2neo/ImportCSV.java | 64 ++++++++++++-------
 1 file changed, 42 insertions(+), 22 deletions(-)

diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java
index af297d113..48b7240a8 100644
--- a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java
+++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java
@@ -11,6 +11,7 @@
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
+import java.util.stream.Collectors;
 
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
@@ -80,30 +81,43 @@ public static void executeBatchedRelationshipQueries(List files, Driver dr
     }
 
     /*
-     * File should be the _ontologies.csv file
+     * Files should be the _ontologies.csv files
     * */
-    public static void displayIngested(File file, Driver driver) throws IOException {
+    public static Map displayIngested(List files, Driver driver) throws IOException {
         System.out.println("---Ingestion Summary---");
-        long noofRelationships = 0;
-        long noofNodes = 0;
-        Reader reader = Files.newBufferedReader(Paths.get(file.getAbsolutePath()));
-        org.apache.commons.csv.CSVParser csvParser = new org.apache.commons.csv.CSVParser(reader, CSVFormat.POSTGRESQL_CSV.withFirstRecordAsHeader().withTrim());
-        List records = csvParser.getRecords();
-        for (CSVRecord record : records){
-            try (Session session = driver.session()){
-                String ontology = record.get(0).split("\\+")[0];
-                var resultN = session.run(countAllNodesOfOntology(ontology));
-                int nodes = resultN.next().get("nodes").asInt();
-                noofNodes += nodes;
-                System.out.println("Number of nodes in ontology "+ontology+" is "+nodes);
-                var resultR = session.run(countAllRelationshipsOfOntology(ontology));
-                int relationships = resultR.next().get("relationships").asInt();
-                noofRelationships += relationships;
-                System.out.println("Number of relationships in ontology "+ontology+" is "+relationships);
+        Map countRecords = new HashMap();
+        for (File file : files){
+            Reader reader = Files.newBufferedReader(Paths.get(file.getAbsolutePath()));
+            org.apache.commons.csv.CSVParser csvParser = new org.apache.commons.csv.CSVParser(reader, CSVFormat.POSTGRESQL_CSV.withFirstRecordAsHeader().withTrim());
+            List records = csvParser.getRecords();
+            for (CSVRecord record : records){
+                try (Session session = driver.session()){
+                    String ontology = record.get(0).split("\\+")[0];
+                    var resultN = session.run(countNodesOfOntology(ontology,"ontology"));
+                    int nodes = resultN.next().get("nodes").asInt();
+                    countRecords.put(ontology+"_ontologies.csv",nodes);
+                    System.out.println(nodes+" ontologies are ingested from "+ontology);
+                    resultN = session.run(countNodesOfOntology(ontology,"property"));
+                    nodes = resultN.next().get("nodes").asInt();
+                    countRecords.put(ontology+"_properties.csv",nodes);
+                    System.out.println(nodes+" properties are ingested from "+ontology);
+                    resultN = session.run(countNodesOfOntology(ontology,"individual"));
+                    nodes = resultN.next().get("nodes").asInt();
+                    countRecords.put(ontology+"_individuals.csv",nodes);
+                    System.out.println(nodes+" individuals are ingested from "+ontology);
+                    resultN = session.run(countNodesOfOntology(ontology,"class"));
+                    nodes = resultN.next().get("nodes").asInt();
+                    countRecords.put(ontology+"_classes.csv",nodes);
+                    System.out.println(nodes+" classes are ingested from "+ontology);
+                    var resultR = session.run(countAllRelationshipsOfOntology(ontology));
+                    int relationships = resultR.next().get("relationships").asInt();
+                    countRecords.put(ontology+"_relationships.csv",relationships);
+                    System.out.println(relationships+" relationships are ingested from "+ontology);
+                }
             }
+        }
-        System.out.println("Total number of ingested nodes is "+noofNodes);
-        System.out.println("Total number of ingested nodes is "+noofRelationships);
+        return countRecords;
     }
 
     public static Map displayCSV(List files) throws IOException {
@@ -210,10 +224,16 @@ public static void main(String... args) throws IOException, ParseException {
             if (cmd.getOptionValue("m").equals("i")){
                 File dir = new File(directory);
                 List files = listFiles(dir.listFiles());
-                displayCSV(files);
+                Map planned = displayCSV(files);
                 executeBatchedNodeQueries(files,driver,batchSize,poolSize,attempts);
                 executeBatchedRelationshipQueries(files,driver,batchSize, poolSize,attempts);
-                displayIngested(files.stream().filter(f -> f.getName().endsWith("_ontologies.csv")).findFirst().get(),driver);
+                Map ingested = displayIngested(files.stream().filter(f -> f.getName().endsWith("_ontologies.csv")).collect(Collectors.toUnmodifiableList()), driver);
+
+                Set keys = planned.keySet();
+                keys.addAll(ingested.keySet());
+                for (String key : keys){
+                    System.out.println("Planned: "+planned.getOrDefault(key,Integer.valueOf(-1))+" and Ingested: "+ingested.getOrDefault(key,Integer.valueOf(-1)));
+                }
             } else if (cmd.getOptionValue("m").equals("rm")){
                 for(String ontology : ontologyPrefixes.split(",")){
                     try {

From f4f38f44c04605e1b5ae949ead15238600de0bf4 Mon Sep 17 00:00:00 2001
From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com>
Date: Thu, 26 Sep 2024 00:44:35 +0200
Subject: [PATCH 094/146] fixed UnsupportedOperationException for #44

---
 .../src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java
index 48b7240a8..465818765 100644
--- a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java
+++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java
@@ -229,9 +229,10 @@ public static void main(String... args) throws IOException, ParseException {
                 executeBatchedRelationshipQueries(files,driver,batchSize, poolSize,attempts);
                 Map ingested = displayIngested(files.stream().filter(f -> f.getName().endsWith("_ontologies.csv")).collect(Collectors.toUnmodifiableList()), driver);
 
-                Set keys = planned.keySet();
-                keys.addAll(ingested.keySet());
-                for (String key : keys){
+                Set keysP = planned.keySet();
+                Set keysI = ingested.keySet();
+                keysP.addAll(keysI);
+                for (String key : keysP){
                     System.out.println("Planned: "+planned.getOrDefault(key,Integer.valueOf(-1))+" and Ingested: "+ingested.getOrDefault(key,Integer.valueOf(-1)));
                 }
             } else if (cmd.getOptionValue("m").equals("rm")){
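What this commit is fighting: Map.keySet() returns a live view that rejects add and addAll with UnsupportedOperationException, so shuffling the variables as above does not actually remove the crash. Copying the keys into a fresh HashSet, as the very next patch does, is the standard remedy. A minimal reproduction and fix, with invented sample data:

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class KeySetSketch {
    public static void main(String[] args) {
        Map<String, Integer> planned = new HashMap<>();
        planned.put("a_classes.csv", 10);
        Map<String, Integer> ingested = new HashMap<>();
        ingested.put("a_edges.csv", 7);

        Set<String> broken = planned.keySet();
        // broken.addAll(ingested.keySet()); // throws UnsupportedOperationException

        Set<String> keys = new HashSet<>(planned.keySet()); // safe, independent copy
        keys.addAll(ingested.keySet());
        System.out.println(keys); // [a_classes.csv, a_edges.csv]
    }
}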
countRecords.put(ontology+"_edges.csv",relationships); + System.out.println(relationships+" edges are ingested from "+ontology); } } @@ -229,11 +229,11 @@ public static void main(String... args) throws IOException, ParseException { executeBatchedRelationshipQueries(files,driver,batchSize, poolSize,attempts); Map ingested = displayIngested(files.stream().filter(f -> f.getName().endsWith("_ontologies.csv")).collect(Collectors.toUnmodifiableList()), driver); - Set keysP = planned.keySet(); - Set keysI = ingested.keySet(); - keysP.addAll(keysI); - for (String key : keysP){ - System.out.println("Planned: "+planned.getOrDefault(key,Integer.valueOf(-1))+" and Ingested: "+ingested.getOrDefault(key,Integer.valueOf(-1))); + Set keys = new HashSet<>(); + keys.addAll(planned.keySet()); + keys.addAll(ingested.keySet()); + for (String key : keys){ + System.out.println("For Key: "+key+" - Planned: "+planned.getOrDefault(key,Integer.valueOf(-1))+" and Ingested: "+ingested.getOrDefault(key,Integer.valueOf(-1))); } } else if (cmd.getOptionValue("m").equals("rm")){ for(String ontology : ontologyPrefixes.split(",")){ From f750ed2a113ce1f918c6371ebcdb0df145abc245 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Fri, 27 Sep 2024 17:08:23 +0200 Subject: [PATCH 096/146] implemented multiple removal options for #44 --- .../uk/ac/ebi/spot/csv2neo/ImportCSV.java | 41 ++++++++++++++++--- .../ac/ebi/spot/csv2neo/QueryGeneration.java | 16 +++++++- 2 files changed, 50 insertions(+), 7 deletions(-) diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java index d219afad7..70ba95db6 100644 --- a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java +++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java @@ -163,6 +163,18 @@ public static List> splitList(List list, int batchSize) { return subLists; } + public static int deleteFromSession(Session session, String deletionQuery){ + int deletedCount = 0; + try { + System.out.println(deletionQuery); + var resultN = session.run(deletionQuery); + deletedCount = resultN.next().get("deletedCount").asInt(); + } catch (Exception e){ + e.printStackTrace(); + } + return deletedCount; + } + private static Options getOptions() { Options options = new Options(); options.addOption("m", "mode",true, "ingest(i), remove(rm) or display(d) ontologies"); @@ -176,6 +188,8 @@ private static Options getOptions() { options.addOption("bs", "batch_size",true, "batch size for splitting queries into multiple transactions."); options.addOption("ps", "pool_size",true, "number of threads in the pool"); options.addOption("t", "attempts",true, "number of attempts for a particular batch"); + options.addOption("l", "limit",true, "number of nodes to be removed"); + options.addOption("lb", "label",true, "node label filter for removal"); return options; } @@ -192,6 +206,8 @@ public static void main(String... args) throws IOException, ParseException { final int batchSize = cmd.hasOption("bs") && Integer.parseInt(cmd.getOptionValue("bs"))>0 ? Integer.parseInt(cmd.getOptionValue("bs")) : 1000; final int poolSize = cmd.hasOption("ps") && Integer.parseInt(cmd.getOptionValue("ps"))>0 ? Integer.parseInt(cmd.getOptionValue("ps")) : 20; final int attempts = cmd.hasOption("t") ? Integer.parseInt(cmd.getOptionValue("t")) : 5; + final int limit = cmd.hasOption("l") ? 
Integer.parseInt(cmd.getOptionValue("l")) : 1000; + final String label = cmd.hasOption("lb") ? cmd.getOptionValue("lb") : "OntologyEntity"; try (var driver = cmd.hasOption("a") ? GraphDatabase.driver(dbUri, AuthTokens.basic(dbUser, dbPassword)) : GraphDatabase.driver(dbUri)) { driver.verifyConnectivity(); @@ -228,7 +244,6 @@ public static void main(String... args) throws IOException, ParseException { executeBatchedNodeQueries(files,driver,batchSize,poolSize,attempts); executeBatchedRelationshipQueries(files,driver,batchSize, poolSize,attempts); Map ingested = displayIngested(files.stream().filter(f -> f.getName().endsWith("_ontologies.csv")).collect(Collectors.toUnmodifiableList()), driver); - Set keys = new HashSet<>(); keys.addAll(planned.keySet()); keys.addAll(ingested.keySet()); @@ -236,11 +251,25 @@ public static void main(String... args) throws IOException, ParseException { System.out.println("For Key: "+key+" - Planned: "+planned.getOrDefault(key,Integer.valueOf(-1))+" and Ingested: "+ingested.getOrDefault(key,Integer.valueOf(-1))); } } else if (cmd.getOptionValue("m").equals("rm")){ - for(String ontology : ontologyPrefixes.split(",")){ - try { - session.run(generateOntologyDeleteQuery(ontology)); - } catch (Exception e){ - e.printStackTrace(); + if (!cmd.hasOption("l") && !cmd.hasOption("lb")){ + for(String ontology : ontologyPrefixes.split(",")){ + int deletedCount = deleteFromSession(session,generateOntologyDeleteQuery(ontology)); + System.out.println(deletedCount+" nodes and their respective relationships were deleted."); + } + } else if (cmd.hasOption("l") && !cmd.hasOption("lb")){ + for(String ontology : ontologyPrefixes.split(",")){ + int deletedCount = deleteFromSession(session,generateOntologyDeleteQuery(ontology,limit)); + System.out.println(deletedCount+" nodes and their respective relationships were deleted."); + } + } else if (!cmd.hasOption("l") && cmd.hasOption("lb")){ + for(String ontology : ontologyPrefixes.split(",")){ + int deletedCount = deleteFromSession(session,generateOntologyDeleteQuery(ontology,label)); + System.out.println(deletedCount+" nodes and their respective relationships were deleted."); + } + } else { + for(String ontology : ontologyPrefixes.split(",")){ + int deletedCount = deleteFromSession(session,generateOntologyDeleteQuery(ontology,label,limit)); + System.out.println(deletedCount+" nodes and their respective relationships were deleted."); } } } else if (cmd.getOptionValue("m").equals("d")){ diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java index b18053072..c4021e6d3 100644 --- a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java +++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java @@ -67,7 +67,21 @@ public static String generateRelationCreationQuery(String[] titles, String[] val } public static String generateOntologyDeleteQuery(String ontologyPrefix){ - return "MATCH (n) WHERE n.id STARTS WITH '"+ontologyPrefix+"' DETACH DELETE n"; + // Computationally demanding version that shows deleted relationships as well as deleted nodes.
+ // MATCH (n) WHERE n.id STARTS WITH 'ontologyPrefix' OPTIONAL MATCH (n)-[r]-() WITH n, collect(r) AS relationships DETACH DELETE n RETURN COUNT(n) AS deletedCount, relationships + return "MATCH (n) WHERE n.id STARTS WITH '"+ontologyPrefix+"+' AND '"+ontologyPrefix+"' IN n.ontologyId WITH n DETACH DELETE n RETURN COUNT(n) AS deletedCount"; + } + + public static String generateOntologyDeleteQuery(String ontologyPrefix, String label){ + return "MATCH (n:`"+label+"`) WHERE n.id STARTS WITH '"+ontologyPrefix+"+' AND '"+ontologyPrefix+"' IN n.ontologyId WITH n DETACH DELETE n RETURN COUNT(n) AS deletedCount"; + } + + public static String generateOntologyDeleteQuery(String ontologyPrefix, int limit){ + return "MATCH (n) WHERE n.id STARTS WITH '"+ontologyPrefix+"+' AND '"+ontologyPrefix+"' IN n.ontologyId WITH n SKIP 0 LIMIT "+limit+" DETACH DELETE n RETURN COUNT(n) AS deletedCount"; + } + + public static String generateOntologyDeleteQuery(String ontologyPrefix, String label, int limit){ + return "MATCH (n:`"+label+"`) WHERE n.id STARTS WITH '"+ontologyPrefix+"+' AND '"+ontologyPrefix+"' IN n.ontologyId WITH n SKIP 0 LIMIT "+limit+" DETACH DELETE n RETURN COUNT(n) AS deletedCount"; } public static String countAllRelationshipsOfOntology(String ontologyPrefix) { From f4062a878e278f4023065c07afb3fab351085d4f Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Sat, 28 Sep 2024 13:34:13 +0200 Subject: [PATCH 097/146] detected self importing IRIs that cause a loop for #63 --- .../src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java index 367970b48..c31a5935c 100644 --- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java +++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java @@ -200,11 +200,14 @@ public static String removeExtension(String fileName) { while(importUrls.size() > 0) { + int size = importUrls.size(); String importUrl = importUrls.get(0); importUrls.remove(0); logger.debug("import: {}", importUrl); parseRDF(importUrl, convertToRDF,config.getOrDefault("id","result").toString()+"_"+removeExtension((importUrl.substring(importUrl.lastIndexOf('/') + 1)))); + if (importUrl.equals(importUrls.get(0)) && size == importUrls.size()) + break; } // Now the imports are done, mark everything else as imported From 3594f499e70a75755724c4238052c8426ff8e547 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Sat, 28 Sep 2024 13:53:13 +0200 Subject: [PATCH 098/146] simplified loop detection for #63 --- .../src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java index c31a5935c..aa1b3ab12 100644 --- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java +++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java @@ -200,14 +200,13 @@ public static String removeExtension(String fileName) { while(importUrls.size() > 0) { - int size = importUrls.size(); String importUrl = importUrls.get(0); importUrls.remove(0); logger.debug("import: {}", importUrl); parseRDF(importUrl, 
convertToRDF,config.getOrDefault("id","result").toString()+"_"+removeExtension((importUrl.substring(importUrl.lastIndexOf('/') + 1)))); - if (importUrl.equals(importUrls.get(0)) && size == importUrls.size()) - break; + if (importUrl.equals(importUrls.get(0))) + importUrls.remove(0); } // Now the imports are done, mark everything else as imported From e3d54c9ca0d570cd4ab3dd4071ce4e324d62374c Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Sat, 28 Sep 2024 14:00:33 +0200 Subject: [PATCH 099/146] ensured importUrls list size in loop detection for #63 --- .../src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java index aa1b3ab12..abfb8fbaa 100644 --- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java +++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java @@ -205,7 +205,7 @@ public static String removeExtension(String fileName) { logger.debug("import: {}", importUrl); parseRDF(importUrl, convertToRDF,config.getOrDefault("id","result").toString()+"_"+removeExtension((importUrl.substring(importUrl.lastIndexOf('/') + 1)))); - if (importUrl.equals(importUrls.get(0))) + if (importUrls.size() > 0 && importUrl.equals(importUrls.get(0))) importUrls.remove(0); } From c6ada35061ca91b0ec7df183324bc13424031d48 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Sat, 28 Sep 2024 19:03:50 +0200 Subject: [PATCH 100/146] contains check instead of equality check in loop detection for #63 --- .../src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java index abfb8fbaa..a987173a5 100644 --- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java +++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java @@ -205,8 +205,8 @@ public static String removeExtension(String fileName) { logger.debug("import: {}", importUrl); parseRDF(importUrl, convertToRDF,config.getOrDefault("id","result").toString()+"_"+removeExtension((importUrl.substring(importUrl.lastIndexOf('/') + 1)))); - if (importUrls.size() > 0 && importUrl.equals(importUrls.get(0))) - importUrls.remove(0); + if (importUrls.size() > 0 && importUrls.contains(importUrl)) + importUrls.remove(importUrl); } // Now the imports are done, mark everything else as imported From 0198c76f5084b993a90a06d85348d85cf115f036 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Sun, 29 Sep 2024 12:45:30 +0200 Subject: [PATCH 101/146] ensured RDFXML format in convertToRDF option for #63 --- .../src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java index a987173a5..9f9386ad4 100644 --- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java +++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java @@ -110,7 +110,7 @@ private void parseRDF(String url, boolean convertToRDF, String id) { url = 
Paths.get(resourceDirectory.resolve(id+conversion.getExtConverted()).toUri()).toString(); logger.info("url of the converted ontology: "+url); sourceFileTimestamp = System.currentTimeMillis(); - createParser(null).source(url).parse(this); + createParser(Lang.RDFXML).source(url).parse(this); } else { logger.debug("You may alternatively try to use convertToRDF mode to parse your ontology"); e.printStackTrace(); From 6f87627a82ba8f8af93d856a8588e5fc66eba12b Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Mon, 30 Sep 2024 15:17:05 +0200 Subject: [PATCH 102/146] generalized detection of import loops for #63 --- .../src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java index 9f9386ad4..c63c41dc2 100644 --- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java +++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java @@ -199,14 +199,16 @@ public static String removeExtension(String fileName) { } + List imported = new ArrayList<>(); while(importUrls.size() > 0) { String importUrl = importUrls.get(0); importUrls.remove(0); + if (imported.contains(importUrl)) + continue; logger.debug("import: {}", importUrl); parseRDF(importUrl, convertToRDF,config.getOrDefault("id","result").toString()+"_"+removeExtension((importUrl.substring(importUrl.lastIndexOf('/') + 1)))); - if (importUrls.size() > 0 && importUrls.contains(importUrl)) - importUrls.remove(importUrl); + imported.add(importUrl); } // Now the imports are done, mark everything else as imported From ab23684cd5e1c92f767b4d9e02095760b1b8a96d Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Mon, 30 Sep 2024 16:43:56 +0200 Subject: [PATCH 103/146] avoided null property value for #63 --- .../ac/ebi/rdf2json/annotators/DisjointWithAnnotator.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/DisjointWithAnnotator.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/DisjointWithAnnotator.java index 56236f303..3fe9a43aa 100644 --- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/DisjointWithAnnotator.java +++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/DisjointWithAnnotator.java @@ -52,13 +52,13 @@ public static void annotateDisjointWith(OntologyGraph graph) { } } } - + } else if (c.types.contains(OntologyNode.NodeType.ALL_DIFFERENT)) { PropertyValue membersList = c.properties.getPropertyValue("http://www.w3.org/2002/07/owl#distinctMembers"); - List members = RdfListEvaluator.evaluateRdfList(graph.getNodeForPropertyValue(membersList), graph); if (membersList != null) { - List individualNodes = members.stream() + List members = RdfListEvaluator.evaluateRdfList(graph.getNodeForPropertyValue(membersList), graph); + List individualNodes = members.stream() .map(val -> graph.getNodeForPropertyValue(val)) .filter(val -> val != null) .collect(Collectors.toList()); From 4e637f2e4cb5a6365e1f80171e81ed0e4035646b Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Wed, 2 Oct 2024 19:22:37 +0200 Subject: [PATCH 104/146] debug the cause of relation loop --- .../uk/ac/ebi/rdf2json/OntologyGraph.java | 22 ++++++---- 
.../rdf2json/annotators/RelatedAnnotator.java | 40 +++++++++++++------ 2 files changed, 42 insertions(+), 20 deletions(-) diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java index c63c41dc2..dbad3962d 100644 --- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java +++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java @@ -436,9 +436,9 @@ public void write(JsonWriter writer) throws Throwable { writer.endObject(); - } catch (Throwable t) { - logger.error("Error in writing ontology with id = {}", ontologyId, t); - throw t; + } catch (Exception e) { + logger.error("Error in writing ontology with id = {}", ontologyId); + e.printStackTrace(); } } @@ -610,8 +610,16 @@ public void writeValue(JsonWriter writer, PropertyValue value) throws Throwable if (uriNode != null && !isXMLBuiltinDatatype(uri) && uriNode.types.contains(OntologyNode.NodeType.DATATYPE)) { // special case for rdfs:Datatype; nest it as with a bnode instead of referencing writeNode(writer, uriNode, Set.of("datatype")); - } else { + } else if (uri != null){ + writer.beginObject(); + writer.name("uri"); writer.value(uri); + writer.endObject(); + } else { + writer.beginObject(); + writer.name("uri"); + writer.value("?"); + writer.endObject(); } break; case RELATED: @@ -640,9 +648,9 @@ public void writeValue(JsonWriter writer, PropertyValue value) throws Throwable writer.value("?"); break; } - } catch (Throwable t) { - logger.error("Error writing property value {}", value, t); - throw t; + } catch (Exception e) { + logger.error("Error writing property value {}", value); + e.printStackTrace(); } } diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/RelatedAnnotator.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/RelatedAnnotator.java index fc4a30fe1..a6f6f7d83 100644 --- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/RelatedAnnotator.java +++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/RelatedAnnotator.java @@ -208,22 +208,36 @@ private static void annotateRelated_Class_subClassOf_Restriction_someValuesFrom( private static void annotateRelated_Class_subClassOf_Restriction_someValuesFrom_oneOf( OntologyNode classNode, String propertyUri, OntologyNode fillerRestriction, PropertyValue filler, Set ontologyBaseUris, String preferredPrefix, OntologyGraph graph) { - // The filler is an RDF list of Individuals - OntologyNode fillerNode = graph.nodes.get( ((PropertyValueBNode) filler).getId() ); - List fillerIndividuals = - RdfListEvaluator.evaluateRdfList(fillerNode, graph) - .stream() - .map(propertyValue -> graph.nodes.get( ((PropertyValueURI) propertyValue).getUri() )) - .collect(Collectors.toList()); + logger.info("filler node uri: "+fillerNode.uri); + + List fillerIndividuals = new ArrayList<>(); + if(fillerNode != null){ + for (PropertyValue propertyValue : RdfListEvaluator.evaluateRdfList(fillerNode, graph)){ + if (propertyValue != null){ + OntologyNode ontologyNode = null; + try { + graph.getNodeForPropertyValue(propertyValue); + logger.info("success property value"); + } catch (Exception e){ + logger.error("fail property value"); + } + if (ontologyNode != null && ontologyNode.uri != null){ + logger.info("ontology node uri: "+ontologyNode.uri); + fillerIndividuals.add(ontologyNode); + } + } + } - for(OntologyNode individualNode : fillerIndividuals) { - classNode.properties.addProperty("relatedTo", - new 
PropertyValueRelated(fillerNode, propertyUri, individualNode)); - individualNode.properties.addProperty("relatedFrom", - new PropertyValueRelated(fillerNode, propertyUri, classNode)); + for(OntologyNode individualNode : fillerIndividuals) { + classNode.properties.addProperty("relatedTo", + new PropertyValueRelated(fillerNode, propertyUri, individualNode)); + individualNode.properties.addProperty("relatedFrom", + new PropertyValueRelated(fillerNode, propertyUri, classNode)); + } } + } private static void annotateRelated_Class_subClassOf_Restriction_someValuesFrom_intersectionOf( @@ -269,7 +283,7 @@ private static void annotateRelated_Class_subClassOf_Restriction_hasValue(Ontolo } return; - } + } // TODO: what to do with data values? } From 3beb4a74f650945584d3112f129a0dabbe465d4e Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Tue, 8 Oct 2024 14:32:18 +0200 Subject: [PATCH 105/146] split output files based on ontology for #60 --- .../json2solr/src/main/java/JSON2Solr.java | 78 ++++++++++--------- 1 file changed, 43 insertions(+), 35 deletions(-) diff --git a/dataload/json2solr/src/main/java/JSON2Solr.java b/dataload/json2solr/src/main/java/JSON2Solr.java index cad0e0e57..9940e6a7c 100644 --- a/dataload/json2solr/src/main/java/JSON2Solr.java +++ b/dataload/json2solr/src/main/java/JSON2Solr.java @@ -39,24 +39,15 @@ public static void main(String[] args) throws IOException { String inputFilePath = cmd.getOptionValue("input"); String outPath = cmd.getOptionValue("outDir"); - PrintStream ontologiesWriter = null; - PrintStream classesWriter = null; - PrintStream propertiesWriter = null; - PrintStream individualsWriter = null; - PrintStream autocompleteWriter = null; - - - String ontologiesOutName = outPath + "/ontologies.jsonl"; - String classesOutName = outPath + "/classes.jsonl"; - String propertiesOutName = outPath + "/properties.jsonl"; - String individualsOutName = outPath + "/individuals.jsonl"; - String autocompleteOutName = outPath + "/autocomplete.jsonl"; + File file = new File(outPath); + try { + file.mkdirs(); + file.createNewFile(); + } catch (IOException ioe) { + ioe.printStackTrace(); + } - ontologiesWriter = new PrintStream(ontologiesOutName); - classesWriter = new PrintStream(classesOutName); - propertiesWriter = new PrintStream(propertiesOutName); - individualsWriter = new PrintStream(individualsOutName); - autocompleteWriter = new PrintStream(autocompleteOutName); + Map writers = new HashMap<>(); JsonReader reader = new JsonReader(new InputStreamReader(new FileInputStream(inputFilePath))); @@ -98,10 +89,9 @@ public static void main(String[] args) throws IOException { flattenedClass.put("id", entityId); flattenProperties(_class, flattenedClass); + writeEntity("classes",ontologyId,flattenedClass,outPath,writers); - classesWriter.println(gson.toJson(flattenedClass)); - - writeAutocompleteEntries(ontologyId, entityId, flattenedClass, autocompleteWriter); + writeAutocompleteEntries(ontologyId, entityId, flattenedClass, outPath, writers); } reader.endArray(); @@ -123,9 +113,9 @@ public static void main(String[] args) throws IOException { flattenProperties(property, flattenedProperty); - propertiesWriter.println(gson.toJson(flattenedProperty)); + writeEntity("properties",ontologyId,flattenedProperty,outPath,writers); - writeAutocompleteEntries(ontologyId, entityId, flattenedProperty, autocompleteWriter); + writeAutocompleteEntries(ontologyId, entityId, flattenedProperty,outPath,writers); } reader.endArray(); @@ -147,9 +137,9 @@
public static void main(String[] args) throws IOException { flattenProperties(individual, flattenedIndividual); - individualsWriter.println(gson.toJson(flattenedIndividual)); + writeEntity("individuals",ontologyId,flattenedIndividual,outPath,writers); - writeAutocompleteEntries(ontologyId, entityId, flattenedIndividual, autocompleteWriter); + writeAutocompleteEntries(ontologyId, entityId, flattenedIndividual,outPath,writers); } reader.endArray(); @@ -176,7 +166,7 @@ public static void main(String[] args) throws IOException { flattenProperties(ontology, flattenedOntology); - ontologiesWriter.println(gson.toJson(flattenedOntology)); + writeEntity("ontologies",ontologyId,flattenedOntology,outPath,writers); reader.endObject(); // ontology } @@ -194,6 +184,24 @@ public static void main(String[] args) throws IOException { reader.close(); } + static private void writeEntity(String type, String ontologyId, Map flattenedEntity, String outPath, Map writers) throws FileNotFoundException { + if(writers.containsKey(ontologyId+"_"+type)) + writers.get(ontologyId+"_"+type).println(gson.toJson(flattenedEntity)); + else { + writers.put(ontologyId+"_"+type,new PrintStream(outPath+"/"+ontologyId+"_"+type+".jsonl")); + writers.get(ontologyId+"_"+type).println(gson.toJson(flattenedEntity)); + } + } + + static private void writeAutocomplete(String ontologyId, Map flattenedEntity, String outPath, Map writers) throws FileNotFoundException { + if(writers.containsKey(ontologyId+"_autocomplete")) + writers.get(ontologyId+"_autocomplete").println(gson.toJson(flattenedEntity, Map.class)); + else { + writers.put(ontologyId+"_autocomplete",new PrintStream(outPath+"/"+ontologyId+"_autocomplete.jsonl")); + writers.get(ontologyId+"_autocomplete").println(gson.toJson(flattenedEntity, Map.class)); + } + } + static private void flattenProperties(Map properties, Map flattened) { for (String k : properties.keySet()) { @@ -233,24 +241,24 @@ static private void flattenProperties(Map properties, Map flattenedEntity, PrintStream autocompleteWriter) { + static void writeAutocompleteEntries(String ontologyId, String entityId, Map flattenedEntity, String outPath, Map writers) throws FileNotFoundException { Object labels = flattenedEntity.get("label"); if(labels instanceof List) { for(Object label : (List) labels) { - autocompleteWriter.println( gson.toJson(makeAutocompleteEntry(ontologyId, entityId, (String)label), Map.class) ); + writeAutocomplete(ontologyId,makeAutocompleteEntry(ontologyId, entityId, (String)label),outPath,writers); } } else if(labels instanceof String) { - autocompleteWriter.println( gson.toJson(makeAutocompleteEntry(ontologyId, entityId, (String)labels), Map.class) ); + writeAutocomplete(ontologyId,makeAutocompleteEntry(ontologyId, entityId, (String)labels),outPath,writers); } Object synonyms = flattenedEntity.get("synonym"); if(synonyms instanceof List) { for(Object label : (List) synonyms) { - autocompleteWriter.println( gson.toJson(makeAutocompleteEntry(ontologyId, entityId, (String)label), Map.class) ); + writeAutocomplete(ontologyId,makeAutocompleteEntry(ontologyId, entityId, (String)label),outPath,writers); } } else if(synonyms instanceof String) { - autocompleteWriter.println( gson.toJson(makeAutocompleteEntry(ontologyId, entityId, (String)synonyms), Map.class) ); + writeAutocomplete(ontologyId,makeAutocompleteEntry(ontologyId, entityId, (String)synonyms),outPath,writers); } } From 0198cdde0b7eda01632046f828a7e71237f1048d Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY 
<48091473+giraygi@users.noreply.github.com> Date: Tue, 8 Oct 2024 18:10:12 +0200 Subject: [PATCH 106/146] closed all PrintWriters by the end of execution for #60 --- dataload/json2solr/src/main/java/JSON2Solr.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/dataload/json2solr/src/main/java/JSON2Solr.java b/dataload/json2solr/src/main/java/JSON2Solr.java index 9940e6a7c..723b2dc1a 100644 --- a/dataload/json2solr/src/main/java/JSON2Solr.java +++ b/dataload/json2solr/src/main/java/JSON2Solr.java @@ -182,6 +182,8 @@ public static void main(String[] args) throws IOException { reader.endObject(); reader.close(); + for (PrintStream printStream : writers.values()) + printStream.close(); } static private void writeEntity(String type, String ontologyId, Map flattenedEntity, String outPath, Map writers) throws FileNotFoundException { From 2f37563492637d7bf7b2ca45962c0b07ea05ae06 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Wed, 9 Oct 2024 14:45:58 +0200 Subject: [PATCH 107/146] updated load into solr script based on the json2solr module outputs for #60 --- dataload/load_into_solr.sh | 39 +++++++++++++++----------------------- 1 file changed, 15 insertions(+), 24 deletions(-) diff --git a/dataload/load_into_solr.sh b/dataload/load_into_solr.sh index aae049a28..86e7d65fb 100755 --- a/dataload/load_into_solr.sh +++ b/dataload/load_into_solr.sh @@ -8,33 +8,24 @@ fi $1/bin/solr start -force -Djetty.host=127.0.0.1 sleep 10 -wget --method POST --no-proxy -O - --server-response --content-on-error=on --header="Content-Type: application/json" --body-file $2/ontologies.jsonl \ - http://127.0.0.1:8983/solr/ols4_entities/update/json/docs?commit=true - -wget --method POST --no-proxy -O - --server-response --content-on-error=on --header="Content-Type: application/json" --body-file $2/classes.jsonl \ - http://127.0.0.1:8983/solr/ols4_entities/update/json/docs?commit=true - -wget --method POST --no-proxy -O - --server-response --content-on-error=on --header="Content-Type: application/json" --body-file $2/properties.jsonl \ - http://127.0.0.1:8983/solr/ols4_entities/update/json/docs?commit=true - -wget --method POST --no-proxy -O - --server-response --content-on-error=on --header="Content-Type: application/json" --body-file $2/individuals.jsonl \ - http://127.0.0.1:8983/solr/ols4_entities/update/json/docs?commit=true - -wget --method POST --no-proxy -O - --server-response --content-on-error=on --header="Content-Type: application/json" --body-file $2/autocomplete.jsonl \ - http://127.0.0.1:8983/solr/ols4_autocomplete/update/json/docs?commit=true - +FILES=$2/*_*.jsonl +for f in $FILES +do + echo "$f" + if [[ $f == *_ontologies.jsonl ]] || [[ $f == *_classes.jsonl ]] || [[ $f == *_properties.jsonl ]] || [[ $f == *_individuals.jsonl ]]; then + echo 'entity' + wget --method POST --no-proxy -O - --server-response --content-on-error=on --header="Content-Type: application/json" --body-file $f http://127.0.0.1:8983/solr/ols4_entities/update/json/docs?commit=true + elif [[ $f == *_autocomplete.jsonl ]]; then + echo 'autocomplete' + wget --method POST --no-proxy -O - --server-response --content-on-error=on --header="Content-Type: application/json" --body-file $f http://127.0.0.1:8983/solr/ols4_autocomplete/update/json/docs?commit=true + fi +done sleep 5 - +echo 'update entities' wget --no-proxy http://127.0.0.1:8983/solr/ols4_entities/update?commit=true - sleep 5 - +echo 'update autocomplete' wget --no-proxy 
http://127.0.0.1:8983/solr/ols4_autocomplete/update?commit=true - -sleep 5 +echo 'loading solr finished' $1/bin/solr stop - - - - From fc78422aeee10c35fb7db7ec655fee453bc8b6d2 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Fri, 11 Oct 2024 12:56:39 +0200 Subject: [PATCH 108/146] ensure output csv directory is present --- dataload/json2neo/src/main/java/JSON2CSV.java | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/dataload/json2neo/src/main/java/JSON2CSV.java b/dataload/json2neo/src/main/java/JSON2CSV.java index d37d64a42..5fe0cedeb 100644 --- a/dataload/json2neo/src/main/java/JSON2CSV.java +++ b/dataload/json2neo/src/main/java/JSON2CSV.java @@ -43,6 +43,14 @@ public static void main(String[] args) throws IOException { String inputFilePath = cmd.getOptionValue("input"); String outputFilePath = cmd.getOptionValue("outDir"); + File file = new File(outputFilePath); + try { + file.mkdirs(); + file.createNewFile(); + } catch (IOException ioe) { + ioe.printStackTrace(); + } + new NeoConverter(inputFilePath, outputFilePath).convert(); } From a402de4e05110c1f1c5c67fd317c805bdea097a4 Mon Sep 17 00:00:00 2001 From: deepananbalagan Date: Mon, 14 Oct 2024 16:19:38 +0200 Subject: [PATCH 109/146] added condition to remove self reference in relatedFrom property --- .../rdf2json/annotators/RelatedAnnotator.java | 22 ++++++++++++++----- 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/RelatedAnnotator.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/RelatedAnnotator.java index a6f6f7d83..87eb4358d 100644 --- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/RelatedAnnotator.java +++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/RelatedAnnotator.java @@ -37,7 +37,7 @@ public static void annotateRelated(OntologyGraph graph) { // We are only looking for anonymous parents, which are either class expressions or restrictions. // - if(parent.getType() != PropertyValue.Type.BNODE) { + if(parent.getType() != PropertyValue.Type.BNODE) { continue; } @@ -173,10 +173,20 @@ private static void annotateRelated_Class_subClassOf_Restriction_someValuesFrom( OntologyNode fillerNode = graph.nodes.get(fillerUri); if(fillerNode != null) { // sometimes filler not included in ontology, e.g. 
"subClassOf some xsd:float" in cdao - - classNode.properties.addProperty("relatedTo", new PropertyValueRelated(fillerRestriction, propertyUri, fillerNode)); - fillerNode.properties.addProperty("relatedFrom", new PropertyValueRelated(fillerRestriction, propertyUri, classNode)); - + PropertyValue someClassFrom = null; + if(fillerRestriction != null) + someClassFrom = fillerRestriction.properties.getPropertyValue("http://www.w3.org/2002/07/owl#someValuesFrom"); + + if(someClassFrom != null) { + if(!((PropertyValueURI) someClassFrom).getUri().equalsIgnoreCase(fillerUri)) { + classNode.properties.addProperty("relatedTo", new PropertyValueRelated(fillerRestriction, propertyUri, fillerNode)); + fillerNode.properties.addProperty("relatedFrom", new PropertyValueRelated(fillerRestriction, propertyUri, classNode)); + } + } + else { + classNode.properties.addProperty("relatedTo", new PropertyValueRelated(fillerRestriction, propertyUri, fillerNode)); + fillerNode.properties.addProperty("relatedFrom", new PropertyValueRelated(fillerRestriction, propertyUri, classNode)); + } } } @@ -211,7 +221,7 @@ private static void annotateRelated_Class_subClassOf_Restriction_someValuesFrom_ OntologyNode fillerNode = graph.nodes.get( ((PropertyValueBNode) filler).getId() ); logger.info("filler node uri: "+fillerNode.uri); - + List fillerIndividuals = new ArrayList<>(); if(fillerNode != null){ for (PropertyValue propertyValue : RdfListEvaluator.evaluateRdfList(fillerNode, graph)){ From 1ccd7cf1d99ed76a17080b08d84399542b4c3162 Mon Sep 17 00:00:00 2001 From: deepananbalagan Date: Mon, 21 Oct 2024 09:37:36 +0200 Subject: [PATCH 110/146] merged with Dev --- .../java/uk/ac/ebi/rdf2json/RDF2JSON.java | 29 +++++++++++++------ .../rdf2json/annotators/RelatedAnnotator.java | 10 +++---- 2 files changed, 25 insertions(+), 14 deletions(-) diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/RDF2JSON.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/RDF2JSON.java index 86765c5a3..9a12f2e77 100644 --- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/RDF2JSON.java +++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/RDF2JSON.java @@ -1,14 +1,10 @@ package uk.ac.ebi.rdf2json; -import com.google.gson.Gson; -import com.google.gson.stream.JsonReader; -import com.google.gson.stream.JsonToken; -import com.google.gson.stream.JsonWriter; -import org.apache.commons.cli.*; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.*; +import java.io.FileInputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; import java.net.URL; import java.util.Arrays; import java.util.Collection; @@ -20,6 +16,21 @@ import java.util.TreeMap; import java.util.stream.Collectors; +import org.apache.commons.cli.CommandLine; +import org.apache.commons.cli.CommandLineParser; +import org.apache.commons.cli.DefaultParser; +import org.apache.commons.cli.HelpFormatter; +import org.apache.commons.cli.Option; +import org.apache.commons.cli.Options; +import org.apache.commons.cli.ParseException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.gson.Gson; +import com.google.gson.stream.JsonReader; +import com.google.gson.stream.JsonToken; +import com.google.gson.stream.JsonWriter; + public class RDF2JSON { private static final Logger logger = LoggerFactory.getLogger(RDF2JSON.class); diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/RelatedAnnotator.java 
b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/RelatedAnnotator.java index 87eb4358d..6e7f5db63 100644 --- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/RelatedAnnotator.java +++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/RelatedAnnotator.java @@ -173,12 +173,12 @@ private static void annotateRelated_Class_subClassOf_Restriction_someValuesFrom( OntologyNode fillerNode = graph.nodes.get(fillerUri); if(fillerNode != null) { // sometimes filler not included in ontology, e.g. "subClassOf some xsd:float" in cdao - PropertyValue someClassFrom = null; + PropertyValue someValuesFrom = null; if(fillerRestriction != null) - someClassFrom = fillerRestriction.properties.getPropertyValue("http://www.w3.org/2002/07/owl#someValuesFrom"); + someValuesFrom = fillerRestriction.properties.getPropertyValue("http://www.w3.org/2002/07/owl#someValuesFrom"); - if(someClassFrom != null) { - if(!((PropertyValueURI) someClassFrom).getUri().equalsIgnoreCase(fillerUri)) { + if(someValuesFrom != null) { + if(!((PropertyValueURI) someValuesFrom).getUri().equalsIgnoreCase(fillerUri)) { classNode.properties.addProperty("relatedTo", new PropertyValueRelated(fillerRestriction, propertyUri, fillerNode)); fillerNode.properties.addProperty("relatedFrom", new PropertyValueRelated(fillerRestriction, propertyUri, classNode)); } @@ -228,7 +228,7 @@ private static void annotateRelated_Class_subClassOf_Restriction_someValuesFrom_ if (propertyValue != null){ OntologyNode ontologyNode = null; try { - graph.getNodeForPropertyValue(propertyValue); + ontologyNode = graph.getNodeForPropertyValue(propertyValue); logger.info("success property value"); } catch (Exception e){ logger.error("fail property value"); From eb75938b7a74f6c8ee100afe612d2528d2095b39 Mon Sep 17 00:00:00 2001 From: deepananbalagan Date: Mon, 21 Oct 2024 09:43:57 +0200 Subject: [PATCH 111/146] merged import from Dev --- .../java/uk/ac/ebi/rdf2json/RDF2JSON.java | 29 ++++++------------- 1 file changed, 9 insertions(+), 20 deletions(-) diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/RDF2JSON.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/RDF2JSON.java index 9a12f2e77..86765c5a3 100644 --- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/RDF2JSON.java +++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/RDF2JSON.java @@ -1,10 +1,14 @@ package uk.ac.ebi.rdf2json; -import java.io.FileInputStream; -import java.io.FileWriter; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; +import com.google.gson.Gson; +import com.google.gson.stream.JsonReader; +import com.google.gson.stream.JsonToken; +import com.google.gson.stream.JsonWriter; +import org.apache.commons.cli.*; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.*; import java.net.URL; import java.util.Arrays; import java.util.Collection; @@ -16,21 +20,6 @@ import java.util.TreeMap; import java.util.stream.Collectors; -import org.apache.commons.cli.CommandLine; -import org.apache.commons.cli.CommandLineParser; -import org.apache.commons.cli.DefaultParser; -import org.apache.commons.cli.HelpFormatter; -import org.apache.commons.cli.Option; -import org.apache.commons.cli.Options; -import org.apache.commons.cli.ParseException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.google.gson.Gson; -import com.google.gson.stream.JsonReader; -import com.google.gson.stream.JsonToken; -import com.google.gson.stream.JsonWriter; - public class RDF2JSON { 
private static final Logger logger = LoggerFactory.getLogger(RDF2JSON.class); From 41c27d475e5d6c3d4f63b1b9bfeab625979b8ca5 Mon Sep 17 00:00:00 2001 From: deepananbalagan Date: Tue, 22 Oct 2024 17:11:53 +0200 Subject: [PATCH 112/146] added null check for JSONElement in solr response --- .../transforms/RemoveLiteralDatatypesTransform.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/transforms/RemoveLiteralDatatypesTransform.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/transforms/RemoveLiteralDatatypesTransform.java index a2ef26b2a..32a9648a2 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/transforms/RemoveLiteralDatatypesTransform.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/transforms/RemoveLiteralDatatypesTransform.java @@ -2,6 +2,7 @@ import com.google.gson.JsonArray; import com.google.gson.JsonElement; +import com.google.gson.JsonNull; import com.google.gson.JsonObject; import com.google.gson.JsonPrimitive; import uk.ac.ebi.spot.ols.repository.transforms.helpers.JsonCollectionHelper; @@ -16,6 +17,9 @@ public class RemoveLiteralDatatypesTransform { into just "Diabetes" */ public static JsonElement transform(JsonElement object) { + + if (object == null) + return JsonNull.INSTANCE; if (object.isJsonArray()) { From 9ce17096208ad04d74a7124c8b1fc5de370821bd Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Wed, 23 Oct 2024 16:40:43 +0200 Subject: [PATCH 113/146] updated dockerized dataload based on csv2neo for #76 --- dataload/load_into_neo4j.sh | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/dataload/load_into_neo4j.sh b/dataload/load_into_neo4j.sh index 0c5305687..d79e2d62f 100755 --- a/dataload/load_into_neo4j.sh +++ b/dataload/load_into_neo4j.sh @@ -5,17 +5,15 @@ if [ $# == 0 ]; then exit 1 fi +SCRIPT_PATH=$(dirname $(readlink -f $0)) + rm -rf $1/data/databases/neo4j rm -rf $1/data/transactions/neo4j -$1/bin/neo4j-admin database import full \ - --ignore-empty-strings=true \ - --legacy-style-quoting=false \ - --multiline-fields=true \ - --read-buffer-size=16777216 \ - --array-delimiter="|" \ - --threads=16 \ - $(./make_csv_import_cmd.sh $2) +$1/bin/neo4j start +echo csv2neo + +java -jar $SCRIPT_PATH/csv2neo/target/csv2neo-1.0-SNAPSHOT.jar -m i -d $2 -bs 1000 -ps 20 -t 5 $1/bin/neo4j-admin database info neo4j From de3434b7a27406924c6cdb6fc8dfaf6c5e4cc3f2 Mon Sep 17 00:00:00 2001 From: deepananbalagan Date: Thu, 24 Oct 2024 17:09:43 +0200 Subject: [PATCH 114/146] added fix for "expanded" in search result. 
Fixes #78 --- .../controller/api/v1/V1SearchController.java | 105 +++++++++++++++++- 1 file changed, 102 insertions(+), 3 deletions(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java index e51c3097c..9d40fa3af 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java @@ -22,6 +22,7 @@ import io.swagger.v3.oas.annotations.Parameter; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.common.SolrDocument; +import org.apache.solr.common.SolrDocumentList; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -166,7 +167,7 @@ public void search( if (groupField != null) { solrQuery.addFilterQuery("{!collapse field=iri}"); - solrQuery.add("expand=true", "true"); + solrQuery.add("expand", "true"); solrQuery.add("expand.rows", "100"); } @@ -231,7 +232,8 @@ public void search( QueryResponse qr = solrClient.dispatchSearch(solrQuery, "ols4_entities"); - List docs = new ArrayList<>(); + List docs = parseSolrDocs(qr.getResults(), fieldList, lang); + /*List docs = new ArrayList<>(); for(SolrDocument res : qr.getResults()) { String _json = (String)res.get("_json"); if(_json == null) { @@ -299,7 +301,7 @@ public void search( } docs.add(outDoc); - } + }*/ Map responseHeader = new HashMap<>(); responseHeader.put("status", 0); @@ -337,11 +339,108 @@ public void search( * Fix: End */ + /** + * Fix: Start + * issue - https://github.com/TIBHannover/ols4/issues/78 + * + */ + if(qr.getExpandedResults() != null && qr.getExpandedResults().size() > 0) + responseObj.put("expanded", parseExpandedSolrResults(qr.getExpandedResults(), fieldList, lang)); + + /** + * Fix: End + */ response.setContentType(MediaType.APPLICATION_JSON_VALUE); response.setCharacterEncoding(StandardCharsets.UTF_8.name()); response.getOutputStream().write(gson.toJson(responseObj).getBytes(StandardCharsets.UTF_8)); response.flushBuffer(); } + + private Map parseExpandedSolrResults(Map expandedResults, Collection fieldList, + String lang) { + Map result = new HashMap<>(); + expandedResults.entrySet().parallelStream().forEach((entry) -> { + Map expandedResult = new HashMap<>(); + expandedResult.put("numFound", entry.getValue().getNumFound()); + expandedResult.put("start", entry.getValue().getStart()); + expandedResult.put("docs", parseSolrDocs(entry.getValue(), fieldList, lang)); + result.put(entry.getKey(), expandedResult); + }); + return result; + } + + private List parseSolrDocs(SolrDocumentList results, Collection fieldList, String lang) { + List docs = new ArrayList<>(); + for(SolrDocument res : results) { + String _json = (String)res.get("_json"); + if(_json == null) { + throw new RuntimeException("_json was null"); + } + + JsonObject json = RemoveLiteralDatatypesTransform.transform( + LocalizationTransform.transform( JsonParser.parseString( _json ), lang) + ).getAsJsonObject(); + + Map outDoc = new HashMap<>(); + + if (fieldList == null) { + fieldList = new HashSet<>(); + } + // default fields + if (fieldList.isEmpty()) { + fieldList.add("id"); + fieldList.add("iri"); + fieldList.add("ontology_name"); + fieldList.add("label"); + fieldList.add("description"); + fieldList.add("short_form"); + fieldList.add("obo_id"); + fieldList.add("type"); + fieldList.add("ontology_prefix"); + } + + if 
(fieldList.contains("id")) outDoc.put("id", JsonHelper.getString(json, "id")); + if (fieldList.contains("iri")) outDoc.put("iri", JsonHelper.getString(json, "iri")); + if (fieldList.contains("ontology_name")) outDoc.put("ontology_name", JsonHelper.getString(json, "ontologyId")); + if (fieldList.contains("label")) { + var label = outDoc.put("label", JsonHelper.getString(json, "label")); + if(label!=null) { + outDoc.put("label", label); + } + } + if (fieldList.contains("description")) outDoc.put("description", JsonHelper.getStrings(json, "definition")); + if (fieldList.contains("short_form")) outDoc.put("short_form", JsonHelper.getString(json, "shortForm")); + if (fieldList.contains("obo_id")) outDoc.put("obo_id", JsonHelper.getString(json, "curie")); + if (fieldList.contains(IS_DEFINING_ONTOLOGY.getOls3Text())) outDoc.put(IS_DEFINING_ONTOLOGY.getOls3Text(), + JsonHelper.getString(json, IS_DEFINING_ONTOLOGY.getText()) != null && + JsonHelper.getString(json, IS_DEFINING_ONTOLOGY.getText()).equals("true")); + if (fieldList.contains("type")) { + outDoc.put("type", JsonHelper.getType(json, "type")); + } + if (fieldList.contains("synonym")) outDoc.put("synonym", JsonHelper.getStrings(json, "synonym")); + if (fieldList.contains("ontology_prefix")) outDoc.put("ontology_prefix", JsonHelper.getString(json, "ontologyPreferredPrefix")); + if (fieldList.contains("subset")) outDoc.put("subset", JsonHelper.getStrings(json, "http://www.geneontology.org/formats/oboInOwl#inSubset")); + if (fieldList.contains("ontology_iri")) outDoc.put("ontology_iri", JsonHelper.getStrings(json, "ontologyIri").get(0)); + if (fieldList.contains("score")) outDoc.put("score", res.get("score")); + + // Include annotations that were specified with _annotation + boolean anyAnnotations = fieldList.stream() + .anyMatch(s -> s.endsWith("_annotation")); + if (anyAnnotations) { + Stream annotationFields = fieldList.stream().filter(s -> s.endsWith("_annotation")); + Map termAnnotations = AnnotationExtractor.extractAnnotations(json); + + annotationFields.forEach(annotationName -> { + // Remove _annotation suffix to get plain annotation name + String fieldName = annotationName.replaceFirst("_annotation$", ""); + outDoc.put(annotationName, termAnnotations.get(fieldName)); + }); + } + + docs.add(outDoc); + } + return docs; + } private Map> parseFacetFields(List facetFields) { Map> facetFieldsMap = new HashMap<>(); From 46e9335453183cb44c62776385ab6231b6022feb Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Fri, 25 Oct 2024 11:13:58 +0200 Subject: [PATCH 115/146] added old neo4j load script as backup and disabled duplicate start of neo4j for #76 --- dataload/create_neo4j_indexes.sh | 4 ++-- dataload/load_into_neo4j.sh | 4 +--- dataload/load_into_neo4j_with_import_tool.sh | 23 ++++++++++++++++++++ 3 files changed, 26 insertions(+), 5 deletions(-) create mode 100644 dataload/load_into_neo4j_with_import_tool.sh diff --git a/dataload/create_neo4j_indexes.sh b/dataload/create_neo4j_indexes.sh index 9ee4bce3b..b4466a6d5 100755 --- a/dataload/create_neo4j_indexes.sh +++ b/dataload/create_neo4j_indexes.sh @@ -5,8 +5,8 @@ if [ $# == 0 ]; then exit 1 fi -$1/bin/neo4j start -sleep 20 +#$1/bin/neo4j start +#sleep 20 echo Creating neo4j indexes... 
diff --git a/dataload/load_into_neo4j.sh b/dataload/load_into_neo4j.sh index d79e2d62f..bd7341c9b 100755 --- a/dataload/load_into_neo4j.sh +++ b/dataload/load_into_neo4j.sh @@ -11,11 +11,9 @@ rm -rf $1/data/databases/neo4j rm -rf $1/data/transactions/neo4j $1/bin/neo4j start +sleep 20 echo csv2neo - java -jar $SCRIPT_PATH/csv2neo/target/csv2neo-1.0-SNAPSHOT.jar -m i -d $2 -bs 1000 -ps 20 -t 5 -$1/bin/neo4j-admin database info neo4j - diff --git a/dataload/load_into_neo4j_with_import_tool.sh b/dataload/load_into_neo4j_with_import_tool.sh new file mode 100644 index 000000000..0c5305687 --- /dev/null +++ b/dataload/load_into_neo4j_with_import_tool.sh @@ -0,0 +1,23 @@ +#!/usr/bin/env bash + +if [ $# == 0 ]; then + echo "Usage: $0 " + exit 1 +fi + +rm -rf $1/data/databases/neo4j +rm -rf $1/data/transactions/neo4j + +$1/bin/neo4j-admin database import full \ + --ignore-empty-strings=true \ + --legacy-style-quoting=false \ + --multiline-fields=true \ + --read-buffer-size=16777216 \ + --array-delimiter="|" \ + --threads=16 \ + $(./make_csv_import_cmd.sh $2) + +$1/bin/neo4j-admin database info neo4j + + + From 626396ab50d46625f3db247b1450558f65ab33fe Mon Sep 17 00:00:00 2001 From: deepananbalagan Date: Fri, 25 Oct 2024 14:02:41 +0200 Subject: [PATCH 116/146] added JsonNull check in objectToString. Fixes #83 --- .../main/java/uk/ac/ebi/spot/ols/repository/v1/JsonHelper.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/JsonHelper.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/JsonHelper.java index dd0bc07d3..04b80f6e1 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/JsonHelper.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/JsonHelper.java @@ -3,6 +3,7 @@ import com.google.common.collect.Lists; import com.google.gson.JsonArray; import com.google.gson.JsonElement; +import com.google.gson.JsonNull; import com.google.gson.JsonObject; import java.util.List; @@ -21,7 +22,7 @@ public static boolean getBoolean(JsonObject json, String key) { public static String objectToString(JsonElement value) { - if(value == null) { + if(value == null || value instanceof JsonNull) { return null; } From 3b2a3dfde58fdab74468566a9f1dd8a0e697db3e Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Fri, 25 Oct 2024 16:47:03 +0200 Subject: [PATCH 117/146] initialized null jsonelements with default jsonobject and generalized exception for #85 --- .../api/v1/V1OntologyTermController.java | 2 +- .../spot/ols/repository/v1/V1GraphRepository.java | 15 +++++++-------- 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyTermController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyTermController.java index 297594482..789b60671 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyTermController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyTermController.java @@ -485,7 +485,7 @@ HttpEntity graphJson( Object object= graphRepository.getGraphForClass(decoded, ontologyId, lang); ObjectWriter ow = new ObjectMapper().writer().withDefaultPrettyPrinter(); return new HttpEntity(ow.writeValueAsString(object)); - } catch (JsonProcessingException e) { + } catch (Exception e) { e.printStackTrace(); } throw new ResourceNotFoundException(); diff --git 
a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1GraphRepository.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1GraphRepository.java index 9c7aa2ce8..2007ac9ed 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1GraphRepository.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1GraphRepository.java @@ -1,9 +1,6 @@ package uk.ac.ebi.spot.ols.repository.v1; -import com.google.gson.Gson; -import com.google.gson.JsonElement; -import com.google.gson.JsonObject; -import com.google.gson.JsonParser; +import com.google.gson.*; import org.neo4j.driver.types.Node; import org.neo4j.driver.types.Relationship; import org.springframework.beans.factory.annotation.Autowired; @@ -150,8 +147,9 @@ Map getRelatedFrom(String entityId) { JsonObject getOntologyNodeJson(Node node, String lang) { - - JsonElement ontologyNodeObject = JsonParser.parseString((String) node.asMap().get("_json")); + JsonElement ontologyNodeObject = new JsonObject(); + if(node.asMap().get("_json") != null && node.asMap().get("_json") instanceof String) + ontologyNodeObject = JsonParser.parseString((String) node.asMap().get("_json")); return RemoveLiteralDatatypesTransform.transform( LocalizationTransform.transform(ontologyNodeObject, lang) @@ -159,8 +157,9 @@ JsonObject getOntologyNodeJson(Node node, String lang) { JsonObject getOntologyEdgeJson(Relationship r, String lang) { - - JsonElement ontologyEdgeObject = JsonParser.parseString((String) r.asMap().get("_json")); + JsonElement ontologyEdgeObject = new JsonObject(); + if(r.asMap().get("_json") != null && r.asMap().get("_json") instanceof String) + ontologyEdgeObject = JsonParser.parseString((String) r.asMap().get("_json")); return RemoveLiteralDatatypesTransform.transform( LocalizationTransform.transform(ontologyEdgeObject, lang) From 275a8344eb8ba518b503c88c3583f3e7819ddacc Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Sun, 27 Oct 2024 15:00:04 +0100 Subject: [PATCH 118/146] added logic for unconfigured preferred prefixes for #73 --- .../ebi/spot/ols/controller/api/v1/V1SelectController.java | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java index f4988adc5..73712718f 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java @@ -183,7 +183,12 @@ public void select( outDoc.put("type", JsonHelper.getType(json, "type")); } if (fieldList.contains("synonym")) outDoc.put("synonym", JsonHelper.getStrings(json, "synonym")); - if (fieldList.contains("ontology_prefix")) outDoc.put("ontology_prefix", JsonHelper.getString(json, "ontologyPreferredPrefix")); + if (fieldList.contains("ontology_prefix")) + outDoc.put("ontology_prefix", JsonHelper.getString(json, "ontologyPreferredPrefix")); + else if (fieldList.contains("obo_id")) + outDoc.put("ontology_prefix", JsonHelper.getString(json, "ontologyId").toUpperCase()); + else + outDoc.put("ontology_prefix", JsonHelper.getString(json, "ontologyId")); docs.add(outDoc); } From fa23a4d04ca65867cd05d5a663640485c242d6c3 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Mon, 28 Oct 2024 10:52:23 +0100 Subject: [PATCH 119/146] corrected logic for #73 ---
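This and the following patch correct the fallback that PATCH 118 introduced, and the end state is easier to read as a single rule: use ontologyPreferredPrefix when it is configured, otherwise fall back to the upper-cased ontologyId. Below is a standalone restatement with Gson, assuming missing keys come back as null through JsonHelper; the demo class is illustrative, not the controller code:

import com.google.gson.JsonObject;

public class PrefixFallbackDemo {

    // Net rule after PATCH 118-120: prefer the configured ontologyPreferredPrefix,
    // otherwise derive the prefix from the ontology id (so "efo" becomes "EFO").
    static String ontologyPrefix(JsonObject json) {
        if (json.has("ontologyPreferredPrefix") && !json.get("ontologyPreferredPrefix").isJsonNull())
            return json.get("ontologyPreferredPrefix").getAsString();
        return json.get("ontologyId").getAsString().toUpperCase();
    }

    public static void main(String[] args) {
        JsonObject doc = new JsonObject();
        doc.addProperty("ontologyId", "efo");
        System.out.println(ontologyPrefix(doc)); // EFO, via the fallback path
        doc.addProperty("ontologyPreferredPrefix", "GO");
        System.out.println(ontologyPrefix(doc)); // GO, via the configured path
    }
}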
.../ebi/spot/ols/controller/api/v1/V1SelectController.java | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java index 73712718f..20aaa2000 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java @@ -183,12 +183,10 @@ public void select( outDoc.put("type", JsonHelper.getType(json, "type")); } if (fieldList.contains("synonym")) outDoc.put("synonym", JsonHelper.getStrings(json, "synonym")); - if (fieldList.contains("ontology_prefix")) + if (fieldList.contains("ontology_prefix") && JsonHelper.getString(json, "ontologyPreferredPrefix") != null) outDoc.put("ontology_prefix", JsonHelper.getString(json, "ontologyPreferredPrefix")); - else if (fieldList.contains("obo_id")) - outDoc.put("ontology_prefix", JsonHelper.getString(json, "ontologyId").toUpperCase()); else - outDoc.put("ontology_prefix", JsonHelper.getString(json, "ontologyId")); + outDoc.put("ontology_prefix", JsonHelper.getString(json, "ontologyId").toUpperCase()); docs.add(outDoc); } From a099163e5c2aa114d00fda687870824b1e3bdb37 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Mon, 28 Oct 2024 11:03:31 +0100 Subject: [PATCH 120/146] corrected logic for #73 --- .../ac/ebi/spot/ols/controller/api/v1/V1SelectController.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java index 20aaa2000..ab6d7eeda 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java @@ -185,7 +185,7 @@ public void select( if (fieldList.contains("synonym")) outDoc.put("synonym", JsonHelper.getStrings(json, "synonym")); if (fieldList.contains("ontology_prefix") && JsonHelper.getString(json, "ontologyPreferredPrefix") != null) outDoc.put("ontology_prefix", JsonHelper.getString(json, "ontologyPreferredPrefix")); - else + else if (fieldList.contains("ontology_prefix")) outDoc.put("ontology_prefix", JsonHelper.getString(json, "ontologyId").toUpperCase()); docs.add(outDoc); From 25c413707592f44a6ee0c63b5237647acefd4af5 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Mon, 28 Oct 2024 18:07:47 +0100 Subject: [PATCH 121/146] added convertToRDF parameter to rdf2json in dockerized installation for #76 --- dataload/create_datafiles.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dataload/create_datafiles.sh b/dataload/create_datafiles.sh index ab70b4374..936cd37d6 100755 --- a/dataload/create_datafiles.sh +++ b/dataload/create_datafiles.sh @@ -18,7 +18,7 @@ rm -f $OUTDIR/* echo JAVA_OPTS=$JAVA_OPTS echo rdf2json -java $JAVA_OPTS -DentityExpansionLimit=0 -DtotalEntitySizeLimit=0 -Djdk.xml.totalEntitySizeLimit=0 -Djdk.xml.entityExpansionLimit=0 -jar $SCRIPT_PATH/rdf2json/target/rdf2json-1.0-SNAPSHOT.jar --config "$CONFIG_URL" --output "$JSON_PATH" "${@:3}" +java $JAVA_OPTS -DentityExpansionLimit=0 -DtotalEntitySizeLimit=0 -Djdk.xml.totalEntitySizeLimit=0 -Djdk.xml.entityExpansionLimit=0 -jar 
$SCRIPT_PATH/rdf2json/target/rdf2json-1.0-SNAPSHOT.jar --config "$CONFIG_URL" --output "$JSON_PATH" --convertToRDF "${@:3}"

echo linker
java -jar $SCRIPT_PATH/linker/target/linker-1.0-SNAPSHOT.jar --input "$JSON_PATH" --output "$JSON_PATH_LINKED"

From 362ab3def129c940b04de9123da516748b4357e0 Mon Sep 17 00:00:00 2001
From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com>
Date: Tue, 29 Oct 2024 10:02:00 +0100
Subject: [PATCH 122/146] removed duplicate neo4j index step for #76

---
 dataload/dataload.dockersh | 1 -
 1 file changed, 1 deletion(-)

diff --git a/dataload/dataload.dockersh b/dataload/dataload.dockersh
index 8dae8f417..3a2153707 100755
--- a/dataload/dataload.dockersh
+++ b/dataload/dataload.dockersh
@@ -6,7 +6,6 @@ echo Loading data...
 rm -rf /opt/solr/server/solr/*
 cp -r /opt/ols/dataload/solr_config/* /opt/solr/server/solr/
 ./load_into_solr.sh /opt/solr /tmp/out
-./create_neo4j_indexes.sh /opt/neo4j
 
 chmod -R 777 /opt/solr/server/solr/*
 chmod -R 777 /opt/neo4j/data/*

From 139ceadf4fba1776540c0b58a2d8da17e6927b58 Mon Sep 17 00:00:00 2001
From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com>
Date: Tue, 29 Oct 2024 10:04:34 +0100
Subject: [PATCH 123/146] reverted changes in unused create neo4j indexes
 script for #76

---
 dataload/create_neo4j_indexes.sh | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/dataload/create_neo4j_indexes.sh b/dataload/create_neo4j_indexes.sh
index b4466a6d5..9ee4bce3b 100755
--- a/dataload/create_neo4j_indexes.sh
+++ b/dataload/create_neo4j_indexes.sh
@@ -5,8 +5,8 @@ if [ $# == 0 ]; then
   exit 1
 fi
 
-#$1/bin/neo4j start
-#sleep 20
+$1/bin/neo4j start
+sleep 20
 
 echo Creating neo4j indexes...

From 5083153b5aa121c99faea2e5d7cc33e1ab39b88b Mon Sep 17 00:00:00 2001
From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com>
Date: Tue, 29 Oct 2024 11:35:14 +0100
Subject: [PATCH 124/146] made dockerized csv2neo configurable for #76

---
 dataload/load_into_neo4j.sh | 7 +++++--
 docker-compose.yml          | 3 +++
 2 files changed, 8 insertions(+), 2 deletions(-)

diff --git a/dataload/load_into_neo4j.sh b/dataload/load_into_neo4j.sh
index bd7341c9b..aa2142e7f 100755
--- a/dataload/load_into_neo4j.sh
+++ b/dataload/load_into_neo4j.sh
@@ -6,14 +6,17 @@ if [ $# == 0 ]; then
 fi
 
 SCRIPT_PATH=$(dirname $(readlink -f $0))
+[[ -z "${BATCH_SIZE}" ]] && bs=1000 || bs="${BATCH_SIZE}"
+[[ -z "${POOL_SIZE}" ]] && ps=20 || ps="${POOL_SIZE}"
+[[ -z "${NOOF_ATTEMPTS}" ]] && t=5 || t="${NOOF_ATTEMPTS}"
 
 rm -rf $1/data/databases/neo4j
 rm -rf $1/data/transactions/neo4j
 
 $1/bin/neo4j start
 sleep 20
 
-echo csv2neo
-java -jar $SCRIPT_PATH/csv2neo/target/csv2neo-1.0-SNAPSHOT.jar -m i -d $2 -bs 1000 -ps 20 -t 5
+echo "csv2neo with batch size $bs and pool size $ps and number of attempts $t"
+java -jar $SCRIPT_PATH/csv2neo/target/csv2neo-1.0-SNAPSHOT.jar -m i -d $2 -bs $bs -ps $ps -t $t
 
diff --git a/docker-compose.yml b/docker-compose.yml
index d94132fa1..1f0e15973 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -3,6 +3,9 @@ services:
   ols4-dataload:
     environment:
       - JAVA_OPTS=${JAVA_OPTS}
+      - BATCH_SIZE=${BATCH_SIZE}
+      - POOL_SIZE=${POOL_SIZE}
+      - NOOF_ATTEMPTS=${NOOF_ATTEMPTS}
     build:
       context: .
       dockerfile: ./dataload/Dockerfile

From df44edda773b974fd24fb0385e30fc8ae8debcf3 Mon Sep 17 00:00:00 2001
From: deepananbalagan
Date: Tue, 29 Oct 2024 15:27:58 +0100
Subject: [PATCH 125/146] updated uri writing to json.
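
In effect, a URI-valued property is now written as a plain JSON string
instead of being wrapped in a {"uri": ...} object, and a missing URI
becomes a JSON null rather than the "?" placeholder. A rough sketch of
the two output shapes (illustrative only; Gson's streaming JsonWriter
is assumed, as in the patched method, and the IRI is an example):

import com.google.gson.stream.JsonWriter;
import java.io.StringWriter;

public class UriWritingSketch {
    public static void main(String[] args) throws Exception {
        String uri = "http://purl.obolibrary.org/obo/DUO_0000017"; // example IRI

        // old shape: {"uri":"http://purl.obolibrary.org/obo/DUO_0000017"}
        StringWriter before = new StringWriter();
        JsonWriter oldWriter = new JsonWriter(before);
        oldWriter.beginObject();
        oldWriter.name("uri");
        oldWriter.value(uri);
        oldWriter.endObject();
        oldWriter.close();

        // new shape: "http://purl.obolibrary.org/obo/DUO_0000017"
        StringWriter after = new StringWriter();
        JsonWriter newWriter = new JsonWriter(after);
        newWriter.setLenient(true); // allow a bare top-level value in this sketch
        newWriter.value(uri);       // value((String) null) would emit a JSON null
        newWriter.close();

        System.out.println(before + " -> " + after);
    }
}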
--- .../main/java/uk/ac/ebi/rdf2json/OntologyGraph.java | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java index dbad3962d..f746ec244 100644 --- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java +++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java @@ -610,16 +610,8 @@ public void writeValue(JsonWriter writer, PropertyValue value) throws Throwable if (uriNode != null && !isXMLBuiltinDatatype(uri) && uriNode.types.contains(OntologyNode.NodeType.DATATYPE)) { // special case for rdfs:Datatype; nest it as with a bnode instead of referencing writeNode(writer, uriNode, Set.of("datatype")); - } else if (uri != null){ - writer.beginObject(); - writer.name("uri"); - writer.value(uri); - writer.endObject(); - } else { - writer.beginObject(); - writer.name("uri"); - writer.value("?"); - writer.endObject(); + }else { + writer.value(uri); } break; case RELATED: From e9df063bc63bd06ba92d970cb65f4effdb8ecfd3 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Mon, 4 Nov 2024 21:12:47 +0100 Subject: [PATCH 126/146] configured max row size in filter queries for #72 --- .../java/uk/ac/ebi/spot/ols/repository/solr/OlsSolrClient.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/solr/OlsSolrClient.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/solr/OlsSolrClient.java index e65c783ea..75ba375dd 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/solr/OlsSolrClient.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/solr/OlsSolrClient.java @@ -18,6 +18,7 @@ import org.apache.solr.common.SolrDocument; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.Pageable; import org.springframework.stereotype.Component; @@ -119,7 +120,7 @@ public JsonElement getByIndex(OlsSolrQuery query, int i) { public Set getSet(OlsSolrQuery query){ Set tempSet = new HashSet<>(); - QueryResponse qr = runSolrQuery(query, null); + QueryResponse qr = runSolrQuery(query, PageRequest.of(0, MAX_ROWS)); for (int i = 0; i Date: Tue, 5 Nov 2024 16:50:04 +0100 Subject: [PATCH 127/146] updated neo4j and solr and made their download optional for #59 --- dataload/Dockerfile | 30 ++++++++++++++++-------------- docker-compose.yml | 4 ++-- 2 files changed, 18 insertions(+), 16 deletions(-) diff --git a/dataload/Dockerfile b/dataload/Dockerfile index c052ed8f5..bdbf0f2c8 100644 --- a/dataload/Dockerfile +++ b/dataload/Dockerfile @@ -7,30 +7,32 @@ FROM maven:3.9.6-eclipse-temurin-17 RUN addgroup --system --gid 1000 ols && adduser --system --uid 1000 --ingroup ols ols +# Copy all the code for dataload into /opt/dataload and build the JARs +# + +RUN mkdir /opt/ols && mkdir /opt/ols/dataload && mkdir /opt/ols/ols-shared + +COPY ./dataload /opt/ols/dataload +COPY ./ols-shared /opt/ols/ols-shared +COPY ./pom.xml /opt/ols + # Extract Neo4j and Solr vanilla installs to /opt/neo4j and /opt/solr # # We use these only as temporary servers for the dataload. 
The "real" Neo4j # and Solr servers are the standard images specified in docker-compose.yml # -RUN mkdir /opt/neo4j && \ - curl https://dist.neo4j.org/neo4j-community-5.19.0-unix.tar.gz | tar -xz --strip-components=1 -C /opt/neo4j -RUN mkdir /opt/solr && \ - curl https://archive.apache.org/dist/solr/solr/9.0.0/solr-9.0.0.tgz | tar -xz --strip-components=1 -C /opt/solr +RUN if [ ! -f "/opt/ols/dataload/neo4j-community-5.25.1-unix.tar.gz" ]; \ + then mkdir /opt/neo4j && curl https://dist.neo4j.org/neo4j-community-5.25.1-unix.tar.gz | tar -xz --strip-components=1 -C /opt/neo4j; \ + else mkdir /opt/neo4j && tar -xz -f /opt/ols/dataload/neo4j-community-5.25.1-unix.tar.gz --strip-components=1 -C /opt/neo4j; fi + +RUN if [ ! -f "/opt/ols/dataload/solr-9.7.0.tgz" ]; \ + then mkdir /opt/solr && curl https://archive.apache.org/dist/solr/solr/9.7.0/solr-9.7.0.tgz | tar -xz --strip-components=1 -C /opt/solr; \ + else mkdir /opt/solr && tar -xz -f /opt/ols/dataload/solr-9.7.0.tgz --strip-components=1 -C /opt/solr; fi RUN echo "dbms.security.auth_enabled=false" >> /opt/neo4j/conf/neo4j.conf RUN echo "dbms.jvm.additional=-Dorg.neo4j.kernel.impl.index.schema.GenericNativeIndexPopulator.blockBasedPopulation=true" >> /opt/neo4j/conf/neo4j.conf -# Copy all the code for dataload into /opt/dataload and build the JARs -# - -RUN mkdir /opt/ols && mkdir /opt/ols/dataload && mkdir /opt/ols/ols-shared - - -COPY ./dataload /opt/ols/dataload -COPY ./ols-shared /opt/ols/ols-shared -COPY ./pom.xml /opt/ols - RUN cd /opt/ols/ols-shared && mvn package RUN mvn install:install-file -DcreateChecksum=true -Dpackaging=jar -Dfile=/opt/ols/ols-shared/target/ols4-shared-1.0.0-SNAPSHOT.jar -DgroupId=uk.ac.ebi.spot.ols -DartifactId=ols4-shared -Dversion=1.0.0-SNAPSHOT diff --git a/docker-compose.yml b/docker-compose.yml index 1f0e15973..93adea45f 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -25,7 +25,7 @@ services: command: - mongod ols4-solr: - image: solr:9.0.0 + image: solr:9.7.0 environment: - SOLR_HOME=/mnt/ols4-solr-data ports: @@ -37,7 +37,7 @@ services: ols4-dataload: condition: service_completed_successfully ols4-neo4j: - image: neo4j:5.19.0-community + image: neo4j:5.25.1-community ports: - 7474:7474 - 7687:7687 From 8daf27344af30be247bdb7ba7875f04026858db5 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Tue, 5 Nov 2024 17:45:14 +0100 Subject: [PATCH 128/146] placed copy and build commands in order for #59 --- dataload/Dockerfile | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/dataload/Dockerfile b/dataload/Dockerfile index bdbf0f2c8..76553405b 100644 --- a/dataload/Dockerfile +++ b/dataload/Dockerfile @@ -16,6 +16,11 @@ COPY ./dataload /opt/ols/dataload COPY ./ols-shared /opt/ols/ols-shared COPY ./pom.xml /opt/ols +RUN cd /opt/ols/ols-shared && mvn package + +RUN mvn install:install-file -DcreateChecksum=true -Dpackaging=jar -Dfile=/opt/ols/ols-shared/target/ols4-shared-1.0.0-SNAPSHOT.jar -DgroupId=uk.ac.ebi.spot.ols -DartifactId=ols4-shared -Dversion=1.0.0-SNAPSHOT +RUN cd /opt/ols/dataload && mvn package + # Extract Neo4j and Solr vanilla installs to /opt/neo4j and /opt/solr # # We use these only as temporary servers for the dataload. The "real" Neo4j @@ -33,11 +38,6 @@ RUN if [ ! 
-f "/opt/ols/dataload/solr-9.7.0.tgz" ]; \ RUN echo "dbms.security.auth_enabled=false" >> /opt/neo4j/conf/neo4j.conf RUN echo "dbms.jvm.additional=-Dorg.neo4j.kernel.impl.index.schema.GenericNativeIndexPopulator.blockBasedPopulation=true" >> /opt/neo4j/conf/neo4j.conf -RUN cd /opt/ols/ols-shared && mvn package - -RUN mvn install:install-file -DcreateChecksum=true -Dpackaging=jar -Dfile=/opt/ols/ols-shared/target/ols4-shared-1.0.0-SNAPSHOT.jar -DgroupId=uk.ac.ebi.spot.ols -DartifactId=ols4-shared -Dversion=1.0.0-SNAPSHOT -RUN cd /opt/ols/dataload && mvn package - # Copy the OLS4 Solr config into our temporary Solr server # RUN rm -rf /opt/solr/server/solr From f4463e8d124ee953c0afda035d619ceb5b1a1355 Mon Sep 17 00:00:00 2001 From: deepananbalagan Date: Tue, 12 Nov 2024 14:55:31 +0100 Subject: [PATCH 129/146] added code to retrieve FullJSTree with viewMode and siblings. Fixes #74 --- .../api/v1/V1OntologyTermController.java | 19 +- .../ols/repository/neo4j/OlsNeo4jClient.java | 33 +++ .../repository/v1/V1FullJsTreeBuilder.java | 248 ++++++++++++++++++ .../repository/v1/V1JsTreeRepositoryExtn.java | 139 ++++++++++ 4 files changed, 437 insertions(+), 2 deletions(-) create mode 100644 backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1FullJsTreeBuilder.java create mode 100644 backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1JsTreeRepositoryExtn.java diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyTermController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyTermController.java index 789b60671..a7da5ab1e 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyTermController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyTermController.java @@ -32,6 +32,10 @@ import java.util.Arrays; import java.util.Collections; +import java.nio.charset.StandardCharsets; +import uk.ac.ebi.spot.ols.repository.v1.V1JsTreeRepositoryExtn; +import uk.ac.ebi.spot.ols.service.ViewMode; + /** * @author Simon Jupp * @date 02/11/15 @@ -56,6 +60,9 @@ public class V1OntologyTermController { @Autowired V1JsTreeRepository jsTreeRepository; + + @Autowired + V1JsTreeRepositoryExtn jsTreeRepositoryExtn; @Autowired V1GraphRepository graphRepository; @@ -425,8 +432,8 @@ HttpEntity graphJsTree( ontologyId = ontologyId.toLowerCase(); try { - String decodedTermId = UriUtils.decode(termId, "UTF-8"); - Object object= jsTreeRepository.getJsTreeForClass(decodedTermId, ontologyId, lang); + String decodedTermId = decodeUrl(termId); + Object object= jsTreeRepositoryExtn.getJsTreeForClassByViewMode(decodedTermId, ontologyId, lang, ViewMode.getFromShortName(viewMode), siblings); ObjectWriter ow = new ObjectMapper().writer().withDefaultPrettyPrinter(); return new HttpEntity(ow.writeValueAsString(object)); } catch (JsonProcessingException e) { @@ -787,5 +794,13 @@ HttpEntity> termHierarchicalAncestorsByOntology( public void handleError(HttpServletRequest req, Exception exception) { } + + private static String decodeUrl(String url) { + if(url.contains("%") || url.contains("+")) + { + return decodeUrl(java.net.URLDecoder.decode(url, StandardCharsets.UTF_8)); + } + return url; + } } diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/neo4j/OlsNeo4jClient.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/neo4j/OlsNeo4jClient.java index c0a142d76..7279c28fa 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/neo4j/OlsNeo4jClient.java +++ 
b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/neo4j/OlsNeo4jClient.java @@ -155,6 +155,39 @@ public Page recursivelyTraverseIncomingEdges(String type, String id return neo4jClient.queryPaginated(query, "c", countQuery, parameters("id", id), pageable); } + public Page recursivelyTraverseOutgoingEdgesWithSiblings(String type, String id, String ontologyId, List edgeIRIs, Map edgeProps, Pageable pageable) { + + String edge = makeEdgesList(edgeIRIs, Map.of()); + + String query = + "MATCH (c:" + type + ") WHERE c.id = $id " + + "WITH c " + + "OPTIONAL MATCH (c)-[edge:" + edge + " *]->(ancestor) " + + "RETURN DISTINCT ancestor as a " + + "UNION " + + "MATCH (c:" + type + ") WHERE c.id = $id " + + "WITH c " + + "OPTIONAL MATCH (c)-[edge:" + edge + " *]->(ancestor) " + + "OPTIONAL MATCH (ancestor)<-[:" + edge + "]-(descendant) " + + "RETURN DISTINCT descendant as a "; + + String countQuery = + "CALL {" + + "MATCH (a:" + type + ") WHERE a.id = $id " + + "WITH a " + + "OPTIONAL MATCH (a)-[edge:" + edge + " *]->(ancestor) " + + "RETURN DISTINCT ancestor as a " + + "UNION " + + "MATCH (c:" + type + ") WHERE c.id = $id " + + "WITH c " + + "OPTIONAL MATCH (c)-[edge:" + edge + " *]->(ancestor) " + + "OPTIONAL MATCH (ancestor)<-[:" + edge + "]-(descendant) " + + "RETURN DISTINCT descendant as a " + + "}" + + "RETURN count(*)"; + + return neo4jClient.queryPaginated(query, "a", countQuery, parameters("type", type, "id", id, "ontologyId", ontologyId), pageable); + } private static String makeEdgesList(List edgeIRIs, Map edgeProperties) { diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1FullJsTreeBuilder.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1FullJsTreeBuilder.java new file mode 100644 index 000000000..77df1bedd --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1FullJsTreeBuilder.java @@ -0,0 +1,248 @@ +package uk.ac.ebi.spot.ols.repository.v1; + +import static uk.ac.ebi.ols.shared.DefinedFields.HAS_DIRECT_CHILDREN; + +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Base64; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Collectors; + +import com.google.common.collect.HashMultimap; +import com.google.common.collect.Multimap; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; + +public class V1FullJsTreeBuilder { + + JsonObject thisEntity; + List parentRelationIRIs; + Set entities = new LinkedHashSet<>(); + Map entityIriToEntity = new HashMap<>(); + Multimap entityIriToChildIris = HashMultimap.create(); + Set toBeOpenedIris = new HashSet<>(); + + public V1FullJsTreeBuilder(JsonElement thisEntity, List ancestors, List parentRelationIRIs) { + + this.thisEntity = thisEntity.getAsJsonObject(); + this.parentRelationIRIs = parentRelationIRIs; + + // 1. put all entities (this entity + all ancestors) into an ordered set + + entities.add(thisEntity); + entities.addAll(ancestors); + + // 2. establish map of IRI -> entity + + for(JsonElement entity : entities) { + entityIriToEntity.put((String) entity.getAsJsonObject().getAsJsonPrimitive("iri").getAsString(), entity); + } + + // 3. 
establish map of IRI -> children + + for(String entityIri : entityIriToEntity.keySet()) { + + JsonElement entity = entityIriToEntity.get(entityIri); + + for (String parentIri : getEntityParentIRIs(entity)) { + entityIriToChildIris.put(parentIri, entity.getAsJsonObject().get("iri").getAsString()); + } + } + + // 4. Get all Iri which needs to be opened + getAllIrisToBeOpen(); + } + + private void getAllIrisToBeOpen() { + Set unVisitedKeys = entityIriToChildIris.keySet(); + String selectedEntityIri = JsonHelper.getString(thisEntity, "iri"); + + for(String key : unVisitedKeys) { + // Check if the current key or any of its descendants contain the selectedEntityIri + if (checkIrisTobeOpen(key, selectedEntityIri)) { + toBeOpenedIris.add(key); + } + } + } + + private boolean checkIrisTobeOpen(String key, String selectedEntityIri) { + + // Check if the current key directly contains the selectedEntityIri + if (entityIriToChildIris.get(key).contains(selectedEntityIri)) { + toBeOpenedIris.add(key); + return true; + } + + // Recursively check children for the selectedEntityIri + for (String childKey : entityIriToChildIris.get(key)) { + if (checkIrisTobeOpen(childKey, selectedEntityIri)) { + toBeOpenedIris.add(key); + return true; + } + } + + return false; + } + + List> buildJsTree() { + + // 1. establish roots (entities with no parents) + + List roots = entities.stream() + .filter(entity -> getEntityParentIRIs(entity).size() == 0) + .collect(Collectors.toList()); + + // 2. build jstree entries starting with roots + + List> jstree = new ArrayList<>(); + + for(JsonElement root : roots) { + createJsTreeEntries(jstree, root.getAsJsonObject(), null); + } + + // 3. Retrieve parentIds which are not opened but has children nodes + Set parentIdsToBeRemoved = new HashSet<>(); + for (Map tree : jstree) { + // Check if the current tree map has a "parent" key that is not "#" + if (tree.containsKey("parent") && !"#".equals(tree.get("parent"))) { + String parentValue = (String) tree.get("parent"); + + // Find entries with matching "id" and where "opened" is false + jstree.stream() + .filter(tmpTree -> parentValue.equals(tmpTree.get("id"))) + .filter(tmpTree -> { + Map state = (Map) tmpTree.get("state"); + return state != null && Boolean.FALSE.equals(state.get("opened")); + }) + .map(tmpTree -> (String) tmpTree.get("id")) + .forEach(parentIdsToBeRemoved::add); + } + } + + // 4. 
Remove nodes which has parentIds retrieved in previous step(Step 3) + jstree.removeIf(map -> map.entrySet() + .stream() + .anyMatch(entry -> "parent".equals(entry.getKey()) && parentIdsToBeRemoved.contains(entry.getValue())) + ); + + return jstree; + } + + private void createJsTreeEntries(List> jstree, JsonObject entity, String concatenatedParentIris) { + + String entityIri = JsonHelper.getString(entity, "iri"); + + Map jstreeEntry = new LinkedHashMap<>(); + + if(concatenatedParentIris != null) { + jstreeEntry.put("id", base64Encode(concatenatedParentIris + ";" + entityIri)); + jstreeEntry.put("parent", base64Encode(concatenatedParentIris)); + } else { + jstreeEntry.put("id", base64Encode(entityIri)); + jstreeEntry.put("parent", "#"); + } + + jstreeEntry.put("iri", entityIri); + jstreeEntry.put("text", JsonHelper.getString(entity, "label")); + + Collection childIris = entityIriToChildIris.get(entityIri); + + // only the leaf node is selected (= highlighted in the tree) + boolean selected = JsonHelper.getString(thisEntity, "iri").equals(entityIri); + + // only nodes that aren't the leaf node are marked as opened (expanded) + boolean opened = toBeOpenedIris.contains(entityIri); + + + boolean hasDirectChildren = Objects.equals(JsonHelper.getString(entity, HAS_DIRECT_CHILDREN.getText()), "true"); + boolean hasHierarchicalChildren = Objects.equals(JsonHelper.getString(entity, HAS_DIRECT_CHILDREN.getText()), "true"); + + // only nodes that aren't already opened are marked as having children, (iff they actually have children!) + boolean children = (hasDirectChildren || hasHierarchicalChildren); + + //boolean children = childIris.size() > 0; + + Map state = new LinkedHashMap<>(); + state.put("opened", opened); + state.put("selected", selected); + + /* + * if(selected) { state.put("selected", true); }else { + * + * } + */ + + jstreeEntry.put("state", state); + jstreeEntry.put("children", children); + + Map attrObj = new LinkedHashMap<>(); + attrObj.put("iri", JsonHelper.getString(entity, "iri")); + attrObj.put("ontology_name", JsonHelper.getString(entity, "ontologyId")); + attrObj.put("title", JsonHelper.getString(entity, "iri")); + attrObj.put("class", "is_a"); + jstreeEntry.put("a_attr", attrObj); + + jstreeEntry.put("ontology_name", JsonHelper.getString(entity, "ontologyId")); + + jstree.add(jstreeEntry); + + for(String childIri : childIris) { + + JsonElement child = entityIriToEntity.get(childIri); + + if(child == null) { + // child is not in this tree (i.e. 
cousin of the node requested, will not be displayed) + continue; + } + + if(concatenatedParentIris != null) { + createJsTreeEntries(jstree, child.getAsJsonObject(), concatenatedParentIris + ";" + entityIri); + } else { + createJsTreeEntries(jstree, child.getAsJsonObject(), entityIri); + } + } + } + + private Set getEntityParentIRIs(JsonElement entity) { + + List parents = new ArrayList<>(); + + for(String parentRelationIri : parentRelationIRIs) { + parents.addAll( JsonHelper.getValues(entity.getAsJsonObject(), parentRelationIri) ); + } + + Set parentIris = new LinkedHashSet<>(); + + for (JsonElement parent : parents) { + + // extract value from reified parents + while(parent.isJsonObject()) { + parent = parent.getAsJsonObject().get("value"); + } + + String parentIri = parent.getAsString(); + + if(parentIri.equals("http://www.w3.org/2002/07/owl#Thing") + || parentIri.equals("http://www.w3.org/2002/07/owl#TopObjectProperty")) { + continue; + } + + parentIris.add(parentIri); + } + + return parentIris; + } + + static String base64Encode(String str) { + return Base64.getEncoder().encodeToString(str.getBytes(StandardCharsets.UTF_8)); + } +} + diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1JsTreeRepositoryExtn.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1JsTreeRepositoryExtn.java new file mode 100644 index 000000000..651883d33 --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1JsTreeRepositoryExtn.java @@ -0,0 +1,139 @@ +package uk.ac.ebi.spot.ols.repository.v1; + +import static uk.ac.ebi.ols.shared.DefinedFields.HAS_DIRECT_PARENTS; +import static uk.ac.ebi.ols.shared.DefinedFields.HAS_HIERARCHICAL_PARENTS; +import static uk.ac.ebi.ols.shared.DefinedFields.IS_OBSOLETE; + +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Pageable; +import org.springframework.stereotype.Component; + +import com.google.gson.JsonElement; + +import uk.ac.ebi.spot.ols.repository.neo4j.OlsNeo4jClient; +import uk.ac.ebi.spot.ols.repository.solr.OlsSolrClient; +import uk.ac.ebi.spot.ols.repository.solr.OlsSolrQuery; +import uk.ac.ebi.spot.ols.repository.solr.SearchType; +import uk.ac.ebi.spot.ols.repository.transforms.LocalizationTransform; +import uk.ac.ebi.spot.ols.service.ViewMode; + +@Component +public class V1JsTreeRepositoryExtn { + + @Autowired + OlsNeo4jClient neo4jClient; + + @Autowired + OlsSolrClient solrClient; + + @Autowired + V1JsTreeRepository v1JsTreeRepository; + + public List> getJsTreeForClassByViewMode(String iri, String ontologyId, String lang, ViewMode viewMode, + boolean sibling) { + + return getJSFullTreeForClass(iri, "class", "OntologyClass", ontologyId, lang, viewMode, sibling); + } + + private List> getJSFullTreeForClass(String iri, String type, String neo4jType, + String ontologyId, String lang, ViewMode viewMode, boolean sibling) { + + List parentRelationIRIs = List.of("directParent"); + + String thisEntityId = ontologyId + "+" + type + "+" + iri; + + JsonElement thisEntity = neo4jClient.getOne(neo4jType, Map.of("id", thisEntityId)); + thisEntity = LocalizationTransform.transform(thisEntity, lang); + + switch (viewMode) { + case ALL: + if (sibling) { + List ancestorsWithSiblings = neo4jClient + .recursivelyTraverseOutgoingEdgesWithSiblings(neo4jType, thisEntityId, ontologyId, + parentRelationIRIs, Map.of(), 
PageRequest.ofSize(100)) + .getContent(); + + ancestorsWithSiblings = ancestorsWithSiblings.stream() + .map(ancestor -> LocalizationTransform.transform(ancestor, lang)).collect(Collectors.toList()); + + // 1. Collect all "iri" values from ancestorsWithSiblings + Set ancestorIris = ancestorsWithSiblings.parallelStream() + .map(ancestor -> ancestor.getAsJsonObject().getAsJsonPrimitive("iri").getAsString()) + .collect(Collectors.toSet()); + // 2. Get Root elements by ontologyId + List roots = getRoots(ontologyId, false, lang, PageRequest.ofSize(100)); + + // 3. Add only unique elements from roots to ancestors based on "iri" + ancestorsWithSiblings.addAll(roots.stream().filter(root -> { + String rootIri = root.getAsJsonObject().getAsJsonPrimitive("iri").getAsString(); + return !ancestorIris.contains(rootIri); + }) + .collect(Collectors.toList())); + + return (new V1FullJsTreeBuilder(thisEntity, ancestorsWithSiblings, parentRelationIRIs)).buildJsTree(); + } else { + return v1JsTreeRepository.getJsTreeForClass(iri, ontologyId, lang); + } + + default: + return v1JsTreeRepository.getJsTreeForClass(iri, ontologyId, lang); + } + } + + private List getRoots(String ontologyId, boolean obsolete, String lang, Pageable pageable) { + + OlsSolrQuery query = new OlsSolrQuery(); + query.addFilter("type", List.of("class"), SearchType.WHOLE_FIELD); + query.addFilter("ontologyId", List.of(ontologyId), SearchType.WHOLE_FIELD); + query.addFilter(HAS_DIRECT_PARENTS.getText(), List.of("false"), SearchType.WHOLE_FIELD); + query.addFilter(HAS_HIERARCHICAL_PARENTS.getText(), List.of("false"), SearchType.WHOLE_FIELD); + + if (!obsolete) + query.addFilter(IS_OBSOLETE.getText(), List.of("false"), SearchType.WHOLE_FIELD); + + return solrClient.searchSolrPaginated(query, pageable).stream().collect(Collectors.toList()); + } + + /* + * public Object getJsTreeForClassByViewMode(String iri, String ontologyId, + * String lang, String viewMode, boolean sibling) { + * + * Object res = (sibling) ? 
getJsTreeParentSiblingQuery(iri, ontologyId, lang, + * viewMode) : getJsTreeParentQuery(iri, ontologyId, lang, viewMode); + * + * return res; + * + * } + * + * private Object getJsTreeParentQuery(String iri, String ontologyId, String + * lang, String viewMode) { return null; } + * + * private Object getJsTreeParentSiblingQuery(String iri, String ontologyId, + * String lang, String viewMode) { List parentRelationIRIs = + * List.of("directParent"); String thisEntityId = ontologyId + "+class" + iri; + * + * JsonElement thisEntity = olsNeo4jClient.getOne("OntologyClass", Map.of("id", + * thisEntityId)); thisEntity = LocalizationTransform.transform(thisEntity, + * lang); switch(viewMode) { case "all": String query = """ MATCH path = + * (n:OntologyClass)-[r:directParent|hierarchicalParent*] + * ->(parent)<-[r2:directParent|hierarchicalParent]-(n1:OntologyClass) WHERE + * any(ontologyId in n.ontologyId where ontologyId=%s) and n.iri=%s UNWIND + * relationships(path) as r1 WITH r1 WHERE any(isObsolete in + * startNode(r1).isObsolete where isObsolete="false") RETURN distinct + * startNode(r1) as parents """ .formatted(ontologyId, iri); List + * res = neo4jClient.query(query, "parents"); res = res.stream().map(ancestor -> + * LocalizationTransform.transform(ancestor, + * lang)).collect(Collectors.toList()); + * + * return (new V1AncestorsJsTreeBuilder(thisEntity, res, + * parentRelationIRIs)).buildJsTree(); + * + * default: return getJsTreeForClass(iri, ontologyId, lang); } } + */ +} From a8bd1ea42eeaa2f30c49050de2afc6acb1246f4a Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Fri, 15 Nov 2024 17:06:09 +0100 Subject: [PATCH 130/146] ontology filters are converted to default COMPOSITE in v2 statistics for #91 --- .../spot/ols/controller/api/v2/V2StatisticsController.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java index af2ca9709..7f767b89f 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java @@ -49,7 +49,7 @@ public HttpEntity getStatistics( @Parameter(description = "Use License option to filter based on license.label, license.logo and license.url variables. " + "Use Composite Option to filter based on the objects (i.e. collection, subject) within the classifications variable. 
" + "Use Linear option to filter based on String and Collection based variables.") - @RequestParam(value = "option", required = false, defaultValue = "LINEAR") FilterOption filterOption, + @RequestParam(value = "option", required = false, defaultValue = "COMPOSITE") FilterOption filterOption, @RequestParam(value = "lang", defaultValue = "en") String lang) throws ResourceNotFoundException, IOException{ ontologyIds = ontologyRepository.filterOntologyIDs(schemas,classifications,ontologyIds,exclusive,filterOption,lang); @@ -75,7 +75,7 @@ HttpEntity getStatisticsBySchema( for (String key : keys) { Set values = ontologyRepository.getSchemaValues(Collections.singleton(key),lang); for (String value : values) { - summaries.put(key,value, getStatistics(Collections.singleton(key),Collections.singleton(value), Collections.emptySet(),false,FilterOption.LINEAR,lang)); + summaries.put(key,value, getStatistics(Collections.singleton(key),Collections.singleton(value), Collections.emptySet(),false,FilterOption.COMPOSITE,lang)); } } From 6246cd3f00c26d5b0b927677fab6a99068582094 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Mon, 18 Nov 2024 13:50:28 +0100 Subject: [PATCH 131/146] computed last loaded time for subset of ontologies in #91 --- backend/pom.xml | 6 ++++++ .../api/v2/V2StatisticsController.java | 9 +++++---- .../ols/repository/v2/V2OntologyRepository.java | 16 ++++++++++++++++ 3 files changed, 27 insertions(+), 4 deletions(-) diff --git a/backend/pom.xml b/backend/pom.xml index a17640a3e..24b8b525e 100644 --- a/backend/pom.xml +++ b/backend/pom.xml @@ -167,6 +167,12 @@ 4.4 compile + + joda-time + joda-time + 2.13.0 + compile + diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java index 7f767b89f..9217eaea0 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java @@ -35,7 +35,7 @@ public class V2StatisticsController { @Operation(description = "Get Whole System Statistics. Components in all ontologies are taken into consideration") @RequestMapping(path = "/stats", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) public HttpEntity getStatistics() throws ResourceNotFoundException, IOException { - return new ResponseEntity<>( computeStats("*:*"), HttpStatus.OK); + return new ResponseEntity<>( computeStats("*:*", null), HttpStatus.OK); } @Operation(description = "Get Schema and Classification based Statistics. 
Possible schema keys and possible classification values of particular keys can be inquired with /api/ontologies/schemakeys and /api/ontologies/schemavalues methods respectively.") @@ -53,6 +53,7 @@ public HttpEntity getStatistics( @RequestParam(value = "lang", defaultValue = "en") String lang) throws ResourceNotFoundException, IOException{ ontologyIds = ontologyRepository.filterOntologyIDs(schemas,classifications,ontologyIds,exclusive,filterOption,lang); + String lastLoaded = ontologyRepository.getLastLoaded(ontologyIds,lang).toString(); StringBuilder sb = new StringBuilder(); String queryString = "none"; if(ontologyIds != null){ @@ -61,7 +62,7 @@ public HttpEntity getStatistics( } queryString = sb.toString().substring(0,sb.toString().lastIndexOf(" OR ")); } - return new ResponseEntity<>( computeStats(queryString), HttpStatus.OK); + return new ResponseEntity<>( computeStats(queryString, lastLoaded), HttpStatus.OK); } @Operation(description = "Get Composite Schema based Statistics. All schemas with their respective classifications under the classifications variable will be computed.") @RequestMapping(path = "/allstatsbyschema", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) @@ -82,11 +83,11 @@ HttpEntity getStatisticsBySchema( return new ResponseEntity<>( summaries, HttpStatus.OK); } - private V2Statistics computeStats(String queryString) throws IOException { + private V2Statistics computeStats(String queryString, String lastLoaded) throws IOException { Map coreStatus = solrClient.getCoreStatus(); Map indexStatus = (Map) coreStatus.get("index"); - String lastModified = (String) indexStatus.get("lastModified"); + String lastModified = lastLoaded == null ? (String) indexStatus.get("lastModified") : lastLoaded; SolrQuery query = new SolrQuery(); query.setQuery(queryString); diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2OntologyRepository.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2OntologyRepository.java index ff7e478b9..ffa237f25 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2OntologyRepository.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2OntologyRepository.java @@ -20,6 +20,9 @@ import uk.ac.ebi.spot.ols.repository.transforms.RemoveLiteralDatatypesTransform; import uk.ac.ebi.spot.ols.repository.v2.helpers.V2DynamicFilterParser; import uk.ac.ebi.spot.ols.repository.v2.helpers.V2SearchFieldsParser; + +import java.time.LocalDateTime; +import java.time.format.DateTimeFormatter; import java.util.*; import java.io.IOException; @@ -103,6 +106,19 @@ public Set getOntologies(String lang){ return entities; } + public LocalDateTime getLastLoaded(Collection ontologies,String lang){ + LocalDateTime lastLoaded = LocalDateTime.MIN; + DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSSSSSSS"); + for (V2Entity entity : getOntologies(lang)){ + if (ontologies.contains(entity.any().get("ontologyId").toString())){ + LocalDateTime dateTime = LocalDateTime.parse(entity.any().get("loaded").toString(), formatter); + if (dateTime.isAfter(lastLoaded)) + lastLoaded = dateTime; + } + } + return lastLoaded; + } + public Collection filterOntologyIDs(Collection schemas, Collection classifications, Collection ontologies, boolean exclusiveFilter, FilterOption filterOption, String lang){ if (schemas != null) schemas.remove(""); From 38bc3f8e60f57df987f33978c6f99d0db3b6ec11 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY 
<48091473+giraygi@users.noreply.github.com> Date: Mon, 18 Nov 2024 22:13:51 +0100 Subject: [PATCH 132/146] handled null and empty cases in composite filter for #91 --- .../repository/v1/V1OntologyRepository.java | 47 +++++++++--------- .../repository/v2/V2OntologyRepository.java | 48 ++++++++++--------- 2 files changed, 51 insertions(+), 44 deletions(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1OntologyRepository.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1OntologyRepository.java index 301997805..2f5a2449d 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1OntologyRepository.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1OntologyRepository.java @@ -195,18 +195,20 @@ public Set filterComposite(Collection schemas, Collection> classificationSchema : (Collection>>) ontologyDocument.config.classifications) { - for (String schema: schemas) - if(classificationSchema.containsKey(schema)) - for (String classification: classifications) { - if (classificationSchema.get(schema) != null) - if (!classificationSchema.get(schema).isEmpty()) - if (classificationSchema.get(schema).contains(classification)) { - tempSet.add(ontologyDocument); - } - } + if(ontologyDocument.config.classifications != null) + if (!((Collection>>) ontologyDocument.config.classifications).isEmpty()) + for(Map> classificationSchema : (Collection>>) ontologyDocument.config.classifications) { + for (String schema: schemas) + if(classificationSchema.containsKey(schema)) + for (String classification: classifications) { + if (classificationSchema.get(schema) != null) + if (!classificationSchema.get(schema).isEmpty()) + if (classificationSchema.get(schema).contains(classification)) { + tempSet.add(ontologyDocument); + } + } - } + } } } else if (exclusive && schemas != null && schemas.size() == 1 && classifications != null && classifications.size() == 1) { String schema = schemas.iterator().next(); @@ -214,17 +216,18 @@ public Set filterComposite(Collection schemas, Collection> classificationSchema : (Collection>>) ontologyDocument.config.classifications){ - if(classificationSchema.containsKey(schema)) - if (classificationSchema.get(schema) != null) - if (!classificationSchema.get(schema).isEmpty()){ - for (String s :classificationSchema.get(schema)) - System.out.println(s); - if(classificationSchema.get(schema).contains(classification)) - tempSet.add(ontologyDocument); - } - - } + if(ontologyDocument.config.classifications != null) + if (!((Collection>>) ontologyDocument.config.classifications).isEmpty()) + for(Map> classificationSchema : (Collection>>) ontologyDocument.config.classifications){ + if(classificationSchema.containsKey(schema)) + if (classificationSchema.get(schema) != null) + if (!classificationSchema.get(schema).isEmpty()){ + for (String s :classificationSchema.get(schema)) + System.out.println(s); + if(classificationSchema.get(schema).contains(classification)) + tempSet.add(ontologyDocument); + } + } } } else { for (V1Ontology ontologyDocument : getAll(lang)) { diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2OntologyRepository.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2OntologyRepository.java index ffa237f25..b759b34a2 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2OntologyRepository.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2OntologyRepository.java @@ -111,7 +111,7 @@ public LocalDateTime getLastLoaded(Collection ontologies,String lang){ 
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSSSSSSS"); for (V2Entity entity : getOntologies(lang)){ if (ontologies.contains(entity.any().get("ontologyId").toString())){ - LocalDateTime dateTime = LocalDateTime.parse(entity.any().get("loaded").toString(), formatter); + LocalDateTime dateTime = entity.any().get("loaded").toString() != null ? LocalDateTime.parse(entity.any().get("loaded").toString(), formatter) : LocalDateTime.MIN; if (dateTime.isAfter(lastLoaded)) lastLoaded = dateTime; } @@ -176,18 +176,20 @@ public Set filterComposite(Collection schemas, Collection> classificationSchema : (Collection>>) ontologyDocument.any().get("classifications")) { - for (String schema: schemas) - if(classificationSchema.containsKey(schema)) - for (String classification: classifications) { - if (classificationSchema.get(schema) != null) - if (!classificationSchema.get(schema).isEmpty()) - if (classificationSchema.get(schema).contains(classification)) { - tempSet.add(ontologyDocument); - } - } + if(ontologyDocument.any().get("classifications") != null) + if (!((Collection>>) ontologyDocument.any().get("classifications")).isEmpty()) + for(Map> classificationSchema : (Collection>>) ontologyDocument.any().get("classifications")) { + for (String schema: schemas) + if(classificationSchema.containsKey(schema)) + for (String classification: classifications) { + if (classificationSchema.get(schema) != null) + if (!classificationSchema.get(schema).isEmpty()) + if (classificationSchema.get(schema).contains(classification)) { + tempSet.add(ontologyDocument); + } + } - } + } } } else if (exclusive && schemas != null && schemas.size() == 1 && classifications != null && classifications.size() == 1) { String schema = schemas.iterator().next(); @@ -195,17 +197,19 @@ public Set filterComposite(Collection schemas, Collection> classificationSchema : (Collection>>) ontologyDocument.any().get("classifications")){ - if(classificationSchema.containsKey(schema)) - if (classificationSchema.get(schema) != null) - if (!classificationSchema.get(schema).isEmpty()){ - for (String s :classificationSchema.get(schema)) - System.out.println(s); - if(classificationSchema.get(schema).contains(classification)) - tempSet.add(ontologyDocument); - } + if(ontologyDocument.any().get("classifications") != null) + if (!((Collection>>) ontologyDocument.any().get("classifications")).isEmpty()) + for(Map> classificationSchema : (Collection>>) ontologyDocument.any().get("classifications")){ + if(classificationSchema.containsKey(schema)) + if (classificationSchema.get(schema) != null) + if (!classificationSchema.get(schema).isEmpty()){ + for (String s :classificationSchema.get(schema)) + System.out.println(s); + if(classificationSchema.get(schema).contains(classification)) + tempSet.add(ontologyDocument); + } - } + } } } else { for (V2Entity ontologyDocument : getOntologies(lang)) { From 2bdb9db6f6adba3fa37c97828b943554ba2388ee Mon Sep 17 00:00:00 2001 From: deepananbalagan Date: Tue, 19 Nov 2024 11:03:27 +0100 Subject: [PATCH 133/146] added logic to handle property fullTree. 
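Class and property trees now share one getJSFullTree traversal, dispatched by
entity type. A minimal usage sketch follows; the ontology id, IRI variable and
the "en"/ViewMode.ALL arguments are illustrative, while the method signatures
are as in the diff below:

// full class tree for an entity, "all" view with siblings
List<Map<String, Object>> classTree =
    jsTreeRepositoryExtn.getJsTreeForClassByViewMode(iri, "efo", "en", ViewMode.ALL, true);

// same traversal for properties (type "property", neo4j type "OntologyProperty")
List<Map<String, Object>> propertyTree =
    jsTreeRepositoryExtn.getJsTreeForPropertyByViewMode(iri, "efo", "en", ViewMode.ALL, true);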
Fixes #74 --- .../api/v1/V1OntologyPropertyController.java | 7 +- .../repository/v1/V1FullJsTreeBuilder.java | 20 +++--- .../repository/v1/V1JsTreeRepositoryExtn.java | 72 ++++++++----------- 3 files changed, 46 insertions(+), 53 deletions(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyPropertyController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyPropertyController.java index d823a9cbb..bb9e80325 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyPropertyController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyPropertyController.java @@ -23,8 +23,10 @@ import org.springframework.web.util.UriUtils; import uk.ac.ebi.spot.ols.model.v1.V1Property; import uk.ac.ebi.spot.ols.repository.v1.V1JsTreeRepository; +import uk.ac.ebi.spot.ols.repository.v1.V1JsTreeRepositoryExtn; import uk.ac.ebi.spot.ols.repository.v1.V1PropertyRepository; import uk.ac.ebi.spot.ols.service.Neo4jClient; +import uk.ac.ebi.spot.ols.service.ViewMode; import javax.servlet.http.HttpServletRequest; import java.util.Arrays; @@ -43,6 +45,9 @@ public class V1OntologyPropertyController { @Autowired V1JsTreeRepository jsTreeRepository; + + @Autowired + V1JsTreeRepositoryExtn jsTreeRepositoryExtn; @Autowired Neo4jClient neo4jClient; @@ -269,7 +274,7 @@ HttpEntity getJsTree( try { String decoded = UriUtils.decode(termId, "UTF-8"); - Object object= jsTreeRepository.getJsTreeForProperty(decoded, ontologyId, lang); + Object object= jsTreeRepositoryExtn.getJsTreeForPropertyByViewMode(decoded, ontologyId, lang, ViewMode.getFromShortName(viewMode), siblings); ObjectWriter ow = new ObjectMapper().writer().withDefaultPrettyPrinter(); return new HttpEntity(ow.writeValueAsString(object)); } catch (JsonProcessingException e) { diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1FullJsTreeBuilder.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1FullJsTreeBuilder.java index 77df1bedd..a27af17ae 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1FullJsTreeBuilder.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1FullJsTreeBuilder.java @@ -21,6 +21,11 @@ import com.google.gson.JsonElement; import com.google.gson.JsonObject; +/** + * @author Deepan Anbalagan + * @email deepan.anbalagan@tib.eu + * TIB-Leibniz Information Center for Science and Technology + */ public class V1FullJsTreeBuilder { JsonObject thisEntity; @@ -38,7 +43,12 @@ public V1FullJsTreeBuilder(JsonElement thisEntity, List ancestors, // 1. put all entities (this entity + all ancestors) into an ordered set entities.add(thisEntity); - entities.addAll(ancestors); + String thisEntityIri = (String) thisEntity.getAsJsonObject().getAsJsonPrimitive("iri").getAsString(); + ancestors.parallelStream() + .filter(element -> { + return !((String) element.getAsJsonObject().getAsJsonPrimitive("iri").getAsString()).equals(thisEntityIri); + }) + .forEach(entities::add); // 2. establish map of IRI -> entity @@ -168,18 +178,10 @@ private void createJsTreeEntries(List> jstree, JsonObject ent // only nodes that aren't already opened are marked as having children, (iff they actually have children!) 
boolean children = (hasDirectChildren || hasHierarchicalChildren); - //boolean children = childIris.size() > 0; - Map state = new LinkedHashMap<>(); state.put("opened", opened); state.put("selected", selected); - /* - * if(selected) { state.put("selected", true); }else { - * - * } - */ - jstreeEntry.put("state", state); jstreeEntry.put("children", children); diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1JsTreeRepositoryExtn.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1JsTreeRepositoryExtn.java index 651883d33..3129f769f 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1JsTreeRepositoryExtn.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1JsTreeRepositoryExtn.java @@ -23,6 +23,11 @@ import uk.ac.ebi.spot.ols.repository.transforms.LocalizationTransform; import uk.ac.ebi.spot.ols.service.ViewMode; +/** + * @author Deepan Anbalagan + * @email deepan.anbalagan@tib.eu + * TIB-Leibniz Information Center for Science and Technology + */ @Component public class V1JsTreeRepositoryExtn { @@ -38,10 +43,16 @@ public class V1JsTreeRepositoryExtn { public List> getJsTreeForClassByViewMode(String iri, String ontologyId, String lang, ViewMode viewMode, boolean sibling) { - return getJSFullTreeForClass(iri, "class", "OntologyClass", ontologyId, lang, viewMode, sibling); + return getJSFullTree(iri, "class", "OntologyClass", ontologyId, lang, viewMode, sibling); } + + public List> getJsTreeForPropertyByViewMode(String iri, String ontologyId, String lang, ViewMode viewMode, + boolean sibling) { - private List> getJSFullTreeForClass(String iri, String type, String neo4jType, + return getJSFullTree(iri, "property", "OntologyProperty", ontologyId, lang, viewMode, sibling); + } + + private List> getJSFullTree(String iri, String type, String neo4jType, String ontologyId, String lang, ViewMode viewMode, boolean sibling) { List parentRelationIRIs = List.of("directParent"); @@ -67,7 +78,7 @@ private List> getJSFullTreeForClass(String iri, String type, .map(ancestor -> ancestor.getAsJsonObject().getAsJsonPrimitive("iri").getAsString()) .collect(Collectors.toSet()); // 2. Get Root elements by ontologyId - List roots = getRoots(ontologyId, false, lang, PageRequest.ofSize(100)); + List roots = getRoots(ontologyId, type, false, lang, PageRequest.ofSize(100)); // 3. 
Add only unique elements from roots to ancestors based on "iri" ancestorsWithSiblings.addAll(roots.stream().filter(root -> { @@ -78,18 +89,18 @@ private List> getJSFullTreeForClass(String iri, String type, return (new V1FullJsTreeBuilder(thisEntity, ancestorsWithSiblings, parentRelationIRIs)).buildJsTree(); } else { - return v1JsTreeRepository.getJsTreeForClass(iri, ontologyId, lang); + return getDefaultJsTreeByType(iri, ontologyId, lang, type); } default: - return v1JsTreeRepository.getJsTreeForClass(iri, ontologyId, lang); + return getDefaultJsTreeByType(iri, ontologyId, lang, type); } } - private List getRoots(String ontologyId, boolean obsolete, String lang, Pageable pageable) { + private List getRoots(String ontologyId, String type, boolean obsolete, String lang, Pageable pageable) { OlsSolrQuery query = new OlsSolrQuery(); - query.addFilter("type", List.of("class"), SearchType.WHOLE_FIELD); + query.addFilter("type", List.of(type), SearchType.WHOLE_FIELD); query.addFilter("ontologyId", List.of(ontologyId), SearchType.WHOLE_FIELD); query.addFilter(HAS_DIRECT_PARENTS.getText(), List.of("false"), SearchType.WHOLE_FIELD); query.addFilter(HAS_HIERARCHICAL_PARENTS.getText(), List.of("false"), SearchType.WHOLE_FIELD); @@ -100,40 +111,15 @@ private List getRoots(String ontologyId, boolean obsolete, String l return solrClient.searchSolrPaginated(query, pageable).stream().collect(Collectors.toList()); } - /* - * public Object getJsTreeForClassByViewMode(String iri, String ontologyId, - * String lang, String viewMode, boolean sibling) { - * - * Object res = (sibling) ? getJsTreeParentSiblingQuery(iri, ontologyId, lang, - * viewMode) : getJsTreeParentQuery(iri, ontologyId, lang, viewMode); - * - * return res; - * - * } - * - * private Object getJsTreeParentQuery(String iri, String ontologyId, String - * lang, String viewMode) { return null; } - * - * private Object getJsTreeParentSiblingQuery(String iri, String ontologyId, - * String lang, String viewMode) { List parentRelationIRIs = - * List.of("directParent"); String thisEntityId = ontologyId + "+class" + iri; - * - * JsonElement thisEntity = olsNeo4jClient.getOne("OntologyClass", Map.of("id", - * thisEntityId)); thisEntity = LocalizationTransform.transform(thisEntity, - * lang); switch(viewMode) { case "all": String query = """ MATCH path = - * (n:OntologyClass)-[r:directParent|hierarchicalParent*] - * ->(parent)<-[r2:directParent|hierarchicalParent]-(n1:OntologyClass) WHERE - * any(ontologyId in n.ontologyId where ontologyId=%s) and n.iri=%s UNWIND - * relationships(path) as r1 WITH r1 WHERE any(isObsolete in - * startNode(r1).isObsolete where isObsolete="false") RETURN distinct - * startNode(r1) as parents """ .formatted(ontologyId, iri); List - * res = neo4jClient.query(query, "parents"); res = res.stream().map(ancestor -> - * LocalizationTransform.transform(ancestor, - * lang)).collect(Collectors.toList()); - * - * return (new V1AncestorsJsTreeBuilder(thisEntity, res, - * parentRelationIRIs)).buildJsTree(); - * - * default: return getJsTreeForClass(iri, ontologyId, lang); } } - */ + private List> getDefaultJsTreeByType(String iri, String ontologyId, String lang, String type){ + + switch (type) { + case "class": + return v1JsTreeRepository.getJsTreeForClass(iri, ontologyId, lang); + case "property": + return v1JsTreeRepository.getJsTreeForProperty(iri, ontologyId, lang); + default: + return null; + } + } } From 585aa6886f1ce46a2e61176e8cb625207f6dae23 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY 
<48091473+giraygi@users.noreply.github.com> Date: Tue, 19 Nov 2024 11:25:19 +0100 Subject: [PATCH 134/146] removed unnecessary parameter from allstatsbyschema endpoint for #91 --- .../ebi/spot/ols/controller/api/v2/V2StatisticsController.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java index 9217eaea0..fc0536ff9 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java @@ -67,9 +67,7 @@ public HttpEntity getStatistics( @Operation(description = "Get Composite Schema based Statistics. All schemas with their respective classifications under the classifications variable will be computed.") @RequestMapping(path = "/allstatsbyschema", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) HttpEntity getStatisticsBySchema( - @RequestParam(value = "schema", required = false) Collection schemas, @RequestParam(value = "lang", defaultValue = "en") String lang - ) throws IOException { MultiKeyMap summaries = new MultiKeyMap(); Collection keys = ontologyRepository.getSchemaKeys(lang); From 9afe62a2106ee5af3fcc3e2874192944e6cde803 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Tue, 19 Nov 2024 17:12:12 +0100 Subject: [PATCH 135/146] readded schema parameter on allstatsbyschema endpoint for #91 --- .../spot/ols/controller/api/v2/V2StatisticsController.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java index fc0536ff9..503d8fba3 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java @@ -1,5 +1,6 @@ package uk.ac.ebi.spot.ols.controller.api.v2; +import com.google.common.collect.Sets; import io.swagger.v3.oas.annotations.Operation; import io.swagger.v3.oas.annotations.Parameter; import org.apache.commons.collections4.map.MultiKeyMap; @@ -67,10 +68,11 @@ public HttpEntity getStatistics( @Operation(description = "Get Composite Schema based Statistics. All schemas with their respective classifications under the classifications variable will be computed.") @RequestMapping(path = "/allstatsbyschema", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) HttpEntity getStatisticsBySchema( + @RequestParam(value = "schema", required = false) Set schemas, @RequestParam(value = "lang", defaultValue = "en") String lang ) throws IOException { MultiKeyMap summaries = new MultiKeyMap(); - Collection keys = ontologyRepository.getSchemaKeys(lang); + Set keys = schemas == null || schemas.isEmpty() ? 
From ba1a070a6c221e58415a1d209817636e70e55329 Mon Sep 17 00:00:00 2001
From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com>
Date: Wed, 20 Nov 2024 17:42:44 +0100
Subject: [PATCH 136/146] used annotation extractor class for mapping in #96

---
 .../ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java
index 4232b3cea..62a68c08c 100644
--- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java
+++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/mappers/V1OntologyMapper.java
@@ -58,7 +58,7 @@ public static V1Ontology mapOntology(JsonElement json, String lang) {
         ontology.config.classifications = gson.fromJson(localizedJson.get("classifications"), Collection.class);
         ontology.config.license = gson.fromJson(localizedJson.get("license"), License.class);
-        ontology.config.annotations = gson.fromJson(localizedJson.get("annotations"), Map.class);
+        ontology.config.annotations = AnnotationExtractor.extractAnnotations(localizedJson);
         ontology.config.fileLocation = JsonHelper.getString(localizedJson, "ontology_purl");
         ontology.config.oboSlims = localizedJson.has("oboSlims") && localizedJson.get("oboSlims").getAsBoolean();

From a91be29aa02f1c3f333462681db585397ee982f4 Mon Sep 17 00:00:00 2001
From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com>
Date: Thu, 21 Nov 2024 16:21:16 +0100
Subject: [PATCH 137/146] added relatedfrom and equivalentclasses endpoints for #98

---
 .../api/v1/V1OntologyTermController.java | 39 +++++++++++++++++++
 .../ols/repository/v1/V1GraphRepository.java | 18 +++++++--
 2 files changed, 53 insertions(+), 4 deletions(-)

diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyTermController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyTermController.java
index 789b60671..eb9622533 100644
--- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyTermController.java
+++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyTermController.java
@@ -31,6 +31,8 @@
 import javax.servlet.http.HttpServletRequest;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.List;
+import java.util.Map;
 
 /**
  * @author Simon Jupp
@@ -406,6 +408,43 @@ HttpEntity> ancestors(@PathVariable("onto")
         return new ResponseEntity<>( assembler.toModel(ancestors, termAssembler), HttpStatus.OK);
     }
 
+    @RequestMapping(path = "/{onto}/terms/{iri}/equivalentclasses", produces = {MediaType.APPLICATION_JSON_VALUE,
+            MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET)
+    HttpEntity>> getEquivalentClasses(
+            @PathVariable("onto")
+            @Parameter(name = "onto",
+                    description = "The ID of the ontology.
For example for Data Use Ontology, the ID is duo.", + example = "duo") String ontologyId, + @PathVariable("iri") + @Parameter(name = "iri", + description = "The IRI of the term, this value must be single URL encoded", + example = "http%3A%2F%2Fpurl.obolibrary.org%2Fobo%2FDUO_0000017") String termId) { + + ontologyId = ontologyId.toLowerCase(); + String decoded = UriUtils.decode(termId, "UTF-8"); + String entityId = ontologyId+"+class+"+decoded; + return new ResponseEntity<>( graphRepository.getEquivalentClass(entityId), HttpStatus.OK); + } + + + @RequestMapping(path = "/{onto}/terms/{iri}/relatedfrom", produces = {MediaType.APPLICATION_JSON_VALUE, + MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + HttpEntity> getRelatedFrom( + @PathVariable("onto") + @Parameter(name = "onto", + description = "The ID of the ontology. For example for Data Use Ontology, the ID is duo.", + example = "duo") String ontologyId, + @PathVariable("iri") + @Parameter(name = "iri", + description = "The IRI of the term, this value must be single URL encoded", + example = "http%3A%2F%2Fpurl.obolibrary.org%2Fobo%2FDUO_0000017") String termId) { + + ontologyId = ontologyId.toLowerCase(); + String decoded = UriUtils.decode(termId, "UTF-8"); + String entityId = ontologyId+"+class+"+decoded; + return new ResponseEntity<>( graphRepository.getRelatedFrom(entityId), HttpStatus.OK); + } + @RequestMapping(path = "/{onto}/terms/{iri}/jstree", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1GraphRepository.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1GraphRepository.java index 2007ac9ed..cb727192c 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1GraphRepository.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1GraphRepository.java @@ -4,6 +4,8 @@ import org.neo4j.driver.types.Node; import org.neo4j.driver.types.Relationship; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; import org.springframework.stereotype.Component; import uk.ac.ebi.spot.ols.repository.transforms.LocalizationTransform; import uk.ac.ebi.spot.ols.repository.transforms.RemoveLiteralDatatypesTransform; @@ -132,19 +134,27 @@ Map getParentsAndRelatedTo(String entityId) { return (Map) results.get(0).get("result"); } - Map getRelatedFrom(String entityId) { - + public Map getRelatedFrom(String entityId) { String query = "MATCH path = (x)-[r:relatedTo]->(n:OntologyClass)\n" + "WHERE n.id=\"" + entityId + "\"\n" - + "RETURN { nodes: collect(distinct x),\n" - + "edges: collect({ source: startNode(r).iri, target: endNode(r).iri, relationship: r })\n" + + "RETURN { nodes: collect({ label: x.label, iri: x.iri }),\n" + + "edges: collect({ source: startNode(r).iri, target: endNode(r).iri, relationship: type(r) })\n" + "} AS result"; List> results = neo4jClient.rawQuery(query); return (Map) results.get(0).get("result"); } + public List> getEquivalentClass(String entityId) { + String query = + "MATCH (a:OntologyClass)-[r:`http://www.w3.org/2002/07/owl#equivalentClass`]-(b:OntologyClass) " + + "WHERE a.id = '"+entityId+"' RETURN {nodes: collect( DISTINCT { label: b.label, iri: b.iri })," + + "edges: collect({ source: startNode(r).iri, target: endNode(r).iri, relationship: type(r) })} AS result"; + + List> results = neo4jClient.rawQuery(query); + return results; + } JsonObject 
getOntologyNodeJson(Node node, String lang) { JsonElement ontologyNodeObject = new JsonObject(); From d8d78905a7fba2458f6c1b5ce4586e147fc82972 Mon Sep 17 00:00:00 2001 From: deepananbalagan Date: Thu, 21 Nov 2024 17:00:28 +0100 Subject: [PATCH 138/146] updated logic to get label for relationship. Fixes #99 --- .../ols/repository/v1/V1GraphRepository.java | 50 ++++++++++++++++++- 1 file changed, 49 insertions(+), 1 deletion(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1GraphRepository.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1GraphRepository.java index 2007ac9ed..f217c78e9 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1GraphRepository.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1GraphRepository.java @@ -40,6 +40,7 @@ public Map getGraphForIndividual(String iri, String ontologyId, private Map getGraphForEntity(String iri, String type, String neo4jType, String ontologyId, String lang) { String thisEntityId = ontologyId + "+" + type + "+" + iri; + List selectedNode = new ArrayList<>(); // String parentsQuery = // "MATCH path = (n:OntologyClass)-[r:directParent|relatedTo]-(parent)\n" @@ -67,6 +68,10 @@ private Map getGraphForEntity(String iri, String type, String ne List> nodes = allNodes.stream().map(node -> { JsonObject ontologyNodeObject = getOntologyNodeJson(node, lang); + + if(iri.equals(JsonHelper.getString(ontologyNodeObject, "iri"))) { + selectedNode.add(ontologyNodeObject); + } JsonObject linkedEntities = ontologyNodeObject.getAsJsonObject("linkedEntities"); if(linkedEntities != null) { @@ -95,7 +100,7 @@ private Map getGraphForEntity(String iri, String type, String ne JsonObject ontologyEdgeObject = getOntologyEdgeJson(relationship, lang); - String uri = JsonHelper.getString(ontologyEdgeObject, "property"); + String uri = resolveUri(result, selectedNode, iri); if (uri == null) { uri = "http://www.w3.org/2000/01/rdf-schema#subClassOf"; } @@ -166,5 +171,48 @@ JsonObject getOntologyEdgeJson(Relationship r, String lang) { ).getAsJsonObject(); } + private String resolveUri(Map result, List selectedNode, String iri) { + if (selectedNode == null || selectedNode.isEmpty()) { + return null; + } + + JsonObject selectedOntologyNodeObject = selectedNode.get(0); + if (iri.equals(result.get("source"))) { + return getRelatedProperty(selectedOntologyNodeObject, "relatedTo", (String) result.get("target")); + } else { + return getRelatedProperty(selectedOntologyNodeObject, "relatedFrom", (String) result.get("source")); + } + } + + private String getRelatedProperty(JsonObject ontologyNode, String relationKey, String targetOrSourceIri) { + if (ontologyNode == null || !ontologyNode.has(relationKey)) { + return null; + } + + JsonElement related = ontologyNode.get(relationKey); + if (related == null || related instanceof JsonNull) { + return null; + } + + if (related.isJsonArray()) { + for (JsonElement element : related.getAsJsonArray()) { + JsonObject relationObject = element.getAsJsonObject(); + if (relationObject != null + && targetOrSourceIri.equals(JsonHelper.getString(relationObject, "value")) + && relationObject.has("property")) { + return JsonHelper.getString(relationObject, "property"); + } + } + } else if (related.isJsonObject()) { + JsonObject relationObject = related.getAsJsonObject(); + if (relationObject != null + && targetOrSourceIri.equals(JsonHelper.getString(relationObject, "value")) + && relationObject.has("property")) { + return JsonHelper.getString(relationObject, "property"); + } + } + + 
return null; + } } From 3fce34ba572cfbd432c8e6861c23b1ca124aff41 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Fri, 22 Nov 2024 16:12:42 +0100 Subject: [PATCH 139/146] added a paginated term instances endpoint and made related from and equivalent class endpoints paginated for #98 --- .../api/v1/V1OntologyTermController.java | 61 +++++++++++++++++-- .../ols/repository/v1/V1GraphRepository.java | 42 ++++++++++--- 2 files changed, 89 insertions(+), 14 deletions(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyTermController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyTermController.java index eb9622533..d328b1d3a 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyTermController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyTermController.java @@ -22,6 +22,7 @@ import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.*; import org.springframework.web.util.UriUtils; +import uk.ac.ebi.spot.ols.model.v1.V1Individual; import uk.ac.ebi.spot.ols.model.v1.V1Term; import uk.ac.ebi.spot.ols.repository.v1.V1GraphRepository; import uk.ac.ebi.spot.ols.repository.v1.V1JsTreeRepository; @@ -53,6 +54,9 @@ public class V1OntologyTermController { @Autowired V1TermAssembler termAssembler; + @Autowired + V1IndividualAssembler individualAssembler; + @Autowired V1PreferredRootTermAssembler preferredRootTermAssembler; @@ -410,7 +414,7 @@ HttpEntity> ancestors(@PathVariable("onto") @RequestMapping(path = "/{onto}/terms/{iri}/equivalentclasses", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) - HttpEntity>> getEquivalentClasses( + HttpEntity> getEquivalentClasses( @PathVariable("onto") @Parameter(name = "onto", description = "The ID of the ontology. For example for Data Use Ontology, the ID is duo.", @@ -418,18 +422,54 @@ HttpEntity>> getEquivalentClasses( @PathVariable("iri") @Parameter(name = "iri", description = "The IRI of the term, this value must be single URL encoded", - example = "http%3A%2F%2Fpurl.obolibrary.org%2Fobo%2FDUO_0000017") String termId) { + example = "http%3A%2F%2Fpurl.obolibrary.org%2Fobo%2FDUO_0000017") String termId, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + @Parameter(hidden = true) Pageable pageable, + @Parameter(hidden = true) PagedResourcesAssembler assembler) { ontologyId = ontologyId.toLowerCase(); + String decoded = UriUtils.decode(termId, "UTF-8"); String entityId = ontologyId+"+class+"+decoded; - return new ResponseEntity<>( graphRepository.getEquivalentClass(entityId), HttpStatus.OK); + Page equivalentClasses = graphRepository.getEquivalentClassPaginated(entityId, lang, pageable); + if (equivalentClasses == null) + throw new ResourceNotFoundException("No equivalent classes could be found for " + ontologyId + + " and " + termId); + + return new ResponseEntity<>( assembler.toModel(equivalentClasses, termAssembler), HttpStatus.OK); } @RequestMapping(path = "/{onto}/terms/{iri}/relatedfrom", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) - HttpEntity> getRelatedFrom( + HttpEntity> getRelatedFrom( + @PathVariable("onto") + @Parameter(name = "onto", + description = "The ID of the ontology. 
For example for Data Use Ontology, the ID is duo.", + example = "duo") String ontologyId, + @PathVariable("iri") + @Parameter(name = "iri", + description = "The IRI of the term, this value must be single URL encoded", + example = "http%3A%2F%2Fpurl.obolibrary.org%2Fobo%2FDUO_0000017") String termId, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + @Parameter(hidden = true) Pageable pageable, + @Parameter(hidden = true) PagedResourcesAssembler assembler) { + + ontologyId = ontologyId.toLowerCase(); + + String decoded = UriUtils.decode(termId, "UTF-8"); + String entityId = ontologyId+"+class+"+decoded; + Page relatedFroms = graphRepository.getRelatedFromPaginated(entityId, lang, pageable); + if (relatedFroms == null) + throw new ResourceNotFoundException("No related from terms could be found for " + ontologyId + + " and " + termId); + + return new ResponseEntity<>( assembler.toModel(relatedFroms, termAssembler), HttpStatus.OK); + } + + @RequestMapping(path = "/{onto}/terms/{iri}/instances", produces = {MediaType.APPLICATION_JSON_VALUE, + MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + HttpEntity> getInstances( @PathVariable("onto") @Parameter(name = "onto", description = "The ID of the ontology. For example for Data Use Ontology, the ID is duo.", @@ -437,12 +477,21 @@ HttpEntity> getRelatedFrom( @PathVariable("iri") @Parameter(name = "iri", description = "The IRI of the term, this value must be single URL encoded", - example = "http%3A%2F%2Fpurl.obolibrary.org%2Fobo%2FDUO_0000017") String termId) { + example = "http%3A%2F%2Fpurl.obolibrary.org%2Fobo%2FDUO_0000017") String termId, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + @Parameter(hidden = true) Pageable pageable, + @Parameter(hidden = true) PagedResourcesAssembler assembler) { ontologyId = ontologyId.toLowerCase(); + String decoded = UriUtils.decode(termId, "UTF-8"); String entityId = ontologyId+"+class+"+decoded; - return new ResponseEntity<>( graphRepository.getRelatedFrom(entityId), HttpStatus.OK); + Page instances = graphRepository.getTermInstancesPaginated(entityId, lang, pageable); + if (instances == null) + throw new ResourceNotFoundException("No instances could be found for " + ontologyId + + " and " + termId); + + return new ResponseEntity<>( assembler.toModel(instances, individualAssembler), HttpStatus.OK); } @RequestMapping(path = "/{onto}/terms/{iri}/jstree", diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1GraphRepository.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1GraphRepository.java index cb727192c..e2079099c 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1GraphRepository.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1GraphRepository.java @@ -7,13 +7,19 @@ import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.stereotype.Component; +import uk.ac.ebi.spot.ols.model.v1.V1Individual; +import uk.ac.ebi.spot.ols.model.v1.V1Term; import uk.ac.ebi.spot.ols.repository.transforms.LocalizationTransform; import uk.ac.ebi.spot.ols.repository.transforms.RemoveLiteralDatatypesTransform; +import uk.ac.ebi.spot.ols.repository.v1.mappers.V1IndividualMapper; +import uk.ac.ebi.spot.ols.repository.v1.mappers.V1TermMapper; import uk.ac.ebi.spot.ols.service.Neo4jClient; import java.util.*; import java.util.stream.Collectors; +import static org.neo4j.driver.Values.parameters; + @Component public class 
V1GraphRepository { @@ -134,26 +140,46 @@ Map getParentsAndRelatedTo(String entityId) { return (Map) results.get(0).get("result"); } - public Map getRelatedFrom(String entityId) { + Map getRelatedFrom(String entityId) { + String query = "MATCH path = (x)-[r:relatedTo]->(n:OntologyClass)\n" + "WHERE n.id=\"" + entityId + "\"\n" - + "RETURN { nodes: collect({ label: x.label, iri: x.iri }),\n" - + "edges: collect({ source: startNode(r).iri, target: endNode(r).iri, relationship: type(r) })\n" + + "RETURN { nodes: collect(distinct x),\n" + + "edges: collect({ source: startNode(r).iri, target: endNode(r).iri, relationship: r })\n" + "} AS result"; List> results = neo4jClient.rawQuery(query); return (Map) results.get(0).get("result"); } - public List> getEquivalentClass(String entityId) { + public Page getRelatedFromPaginated(String entityId, String lang, Pageable pageable) { + String query = "MATCH (x:OntologyClass)-[r:relatedTo]->(n:OntologyClass) WHERE n.id= $id RETURN x"; + String countQuery = "MATCH (x:OntologyClass)-[r:relatedTo]->(n:OntologyClass) WHERE n.id= $id RETURN count(x)"; + + return neo4jClient.queryPaginated(query, "x", countQuery, parameters("id", entityId), pageable).map(record -> V1TermMapper.mapTerm(record, lang)); + } + + public Page getEquivalentClassPaginated(String entityId, String lang, Pageable pageable) { String query = "MATCH (a:OntologyClass)-[r:`http://www.w3.org/2002/07/owl#equivalentClass`]-(b:OntologyClass) " + - "WHERE a.id = '"+entityId+"' RETURN {nodes: collect( DISTINCT { label: b.label, iri: b.iri })," + - "edges: collect({ source: startNode(r).iri, target: endNode(r).iri, relationship: type(r) })} AS result"; + "WHERE a.id = $id RETURN DISTINCT b"; + String countQuery = + "MATCH (a:OntologyClass)-[r:`http://www.w3.org/2002/07/owl#equivalentClass`]-(b:OntologyClass) " + + "WHERE a.id = $id RETURN count(DISTINCT b)"; - List> results = neo4jClient.rawQuery(query); - return results; + return neo4jClient.queryPaginated(query, "b", countQuery, parameters("id", entityId), pageable).map(record -> V1TermMapper.mapTerm(record, lang)); + } + + public Page getTermInstancesPaginated(String entityId, String lang, Pageable pageable) { + String query = + "MATCH (a:OntologyClass)<-[r:`http://www.w3.org/1999/02/22-rdf-syntax-ns#type`]-(b:OntologyIndividual) " + + "WHERE a.id = $id RETURN b"; + String countQuery = + "MATCH (a:OntologyClass)<-[r:`http://www.w3.org/1999/02/22-rdf-syntax-ns#type`]-(b:OntologyIndividual) " + + "WHERE a.id = $id RETURN count(b)"; + + return neo4jClient.queryPaginated(query, "b", countQuery, parameters("id", entityId), pageable).map(record -> V1IndividualMapper.mapIndividual(record, lang)); } JsonObject getOntologyNodeJson(Node node, String lang) { From b8a0bcdbeeba4559f12ea038622a754cefe12b4c Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Fri, 22 Nov 2024 19:31:03 +0100 Subject: [PATCH 140/146] added json endpoint for v1 term in #98 --- .../api/v1/V1OntologyTermController.java | 27 +++++++++++++++++++ .../ols/repository/v1/V1GraphRepository.java | 6 +++++ 2 files changed, 33 insertions(+) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyTermController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyTermController.java index d328b1d3a..dded4e4ba 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyTermController.java +++ 
b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyTermController.java @@ -494,6 +494,33 @@ HttpEntity> getInstances( return new ResponseEntity<>( assembler.toModel(instances, individualAssembler), HttpStatus.OK); } + @RequestMapping(path = "/{onto}/terms/{iri}/json", produces = {MediaType.APPLICATION_JSON_VALUE, + MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + HttpEntity getJson( + @PathVariable("onto") + @Parameter(name = "onto", + description = "The ID of the ontology. For example for Data Use Ontology, the ID is duo.", + example = "duo") String ontologyId, + @PathVariable("iri") + @Parameter(name = "iri", + description = "The IRI of the term, this value must be single URL encoded", + example = "http%3A%2F%2Fpurl.obolibrary.org%2Fobo%2FDUO_0000017") String termId, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + @Parameter(hidden = true) Pageable pageable, + @Parameter(hidden = true) PagedResourcesAssembler assembler) { + + ontologyId = ontologyId.toLowerCase(); + + String decoded = UriUtils.decode(termId, "UTF-8"); + String entityId = ontologyId+"+class+"+decoded; + String json = graphRepository.getTermJson(entityId); + if (json == null) + throw new ResourceNotFoundException("No instances could be found for " + ontologyId + + " and " + termId); + + return new ResponseEntity<>( json, HttpStatus.OK); + } + @RequestMapping(path = "/{onto}/terms/{iri}/jstree", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1GraphRepository.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1GraphRepository.java index e2079099c..c81781176 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1GraphRepository.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1GraphRepository.java @@ -182,6 +182,12 @@ public Page getTermInstancesPaginated(String entityId, String lang return neo4jClient.queryPaginated(query, "b", countQuery, parameters("id", entityId), pageable).map(record -> V1IndividualMapper.mapIndividual(record, lang)); } + public String getTermJson(String entityId) { + String query = "MATCH (a:OntologyClass) WHERE a.id = '"+entityId+"' RETURN a._json AS result"; + List> results = neo4jClient.rawQuery(query); + return results.get(0).get("result").toString(); + } + JsonObject getOntologyNodeJson(Node node, String lang) { JsonElement ontologyNodeObject = new JsonObject(); if(node.asMap().get("_json") != null && node.asMap().get("_json") instanceof String) From 2247f762c0a90da5a788e49a0ee1c0c93b1533f0 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Sat, 23 Nov 2024 14:01:04 +0100 Subject: [PATCH 141/146] added superclasses endpoint for #98 --- .../api/v1/V1OntologyTermController.java | 27 +++++++++++++++++++ .../ols/repository/v1/V1GraphRepository.java | 11 ++++++++ 2 files changed, 38 insertions(+) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyTermController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyTermController.java index dded4e4ba..8e3957b9f 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyTermController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyTermController.java @@ -412,6 +412,33 @@ HttpEntity> ancestors(@PathVariable("onto") return new ResponseEntity<>( 
assembler.toModel(ancestors, termAssembler), HttpStatus.OK); } + @RequestMapping(path = "/{onto}/terms/{iri}/superclasses", produces = {MediaType.APPLICATION_JSON_VALUE, + MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + HttpEntity> getSuperClasses( + @PathVariable("onto") + @Parameter(name = "onto", + description = "The ID of the ontology. For example for Data Use Ontology, the ID is duo.", + example = "duo") String ontologyId, + @PathVariable("iri") + @Parameter(name = "iri", + description = "The IRI of the term, this value must be single URL encoded", + example = "http%3A%2F%2Fpurl.obolibrary.org%2Fobo%2FDUO_0000017") String termId, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + @Parameter(hidden = true) Pageable pageable, + @Parameter(hidden = true) PagedResourcesAssembler assembler) { + + ontologyId = ontologyId.toLowerCase(); + + String decoded = UriUtils.decode(termId, "UTF-8"); + String entityId = ontologyId+"+class+"+decoded; + Page superClasses = graphRepository.getSuperClassPaginated(entityId, lang, pageable); + if (superClasses == null) + throw new ResourceNotFoundException("No super classes could be found for " + ontologyId + + " and " + termId); + + return new ResponseEntity<>( assembler.toModel(superClasses, termAssembler), HttpStatus.OK); + } + @RequestMapping(path = "/{onto}/terms/{iri}/equivalentclasses", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) HttpEntity> getEquivalentClasses( diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1GraphRepository.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1GraphRepository.java index c81781176..b870ad7d3 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1GraphRepository.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1GraphRepository.java @@ -160,6 +160,17 @@ public Page getRelatedFromPaginated(String entityId, String lang, Pageab return neo4jClient.queryPaginated(query, "x", countQuery, parameters("id", entityId), pageable).map(record -> V1TermMapper.mapTerm(record, lang)); } + public Page getSuperClassPaginated(String entityId, String lang, Pageable pageable) { + String query = + "MATCH (a:OntologyClass)-[r:`http://www.w3.org/2000/01/rdf-schema#subClassOf`]->(b:OntologyClass) " + + "WHERE a.id = $id RETURN b"; + String countQuery = + "MATCH (a:OntologyClass)-[r:`http://www.w3.org/2000/01/rdf-schema#subClassOf`]->(b:OntologyClass) " + + "WHERE a.id = $id RETURN count(b)"; + + return neo4jClient.queryPaginated(query, "b", countQuery, parameters("id", entityId), pageable).map(record -> V1TermMapper.mapTerm(record, lang)); + } + public Page getEquivalentClassPaginated(String entityId, String lang, Pageable pageable) { String query = "MATCH (a:OntologyClass)-[r:`http://www.w3.org/2002/07/owl#equivalentClass`]-(b:OntologyClass) " + From 3542b963bde1d8fa86f216023f5eeecd5344f364 Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Mon, 25 Nov 2024 10:57:02 +0100 Subject: [PATCH 142/146] removed unnecessary parameters from json endpoint for #98 --- .../ols/controller/api/v1/V1OntologyTermController.java | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyTermController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyTermController.java index 8e3957b9f..c74e8a009 100644 --- 
a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyTermController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyTermController.java @@ -531,10 +531,7 @@ HttpEntity getJson( @PathVariable("iri") @Parameter(name = "iri", description = "The IRI of the term, this value must be single URL encoded", - example = "http%3A%2F%2Fpurl.obolibrary.org%2Fobo%2FDUO_0000017") String termId, - @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, - @Parameter(hidden = true) Pageable pageable, - @Parameter(hidden = true) PagedResourcesAssembler assembler) { + example = "http%3A%2F%2Fpurl.obolibrary.org%2Fobo%2FDUO_0000017") String termId) { ontologyId = ontologyId.toLowerCase(); @@ -542,7 +539,7 @@ HttpEntity getJson( String entityId = ontologyId+"+class+"+decoded; String json = graphRepository.getTermJson(entityId); if (json == null) - throw new ResourceNotFoundException("No instances could be found for " + ontologyId + throw new ResourceNotFoundException("No _json could be found for " + ontologyId + " and " + termId); return new ResponseEntity<>( json, HttpStatus.OK); From 3a5067b8eb0c75d0eb24158880d9fa072b30fb97 Mon Sep 17 00:00:00 2001 From: deepananbalagan Date: Tue, 10 Dec 2024 09:36:33 +0100 Subject: [PATCH 143/146] merged with EBI latest changes and included fix for fibo --- .../rdf2json/annotators/RelatedAnnotator.java | 230 +++++++++++------- 1 file changed, 139 insertions(+), 91 deletions(-) diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/RelatedAnnotator.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/RelatedAnnotator.java index a6bdfb22d..e0d98bacc 100644 --- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/RelatedAnnotator.java +++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/RelatedAnnotator.java @@ -11,29 +11,34 @@ import java.util.*; import java.util.stream.Collectors; +import static uk.ac.ebi.ols.shared.DefinedFields.RELATED_FROM; +import static uk.ac.ebi.ols.shared.DefinedFields.RELATED_TO; + public class RelatedAnnotator { private static final Logger logger = LoggerFactory.getLogger(RelatedAnnotator.class); - public static void annotateRelated(OntologyGraph graph) { + public void annotateRelated(OntologyGraph graph) { Set ontologyBaseUris = OntologyBaseUris.getOntologyBaseUris(graph); String preferredPrefix = (String)graph.config.get("preferredPrefix"); long startTime3 = System.nanoTime(); - for(String id : graph.nodes.keySet()) { - OntologyNode c = graph.nodes.get(id); - if (c.types.contains(OntologyNode.NodeType.CLASS)) { + RelatedInfo relatedInfo = new RelatedInfo(); + for(String id : graph.nodes.keySet()) { + OntologyNode c = graph.nodes.get(id); + if (c.types.contains(OntologyNode.NodeType.CLASS)) { + + // skip bnodes + if(c.uri == null) + continue; - // skip bnodes - if(c.uri == null) - continue; + List parents = c.properties.getPropertyValues("http://www.w3.org/2000/01/rdf-schema#subClassOf"); - List parents = c.properties.getPropertyValues("http://www.w3.org/2000/01/rdf-schema#subClassOf"); + if(parents != null) { - if(parents != null) { + for(PropertyValue parent : parents) { - for(PropertyValue parent : parents) { // We are only looking for anonymous parents, which are either class expressions or restrictions. 
// @@ -43,45 +48,49 @@ public static void annotateRelated(OntologyGraph graph) { OntologyNode parentClassExprOrRestriction = graph.nodes.get( ((PropertyValueBNode) parent).getId() ); - PropertyValue onProperty = parentClassExprOrRestriction.properties.getPropertyValue("http://www.w3.org/2002/07/owl#onProperty"); + PropertyValue onProperty = parentClassExprOrRestriction.properties + .getPropertyValue("http://www.w3.org/2002/07/owl#onProperty"); if(onProperty == null) { - annotateRelated_Class_subClassOf_ClassExpr( + relatedInfo = annotateRelated_Class_subClassOf_ClassExpr(relatedInfo, c, parentClassExprOrRestriction, ontologyBaseUris, preferredPrefix, graph); } else { - annotateRelated_Class_subClassOf_Restriction( - c, onProperty, parentClassExprOrRestriction, ontologyBaseUris, preferredPrefix, graph); + relatedInfo = annotateRelated_Class_subClassOf_Restriction(relatedInfo, + c, onProperty, parentClassExprOrRestriction, graph); } } } - } - } - long endTime3 = System.nanoTime(); - logger.info("annotate related: {}", ((endTime3 - startTime3) / 1000 / 1000 / 1000)); - } + } + } + relatedInfo.updateOntologyNodesWithRelatedLists(); + long endTime3 = System.nanoTime(); + logger.info("annotate related: {}", ((endTime3 - startTime3) / 1000 / 1000 / 1000)); + } - private static void annotateRelated_Class_subClassOf_ClassExpr( - OntologyNode classNode, OntologyNode fillerClassExpr, Set ontologyBaseUris, String preferredPrefix, OntologyGraph graph) { + private static RelatedInfo annotateRelated_Class_subClassOf_ClassExpr(RelatedInfo relatedInfo, + OntologyNode classNode, OntologyNode fillerClassExpr, Set ontologyBaseUris, + String preferredPrefix, OntologyGraph graph) { PropertyValue oneOf = fillerClassExpr.properties.getPropertyValue("http://www.w3.org/2002/07/owl#oneOf"); if(oneOf != null) { // This is a oneOf class expression - annotateRelated_Class_subClassOf_ClassExpr_oneOf(classNode, fillerClassExpr, oneOf, graph); - return; + return annotateRelated_Class_subClassOf_ClassExpr_oneOf(relatedInfo, classNode, fillerClassExpr, oneOf, graph); } PropertyValue intersectionOf = fillerClassExpr.properties.getPropertyValue("http://www.w3.org/2002/07/owl#intersectionOf"); if(intersectionOf != null) { // This is an intersectionOf class expression (anonymous conjunction) - annotateRelated_Class_subClassOf_ClassExpr_intersectionOf(classNode, fillerClassExpr, intersectionOf, graph); - return; + return annotateRelated_Class_subClassOf_ClassExpr_intersectionOf(relatedInfo, classNode, fillerClassExpr, + intersectionOf, graph); } + return relatedInfo; } - private static void annotateRelated_Class_subClassOf_ClassExpr_oneOf(OntologyNode classNode, OntologyNode fillerClassExpr, PropertyValue filler, OntologyGraph graph) { + private static RelatedInfo annotateRelated_Class_subClassOf_ClassExpr_oneOf(RelatedInfo relatedInfo, OntologyNode classNode, + OntologyNode fillerClassExpr, PropertyValue filler, OntologyGraph graph) { // The filler is an RDF list of Individuals @@ -94,14 +103,17 @@ private static void annotateRelated_Class_subClassOf_ClassExpr_oneOf(OntologyNod .collect(Collectors.toList()); for(OntologyNode individualNode : fillerIndividuals) { - classNode.properties.addProperty("relatedTo", + relatedInfo.addRelatedTo(classNode, new PropertyValueRelated(fillerClassExpr, "http://www.w3.org/2000/01/rdf-schema#subClassOf", individualNode)); - individualNode.properties.addProperty("relatedFrom", + + relatedInfo.addRelatedFrom(individualNode, new PropertyValueRelated(fillerClassExpr, 
"http://www.w3.org/2000/01/rdf-schema#subClassOf", classNode)); } + return relatedInfo; } - private static void annotateRelated_Class_subClassOf_ClassExpr_intersectionOf(OntologyNode classNode, OntologyNode fillerClassExpr, PropertyValue filler, OntologyGraph graph) { + private static RelatedInfo annotateRelated_Class_subClassOf_ClassExpr_intersectionOf(RelatedInfo relatedInfo, + OntologyNode classNode, OntologyNode fillerClassExpr, PropertyValue filler, OntologyGraph graph) { // The filler is an RDF list of Classes @@ -118,79 +130,83 @@ private static void annotateRelated_Class_subClassOf_ClassExpr_intersectionOf(On // Named nodes only. TODO what to do about bnodes in this case? if(fillerClassNode.uri != null) { - classNode.properties.addProperty("relatedTo", + relatedInfo.addRelatedTo(classNode, new PropertyValueRelated(fillerClassExpr, "http://www.w3.org/2000/01/rdf-schema#subClassOf", fillerClassNode)); - fillerClassNode.properties.addProperty("relatedFrom", + relatedInfo.addRelatedFrom(fillerClassNode, new PropertyValueRelated(fillerClassExpr, "http://www.w3.org/2000/01/rdf-schema#subClassOf", classNode)); } } + return relatedInfo; } - private static void annotateRelated_Class_subClassOf_Restriction( - OntologyNode classNode, PropertyValue property, OntologyNode fillerRestriction, Set ontologyBaseUris, String preferredPrefix, OntologyGraph graph) { + private static RelatedInfo annotateRelated_Class_subClassOf_Restriction(RelatedInfo relatedInfo, + OntologyNode classNode, PropertyValue property, OntologyNode fillerRestriction, + OntologyGraph graph) { if(property.getType() != PropertyValue.Type.URI) { // We can't do anything with anonymous properties. - return; + return relatedInfo; } PropertyValue onProperty = fillerRestriction.properties.getPropertyValue("http://www.w3.org/2002/07/owl#onProperty"); if(onProperty == null || onProperty.getType() != PropertyValue.Type.URI) - return; + return relatedInfo; String propertyUri = ((PropertyValueURI) onProperty).getUri(); PropertyValue someValuesFrom = fillerRestriction.properties.getPropertyValue("http://www.w3.org/2002/07/owl#someValuesFrom"); if(someValuesFrom != null) { // This is a someValuesFrom restriction - annotateRelated_Class_subClassOf_Restriction_someValuesFrom( - classNode, propertyUri, fillerRestriction, someValuesFrom, ontologyBaseUris, preferredPrefix, graph); - return; + return annotateRelated_Class_subClassOf_Restriction_someValuesFrom(relatedInfo, + classNode, propertyUri, fillerRestriction, someValuesFrom, graph); } PropertyValue hasValue = fillerRestriction.properties.getPropertyValue("http://www.w3.org/2002/07/owl#hasValue"); if(hasValue != null) { // This is a hasValue restriction. The value can be either an individual or a literal data value. 
// - annotateRelated_Class_subClassOf_Restriction_hasValue(classNode, propertyUri, fillerRestriction, hasValue, graph); - return; + return annotateRelated_Class_subClassOf_Restriction_hasValue(relatedInfo, classNode, propertyUri, + fillerRestriction, hasValue, graph); + } + return relatedInfo; } - private static void annotateRelated_Class_subClassOf_Restriction_someValuesFrom( - OntologyNode classNode, String propertyUri, OntologyNode fillerRestriction, PropertyValue filler, Set ontologyBaseUris, String preferredPrefix, OntologyGraph graph) { + private static RelatedInfo annotateRelated_Class_subClassOf_Restriction_someValuesFrom(RelatedInfo relatedInfo, + OntologyNode classNode, String propertyUri, OntologyNode fillerRestriction, PropertyValue filler, + OntologyGraph graph) { if(filler.getType() == PropertyValue.Type.URI) { String fillerUri = ((PropertyValueURI) filler).getUri(); - // Is the filler different from the entity we are annotating? - if(!fillerUri.equals(classNode.uri)) { - - OntologyNode fillerNode = graph.nodes.get(fillerUri); - - if(fillerNode != null) { // sometimes filler not included in ontology, e.g. "subClassOf some xsd:float" in cdao - PropertyValue someValuesFrom = null; - if(fillerRestriction != null) - someValuesFrom = fillerRestriction.properties.getPropertyValue("http://www.w3.org/2002/07/owl#someValuesFrom"); - - if(someValuesFrom != null) { - if(!((PropertyValueURI) someValuesFrom).getUri().equalsIgnoreCase(fillerUri)) { - classNode.properties.addProperty("relatedTo", new PropertyValueRelated(fillerRestriction, propertyUri, fillerNode)); - fillerNode.properties.addProperty("relatedFrom", new PropertyValueRelated(fillerRestriction, propertyUri, classNode)); + // Is the filler different from the entity we are annotating? + if(!fillerUri.equals(classNode.uri)) { + + OntologyNode fillerNode = graph.nodes.get(fillerUri); + + if(fillerNode != null) { // sometimes filler not included in ontology, e.g. 
"subClassOf some xsd:float" in cdao + PropertyValue someValuesFrom = null; + if(fillerRestriction != null) + someValuesFrom = fillerRestriction.properties.getPropertyValue("http://www.w3.org/2002/07/owl#someValuesFrom"); + + if(someValuesFrom != null) { + if(!((PropertyValueURI) someValuesFrom).getUri().equalsIgnoreCase(fillerUri)) { + relatedInfo.addRelatedTo(classNode, new PropertyValueRelated(fillerRestriction, propertyUri, fillerNode)); + relatedInfo.addRelatedFrom(fillerNode, new PropertyValueRelated(fillerRestriction, propertyUri, classNode)); + } + } + else { + relatedInfo.addRelatedTo(classNode, new PropertyValueRelated(fillerRestriction, propertyUri, fillerNode)); + relatedInfo.addRelatedFrom(fillerNode, new PropertyValueRelated(fillerRestriction, propertyUri, classNode)); } - } - else { - classNode.properties.addProperty("relatedTo", new PropertyValueRelated(fillerRestriction, propertyUri, fillerNode)); - fillerNode.properties.addProperty("relatedFrom", new PropertyValueRelated(fillerRestriction, propertyUri, classNode)); } } - } - return; + return relatedInfo; } @@ -201,26 +217,25 @@ private static void annotateRelated_Class_subClassOf_Restriction_someValuesFrom( PropertyValue oneOf = fillerClassExpr.properties.getPropertyValue("http://www.w3.org/2002/07/owl#oneOf"); if(oneOf != null) { // This is a oneOf class expression - annotateRelated_Class_subClassOf_Restriction_someValuesFrom_oneOf(classNode, propertyUri, fillerClassExpr, oneOf, ontologyBaseUris, preferredPrefix, graph); - return; + return annotateRelated_Class_subClassOf_Restriction_someValuesFrom_oneOf(relatedInfo, classNode, propertyUri, oneOf, graph); } PropertyValue intersectionOf = fillerClassExpr.properties.getPropertyValue("http://www.w3.org/2002/07/owl#intersectionOf"); if(intersectionOf != null) { // This is an intersectionOf class expression (anonymous conjunction) - annotateRelated_Class_subClassOf_Restriction_someValuesFrom_intersectionOf(classNode, propertyUri, fillerClassExpr, intersectionOf, ontologyBaseUris, preferredPrefix, graph); - return; + return annotateRelated_Class_subClassOf_Restriction_someValuesFrom_intersectionOf(relatedInfo, classNode, + propertyUri, fillerClassExpr, intersectionOf, graph); } } - + return relatedInfo; } - private static void annotateRelated_Class_subClassOf_Restriction_someValuesFrom_oneOf( - OntologyNode classNode, String propertyUri, OntologyNode fillerRestriction, PropertyValue filler, Set ontologyBaseUris, String preferredPrefix, OntologyGraph graph) { + private static RelatedInfo annotateRelated_Class_subClassOf_Restriction_someValuesFrom_oneOf(RelatedInfo relatedInfo, + OntologyNode classNode, String propertyUri, PropertyValue filler, OntologyGraph graph) { - OntologyNode fillerNode = graph.nodes.get( ((PropertyValueBNode) filler).getId() ); + // The filler is an RDF list of Individuals - logger.info("filler node uri: "+fillerNode.uri); + OntologyNode fillerNode = graph.nodes.get( ((PropertyValueBNode) filler).getId() ); List fillerIndividuals = new ArrayList<>(); if(fillerNode != null){ @@ -239,19 +254,16 @@ private static void annotateRelated_Class_subClassOf_Restriction_someValuesFrom_ } } } - for(OntologyNode individualNode : fillerIndividuals) { - classNode.properties.addProperty("relatedTo", - new PropertyValueRelated(fillerNode, propertyUri, individualNode)); - individualNode.properties.addProperty("relatedFrom", - new PropertyValueRelated(fillerNode, propertyUri, classNode)); + relatedInfo.addRelatedTo(classNode, new PropertyValueRelated(fillerNode, propertyUri, 
individualNode)); + relatedInfo.addRelatedFrom(individualNode, new PropertyValueRelated(fillerNode, propertyUri, classNode)); } } - + return relatedInfo; } - private static void annotateRelated_Class_subClassOf_Restriction_someValuesFrom_intersectionOf( - OntologyNode classNode, String propertyUri, OntologyNode fillerRestriction, PropertyValue filler, Set ontologyBaseUris, String preferredPrefix, OntologyGraph graph) { + private static RelatedInfo annotateRelated_Class_subClassOf_Restriction_someValuesFrom_intersectionOf(RelatedInfo relatedInfo, + OntologyNode classNode, String propertyUri, OntologyNode fillerRestriction, PropertyValue filler, OntologyGraph graph) { // The filler is an RDF list of Classes @@ -267,18 +279,15 @@ private static void annotateRelated_Class_subClassOf_Restriction_someValuesFrom_ // Named nodes only. TODO what to do about bnodes in this case? if(fillerClassNode.uri != null) { - - classNode.properties.addProperty("relatedTo", - new PropertyValueRelated(fillerRestriction, propertyUri, fillerClassNode)); - - fillerClassNode.properties.addProperty("relatedFrom", - new PropertyValueRelated(fillerRestriction, propertyUri, classNode)); + relatedInfo.addRelatedTo(classNode, new PropertyValueRelated(fillerRestriction, propertyUri, fillerClassNode)); + relatedInfo.addRelatedFrom(fillerClassNode, new PropertyValueRelated(fillerRestriction, propertyUri, classNode)); } } - + return relatedInfo; } - private static void annotateRelated_Class_subClassOf_Restriction_hasValue(OntologyNode classNode, String propertyUri, OntologyNode fillerRestriction, PropertyValue filler, OntologyGraph graph) { + private static RelatedInfo annotateRelated_Class_subClassOf_Restriction_hasValue(RelatedInfo relatedInfo, OntologyNode classNode, + String propertyUri, OntologyNode fillerRestriction, PropertyValue filler, OntologyGraph graph) { // The filler can be either an individual or a literal data value. @@ -286,16 +295,55 @@ private static void annotateRelated_Class_subClassOf_Restriction_hasValue(Ontolo OntologyNode fillerNode = graph.nodes.get( ((PropertyValueURI) filler).getUri() ); - if(fillerNode != null && fillerNode.types.contains(OntologyNode.NodeType.INDIVIDUAL)) { + if(fillerNode.types.contains(OntologyNode.NodeType.INDIVIDUAL)) { // fillerNode is an individual - fillerNode.properties.addProperty("relatedTo", new PropertyValueRelated(fillerRestriction, propertyUri, classNode)); - classNode.properties.addProperty("relatedFrom", new PropertyValueRelated(fillerRestriction, propertyUri, fillerNode)); + relatedInfo.addRelatedTo(fillerNode, new PropertyValueRelated(fillerRestriction, propertyUri, classNode)); + relatedInfo.addRelatedFrom(classNode, new PropertyValueRelated(fillerRestriction, propertyUri, fillerNode)); } - return; + } // TODO: what to do with data values? 
+        return relatedInfo;
+    }
+
+    private class RelatedInfo {
+        private Map> relatedFromMap = new HashMap<>();
+        private Map> relatedToMap = new HashMap<>();
+
+        void addRelatedFrom(OntologyNode ontologyNode, PropertyValueRelated relatedFrom) {
+            Set relatedFromSetToUpdate;
+            if (relatedFromMap.containsKey(ontologyNode)) {
+                relatedFromSetToUpdate = relatedFromMap.get(ontologyNode);
+            } else
+                relatedFromSetToUpdate = new HashSet<>();
+
+            relatedFromSetToUpdate.add(relatedFrom);
+            relatedFromMap.put(ontologyNode, relatedFromSetToUpdate);
+        }
+
+        void addRelatedTo(OntologyNode ontologyNode, PropertyValueRelated relatedTo) {
+            Set relatedToSetToUpdate;
+            if (relatedToMap.containsKey(ontologyNode)) {
+                relatedToSetToUpdate = relatedToMap.get(ontologyNode);
+            } else
+                relatedToSetToUpdate = new HashSet<>();
+
+            relatedToSetToUpdate.add(relatedTo);
+            relatedToMap.put(ontologyNode, relatedToSetToUpdate);
+        }
+        void updateOntologyNodesWithRelatedLists() {
+            for(OntologyNode ontologyNode: relatedFromMap.keySet()) {
+                ontologyNode.properties.addProperty(RELATED_FROM.getText(),
+                        new PropertyValueList(Arrays.asList(relatedFromMap.get(ontologyNode).toArray())));
+            }
+            for(OntologyNode ontologyNode: relatedToMap.keySet()) {
+                ontologyNode.properties.addProperty(RELATED_TO.getText(),
+                        new PropertyValueList(Arrays.asList(relatedToMap.get(ontologyNode).toArray())));
+            }
+        }
+    }
 }

From 704a3a9bb7919b510cb3f8b373a1c9a04d829c29 Mon Sep 17 00:00:00 2001
From: deepananbalagan
Date: Tue, 10 Dec 2024 12:08:28 +0100
Subject: [PATCH 144/146] updated null check for fillerNode

---
 .../java/uk/ac/ebi/rdf2json/annotators/RelatedAnnotator.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/RelatedAnnotator.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/RelatedAnnotator.java
index e0d98bacc..d83454f1c 100644
--- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/RelatedAnnotator.java
+++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/RelatedAnnotator.java
@@ -295,7 +295,7 @@ private static RelatedInfo annotateRelated_Class_subClassOf_Restriction_hasValue
 
         OntologyNode fillerNode = graph.nodes.get( ((PropertyValueURI) filler).getUri() );
 
-        if(fillerNode.types.contains(OntologyNode.NodeType.INDIVIDUAL)) {
+        if(fillerNode != null && fillerNode.types.contains(OntologyNode.NodeType.INDIVIDUAL)) {
             // fillerNode is an individual
             relatedInfo.addRelatedTo(fillerNode, new PropertyValueRelated(fillerRestriction, propertyUri, classNode));
             relatedInfo.addRelatedFrom(classNode, new PropertyValueRelated(fillerRestriction, propertyUri, fillerNode));
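The net effect of the two RelatedAnnotator patches above is an accumulate-then-flush pattern: related pairs are collected into per-node sets (so repeated pairs collapse to one entry) and written onto each node exactly once, instead of appending a property on every match. A self-contained sketch of that pattern, with plain strings standing in for the OntologyNode and PropertyValueRelated types of the rdf2json module and illustrative term IDs:

    import java.util.*;

    public class RelatedAccumulatorSketch {
        private final Map<String, Set<String>> relatedToMap = new HashMap<>();

        void addRelatedTo(String node, String related) {
            // computeIfAbsent keeps one set per node; the Set deduplicates repeated pairs,
            // which is what the patch relies on to avoid duplicate relatedTo entries.
            relatedToMap.computeIfAbsent(node, k -> new HashSet<>()).add(related);
        }

        void flush() {
            // One property write per node, mirroring updateOntologyNodesWithRelatedLists().
            relatedToMap.forEach((node, related) ->
                    System.out.println(node + " relatedTo " + related));
        }

        public static void main(String[] args) {
            RelatedAccumulatorSketch sketch = new RelatedAccumulatorSketch();
            sketch.addRelatedTo("obo:DUO_0000017", "obo:DUO_0000001");
            sketch.addRelatedTo("obo:DUO_0000017", "obo:DUO_0000001"); // duplicate, collapsed
            sketch.flush();
        }
    }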
-f "/opt/ols/dataload/neo4j-community-5.25.1-unix.tar.gz" ]; \ - then mkdir /opt/neo4j && curl https://dist.neo4j.org/neo4j-community-5.25.1-unix.tar.gz | tar -xz --strip-components=1 -C /opt/neo4j; \ - else mkdir /opt/neo4j && tar -xz -f /opt/ols/dataload/neo4j-community-5.25.1-unix.tar.gz --strip-components=1 -C /opt/neo4j; fi +RUN if [ ! -f "/opt/ols/dataload/neo4j-community-5.26.0-unix.tar.gz" ]; \ + then mkdir /opt/neo4j && curl https://dist.neo4j.org/neo4j-community-5.26.0-unix.tar.gz | tar -xz --strip-components=1 -C /opt/neo4j; \ + else mkdir /opt/neo4j && tar -xz -f /opt/ols/dataload/neo4j-community-5.26.0-unix.tar.gz --strip-components=1 -C /opt/neo4j; fi RUN if [ ! -f "/opt/ols/dataload/solr-9.7.0.tgz" ]; \ then mkdir /opt/solr && curl https://archive.apache.org/dist/solr/solr/9.7.0/solr-9.7.0.tgz | tar -xz --strip-components=1 -C /opt/solr; \ diff --git a/docker-compose.yml b/docker-compose.yml index 93adea45f..21162e43c 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -17,7 +17,7 @@ services: #- ./docker_out:/tmp/out:rw command: ./dataload.dockersh ${OLS4_DATALOAD_ARGS:-} mongo: - image: mongo:3.4.24 + image: mongo:7.0.16 ports: - 27017:27017 volumes: @@ -37,7 +37,7 @@ services: ols4-dataload: condition: service_completed_successfully ols4-neo4j: - image: neo4j:5.25.1-community + image: neo4j:5.26.0-community ports: - 7474:7474 - 7687:7687 From 8f85fe8c70e87507a5e2004076ceb817eecb479a Mon Sep 17 00:00:00 2001 From: Erhun Giray TUNCAY <48091473+giraygi@users.noreply.github.com> Date: Thu, 9 Jan 2025 17:44:59 +0100 Subject: [PATCH 146/146] updated mongo to latest for #104 --- docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index 21162e43c..c54160272 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -17,7 +17,7 @@ services: #- ./docker_out:/tmp/out:rw command: ./dataload.dockersh ${OLS4_DATALOAD_ARGS:-} mongo: - image: mongo:7.0.16 + image: mongo:8.0.4 ports: - 27017:27017 volumes: