diff --git a/backend/Dockerfile b/backend/Dockerfile
index e224ff95d..6198f4dc5 100644
--- a/backend/Dockerfile
+++ b/backend/Dockerfile
@@ -1,6 +1,5 @@
-#FROM maven:3.6.3-adoptopenjdk-14
-FROM maven:3.8-jdk-11
+FROM maven:3.9.6-eclipse-temurin-17
 
 RUN mkdir /opt/ols && mkdir /opt/ols/backend && mkdir /opt/ols/ols-shared
 
 COPY ./backend /opt/ols/backend
diff --git a/backend/pom.xml b/backend/pom.xml
index 838ec3ae5..24b8b525e 100644
--- a/backend/pom.xml
+++ b/backend/pom.xml
@@ -15,9 +15,9 @@
-        <maven.compiler.source>11</maven.compiler.source>
-        <maven.compiler.target>11</maven.compiler.target>
-        <java.version>11</java.version>
+        <maven.compiler.source>17</maven.compiler.source>
+        <maven.compiler.target>17</maven.compiler.target>
+        <java.version>17</java.version>
@@ -32,11 +32,23 @@
         <version>2.8.9</version>
-        <dependency>
-            <groupId>org.neo4j.driver</groupId>
-            <artifactId>neo4j-java-driver</artifactId>
-            <version>4.4.1</version>
-        </dependency>
+        <dependency>
+            <groupId>org.neo4j.driver</groupId>
+            <artifactId>neo4j-java-driver</artifactId>
+            <version>5.19.0</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-data-mongodb</artifactId>
+            <version>2.7.5</version>
+        </dependency>
+
+        <dependency>
+            <groupId>javax.persistence</groupId>
+            <artifactId>javax.persistence-api</artifactId>
+            <version>2.2</version>
+        </dependency>
         <groupId>org.springframework.boot</groupId>
@@ -149,6 +161,18 @@
         <artifactId>springdoc-openapi-ui</artifactId>
         <version>1.6.4</version>
+        <dependency>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-collections4</artifactId>
+            <version>4.4</version>
+            <scope>compile</scope>
+        </dependency>
+        <dependency>
+            <groupId>joda-time</groupId>
+            <artifactId>joda-time</artifactId>
+            <version>2.13.0</version>
+            <scope>compile</scope>
+        </dependency>
diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/config/RestCallInterceptor.java b/backend/src/main/java/uk/ac/ebi/spot/ols/config/RestCallInterceptor.java
new file mode 100644
index 000000000..c019d6984
--- /dev/null
+++ b/backend/src/main/java/uk/ac/ebi/spot/ols/config/RestCallInterceptor.java
@@ -0,0 +1,31 @@
+package uk.ac.ebi.spot.ols.config;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.web.servlet.HandlerInterceptor;
+
+import uk.ac.ebi.spot.ols.reststatistics.service.RestCallHandlerService;
+
+public class RestCallInterceptor implements HandlerInterceptor {
+    private final RestCallHandlerService restCallHandlerService;
+
+    @Autowired
+    public RestCallInterceptor(RestCallHandlerService restCallHandlerService) {
+        this.restCallHandlerService = restCallHandlerService;
+    }
+
+    @Override
+    public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) throws Exception {
+        if (!request.getRequestURL().toString().contains("/api")
+                || request.getRequestURL().toString().contains("/api/rest/statistics")) {
+            return true;
+        }
+
+        restCallHandlerService.handle(request);
+
+        return true;
+    }
+}
diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/config/WebConfig.java b/backend/src/main/java/uk/ac/ebi/spot/ols/config/WebConfig.java
index c67a52c02..640772f91 100644
--- a/backend/src/main/java/uk/ac/ebi/spot/ols/config/WebConfig.java
+++ b/backend/src/main/java/uk/ac/ebi/spot/ols/config/WebConfig.java
@@ -2,13 +2,9 @@
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.ComponentScan;
 import org.springframework.context.annotation.Configuration;
-import org.springframework.context.annotation.Import;
-import org.springframework.http.MediaType;
-import org.springframework.web.filter.CharacterEncodingFilter;
 import org.springframework.web.servlet.config.annotation.*;
-import org.springframework.web.util.UrlPathHelper;
+import uk.ac.ebi.spot.ols.reststatistics.service.RestCallHandlerService;
 
 /**
  * @author Simon Jupp
@@ -25,6 +21,10 @@ public class WebConfig extends WebMvcConfigurerAdapter {
      *
      * @param configurer
      */
+
+    @Autowired
+    RestCallHandlerService restCallHandlerService;
+
     @Override
     public void configurePathMatch(PathMatchConfigurer configurer) {
         // UrlPathHelper urlPathHelper = new UrlPathHelper();
@@ -36,17 +36,15 @@ public void
configurePathMatch(PathMatchConfigurer configurer) { } -// @Bean -// MaintenanceInterceptor getMaintenanceInterceptor() { -// return new MaintenanceInterceptor(); -// } - -// @Autowired -// MaintenanceInterceptor interceptor; -// @Override -// public void addInterceptors(InterceptorRegistry registry) { -// registry.addInterceptor(interceptor); -// } + @Override + public void addInterceptors(InterceptorRegistry registry) { + registry.addInterceptor(getRestCallInterceptor()); + } + + @Bean + public RestCallInterceptor getRestCallInterceptor() { + return new RestCallInterceptor(restCallHandlerService); + } @Override public void addCorsMappings(CorsRegistry registry) { diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/TopConceptEnum.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/TopConceptEnum.java new file mode 100644 index 000000000..3a86e8c58 --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/TopConceptEnum.java @@ -0,0 +1,7 @@ +package uk.ac.ebi.spot.ols.controller.api.v1; + +public enum TopConceptEnum { + SCHEMA, + TOPCONCEPTOF_PROPERTY, + RELATIONSHIPS +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1ApiUnavailable.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1ApiUnavailable.java index 4452d07bf..dabe0309e 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1ApiUnavailable.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1ApiUnavailable.java @@ -6,8 +6,7 @@ import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Controller; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.*; import javax.servlet.http.HttpServletResponse; @@ -16,7 +15,7 @@ * @date 27/09/2016 * Samples, Phenotypes and Ontologies Team, EMBL-EBI */ -@Controller +@RestController public class V1ApiUnavailable { @RequestMapping(path = "/api/unavailable", produces = {MediaType.APPLICATION_JSON_VALUE}, method = RequestMethod.GET) diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1IndividualController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1IndividualController.java index d5e4a4fc5..e3b5438a5 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1IndividualController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1IndividualController.java @@ -30,6 +30,7 @@ * @date 18/08/2015 * Samples, Phenotypes and Ontologies Team, EMBL-EBI */ + @Tag(name = "Individual Controller", description = "NOTE: For IRI parameters, the value must be URL encoded. 
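A quick aside on the interceptor wiring registered above: every /api call is now recorded through RestCallHandlerService, except calls to the statistics endpoint itself. Below is a minimal standalone sketch of that recording rule, extracted as a plain predicate; the sketch class is illustrative and not part of the PR, only RestCallInterceptor and RestCallHandlerService are names from the diff.

// Illustrative sketch: the recording rule RestCallInterceptor.preHandle applies,
// pulled out as a plain predicate so it can be unit-tested in isolation.
public class RestCallRecordingRule {

    // Record only /api requests, but never the statistics endpoint itself,
    // which would otherwise count its own reads.
    static boolean shouldRecord(String requestUrl) {
        return requestUrl.contains("/api")
                && !requestUrl.contains("/api/rest/statistics");
    }

    public static void main(String[] args) {
        System.out.println(shouldRecord("http://localhost:8080/api/ontologies"));      // true
        System.out.println(shouldRecord("http://localhost:8080/api/rest/statistics")); // false
        System.out.println(shouldRecord("http://localhost:8080/index.html"));          // false
    }
}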
" + "For example, the IRI http://purl.obolibrary.org/obo/IAO_0000124 should be encoded as http%3A%252F%2Fpurl.obolibrary.org%2Fobo%2FIAO_0000124.") @RestController @@ -97,7 +98,8 @@ HttpEntity> getAllIndividuals( return new ResponseEntity<>(assembler.toModel(terms, individualAssembler), HttpStatus.OK); } - + + @RequestMapping(path = "/findByIdAndIsDefiningOntology/{iri}", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) HttpEntity> getAllIndividualsByIdAndIsDefiningOntology( @PathVariable("iri") @@ -111,11 +113,11 @@ HttpEntity> getAllIndividualsByIdAndIsDefiningOntology( decoded = UriUtils.decode(termId, "UTF-8"); return getAllIndividualsByIdAndIsDefiningOntology(decoded, null, null, lang, pageable, assembler); - } - - - @RequestMapping(path = "/findByIdAndIsDefiningOntology", - produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, + } + + + @RequestMapping(path = "/findByIdAndIsDefiningOntology", + produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) HttpEntity> getAllIndividualsByIdAndIsDefiningOntology( @RequestParam(value = "iri", required = false) @@ -148,11 +150,11 @@ HttpEntity> getAllIndividualsByIdAndIsDefiningOntology( return new ResponseEntity<>(assembler.toModel(terms, individualAssembler), HttpStatus.OK); } - + @ResponseStatus(value = HttpStatus.NOT_FOUND, reason = "EntityModel not found") @ExceptionHandler(ResourceNotFoundException.class) public void handleError(HttpServletRequest req, Exception exception) { } -} \ No newline at end of file +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java index 79050ce62..bb59cc00a 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyController.java @@ -7,6 +7,7 @@ import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; +import org.springframework.data.domain.PageImpl; import org.springframework.data.domain.Pageable; import org.springframework.data.rest.webmvc.RepositoryLinksResource; import org.springframework.data.rest.webmvc.ResourceNotFoundException; @@ -22,10 +23,14 @@ import org.springframework.http.HttpStatus; import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; -import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.*; + +import io.swagger.v3.oas.annotations.Parameter; +import uk.ac.ebi.spot.ols.model.FilterOption; import uk.ac.ebi.spot.ols.model.v1.V1Ontology; import uk.ac.ebi.spot.ols.repository.v1.V1OntologyRepository; +import java.lang.reflect.*; +import java.util.*; import javax.servlet.http.HttpServletRequest; @@ -34,6 +39,7 @@ * @date 19/08/2015 * Samples, Phenotypes and Ontologies Team, EMBL-EBI */ + @Tag(name = "Ontology Controller") @RestController @RequestMapping("/api/ontologies") @@ -86,6 +92,65 @@ HttpEntity> getOntology( return new ResponseEntity<>( documentAssembler.toModel(document), HttpStatus.OK); } + @RequestMapping(path = "/filterby", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + HttpEntity> getOntologiesByMetadata( + @RequestParam(value = "schema", required = true) Collection schemas, + @RequestParam(value = 
"classification", required = true) Collection classifications, + @Parameter(description = "Set to true (default setting is false) for intersection (default behavior is union) of classifications.") + @RequestParam(value = "exclusive", required = false, defaultValue = "false") boolean exclusive, + @Parameter(description = "Use License option to filter based on license.label, license.logo and license.url variables. " + + "Use Composite Option to filter based on the objects (i.e. collection, subject) within the classifications variable. " + + "Use Linear option to filter based on String and Collection based variables.") + @RequestParam(value = "option", required = false, defaultValue = "LINEAR") FilterOption filterOption, + @PageableDefault(size = 100, page = 0) Pageable pageable, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + PagedResourcesAssembler assembler + ) throws ResourceNotFoundException { + Set tempSet = new HashSet(); + if (filterOption == FilterOption.LINEAR) + tempSet = ontologyRepository.filter(schemas,classifications,exclusive, lang); + else if (filterOption == FilterOption.COMPOSITE) + tempSet = ontologyRepository.filterComposite(schemas,classifications,exclusive, lang); + else if (filterOption == FilterOption.LICENSE) + tempSet = ontologyRepository.filterLicense(schemas,classifications,exclusive,lang); + List tempList = new ArrayList(); + tempList.addAll(tempSet); + final int start = (int)pageable.getOffset(); + final int end = Math.min((start + pageable.getPageSize()), tempSet.size()); + Page document = new PageImpl<>(tempList.subList(start, end), pageable, tempSet.size()); + + return new ResponseEntity<>( assembler.toModel(document, documentAssembler), HttpStatus.OK); + } + + @RequestMapping(path = "/schemakeys", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + HttpEntity> filterKeys( + @PageableDefault(size = 100, page = 0) Pageable pageable, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + PagedResourcesAssembler assembler){ + Set tempSet = ontologyRepository.getSchemaKeys(lang); + List tempList = new ArrayList(); + tempList.addAll(tempSet); + final int start = (int)pageable.getOffset(); + final int end = Math.min((start + pageable.getPageSize()), tempSet.size()); + Page document = new PageImpl<>(tempList.subList(start, end), pageable, tempSet.size()); + return new ResponseEntity<>(document, HttpStatus.OK); + } + + @RequestMapping(path = "/schemavalues", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + HttpEntity> filterValues( + @RequestParam(value = "schema", required = true) Collection schemas, + @PageableDefault(size = 100, page = 0) Pageable pageable, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + PagedResourcesAssembler assembler){ + Set tempSet = ontologyRepository.getSchemaValues(schemas,lang); + List tempList = new ArrayList(); + tempList.addAll(tempSet); + final int start = (int)pageable.getOffset(); + final int end = Math.min((start + pageable.getPageSize()), tempSet.size()); + Page document = new PageImpl<>(tempList.subList(start, end), pageable, tempSet.size()); + return new ResponseEntity<>(document, HttpStatus.OK); + } + @ResponseStatus(value = HttpStatus.NOT_FOUND, reason = "EntityModel not found") @ExceptionHandler(ResourceNotFoundException.class) public void handleError(HttpServletRequest req, Exception exception) { diff --git 
a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyPropertyController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyPropertyController.java index d823a9cbb..bb9e80325 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyPropertyController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyPropertyController.java @@ -23,8 +23,10 @@ import org.springframework.web.util.UriUtils; import uk.ac.ebi.spot.ols.model.v1.V1Property; import uk.ac.ebi.spot.ols.repository.v1.V1JsTreeRepository; +import uk.ac.ebi.spot.ols.repository.v1.V1JsTreeRepositoryExtn; import uk.ac.ebi.spot.ols.repository.v1.V1PropertyRepository; import uk.ac.ebi.spot.ols.service.Neo4jClient; +import uk.ac.ebi.spot.ols.service.ViewMode; import javax.servlet.http.HttpServletRequest; import java.util.Arrays; @@ -43,6 +45,9 @@ public class V1OntologyPropertyController { @Autowired V1JsTreeRepository jsTreeRepository; + + @Autowired + V1JsTreeRepositoryExtn jsTreeRepositoryExtn; @Autowired Neo4jClient neo4jClient; @@ -269,7 +274,7 @@ HttpEntity getJsTree( try { String decoded = UriUtils.decode(termId, "UTF-8"); - Object object= jsTreeRepository.getJsTreeForProperty(decoded, ontologyId, lang); + Object object= jsTreeRepositoryExtn.getJsTreeForPropertyByViewMode(decoded, ontologyId, lang, ViewMode.getFromShortName(viewMode), siblings); ObjectWriter ow = new ObjectMapper().writer().withDefaultPrettyPrinter(); return new HttpEntity(ow.writeValueAsString(object)); } catch (JsonProcessingException e) { diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologySKOSConceptController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologySKOSConceptController.java new file mode 100644 index 000000000..42a8a94d9 --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologySKOSConceptController.java @@ -0,0 +1,348 @@ +package uk.ac.ebi.spot.ols.controller.api.v1; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.ObjectWriter; +import io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Schema; +import io.swagger.v3.oas.annotations.tags.Tag; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.PageImpl; +import org.springframework.data.domain.Pageable; +import org.springframework.data.rest.webmvc.ResourceNotFoundException; +import org.springframework.data.web.PagedResourcesAssembler; +import org.springframework.hateoas.MediaTypes; +import org.springframework.hateoas.PagedModel; +import org.springframework.http.HttpEntity; +import org.springframework.http.HttpStatus; +import org.springframework.http.MediaType; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.*; +import org.springframework.web.util.UriUtils; + +import uk.ac.ebi.spot.ols.model.Edge; +import uk.ac.ebi.spot.ols.model.Node; +import uk.ac.ebi.spot.ols.model.SKOSRelation; +import uk.ac.ebi.spot.ols.model.v1.V1Term; +import uk.ac.ebi.spot.ols.repository.v1.TreeNode; +import uk.ac.ebi.spot.ols.repository.v1.V1TermRepository; + +import javax.servlet.http.HttpServletRequest; +import java.util.*; + +/** + * @author Erhun Giray 
TUNCAY + * @email giray.tuncay@tib.eu + * TIB-Leibniz Information Center for Science and Technology + */ +@RestController +@RequestMapping("/api/ontologies") +@Tag(name = "v1-ontology-skos-controller", description = "SKOS concept hierarchies and relations extracted from individuals (instances) from a particular ontology in this service") +public class V1OntologySKOSConceptController { + + private Logger log = LoggerFactory.getLogger(getClass()); + + @Autowired + private V1TermRepository termRepository; + + @Autowired + V1TermAssembler termAssembler; + + @Operation(description = "Get complete SKOS concept hierarchy or only top concepts based on alternative top concept identification methods and concept relations. If only top concepts are identified, they can be used to extract the following levels of the concept tree one by one using the /{onto}/conceptrelations/{iri} method with broader or narrower concept relations.") + @RequestMapping(path = "/{onto}/skos/tree", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + HttpEntity>> getSKOSConceptHierarchyByOntology( + @Parameter(description = "ontology ID", required = true) + @PathVariable("onto") String ontologyId, + @Parameter(description = "infer top concepts by schema (hasTopConcept) or TopConceptOf property or broader/narrower relationships", required = true) + @RequestParam(value = "find_roots", required = true, defaultValue = "SCHEMA") TopConceptEnum topConceptIdentification, + @Parameter(description = "infer from narrower or broader relationships", required = true) + @RequestParam(value = "narrower", required = true, defaultValue = "false") boolean narrower, + @Parameter(description = "Extract the whole tree with children or only the top concepts", required = true) + @RequestParam(value = "with_children", required = true, defaultValue = "false") boolean withChildren, + @RequestParam(value = "obsoletes", required = false) Boolean obsoletes, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + Pageable pageable) { + ontologyId = ontologyId.toLowerCase(); + if (TopConceptEnum.RELATIONSHIPS == topConceptIdentification) + return new ResponseEntity<>(termRepository.conceptTreeWithoutTop(ontologyId,narrower,withChildren,obsoletes,lang,pageable), HttpStatus.OK); + else + return new ResponseEntity<>(termRepository.conceptTree(ontologyId,TopConceptEnum.SCHEMA == topConceptIdentification,narrower, withChildren,obsoletes,lang,pageable), HttpStatus.OK); + } + + @Operation(description = "Display complete SKOS concept hierarchy or only top concepts based on alternative top concept identification methods and concept relations. 
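As a usage illustration for the tree endpoint above, here is a hedged client-side sketch: host, port and the efo ontology ID are assumptions, while the path and query parameters come from the mapping in the diff.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class SkosTreeExample {
    public static void main(String[] args) throws Exception {
        // find_roots=SCHEMA infers top concepts via hasTopConcept;
        // with_children=false fetches only the top concepts, not the whole tree.
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/api/ontologies/efo/skos/tree"
                        + "?find_roots=SCHEMA&narrower=false&with_children=false"))
                .header("Accept", "application/json")
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.body());
    }
}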
If only top concepts are identified, they can be used to extract the following levels of the concept tree one by one using the /{onto}/displayconceptrelations/{iri} method with broader or narrower concept relations.") + @RequestMapping(path = "/{onto}/skos/displaytree", produces = {MediaType.TEXT_PLAIN_VALUE}, method = RequestMethod.GET) + @ResponseBody + HttpEntity displaySKOSConceptHierarchyByOntology( + @Parameter(description = "ontology ID", required = true) + @PathVariable("onto") String ontologyId, + @Parameter(description = "infer top concepts by schema (hasTopConcept) or TopConceptOf property or broader/narrower relationships", required = true) + @RequestParam(value = "find_roots", required = true, defaultValue = "SCHEMA") TopConceptEnum topConceptIdentification, + @Parameter(description = "infer from narrower or broader relationships", required = true) + @RequestParam(value = "narrower", required = true, defaultValue = "false") boolean narrower, + @Parameter(description = "Extract the whole tree with children or only the top concepts", required = true) + @RequestParam(value = "with_children", required = true, defaultValue = "false") boolean withChildren, + @Parameter(description = "display related concepts", required = true) + @RequestParam(value = "display_related", required = true, defaultValue = "false") boolean displayRelated, + @RequestParam(value = "obsoletes", required = false) Boolean obsoletes, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + Pageable pageable) { + ontologyId = ontologyId.toLowerCase(); + List> rootIndividuals = null; + if(TopConceptEnum.RELATIONSHIPS == topConceptIdentification) + rootIndividuals = termRepository.conceptTreeWithoutTop(ontologyId,narrower,withChildren,obsoletes,lang,pageable); + else + rootIndividuals = termRepository.conceptTree(ontologyId,TopConceptEnum.SCHEMA == topConceptIdentification,narrower, withChildren,obsoletes,lang,pageable); + StringBuilder sb = new StringBuilder(); + for (TreeNode root : rootIndividuals) { + sb.append(root.getIndex() + " , "+ root.getData().label + " , " + root.getData().iri).append("\n"); + sb.append(generateConceptHierarchyTextByOntology(root, displayRelated)); + } + + return new HttpEntity(sb.toString()); + } + + @Operation(description = "Get partial SKOS concept hierarchy based on the encoded iri of the designated top concept") + @RequestMapping(path = "/{onto}/skos/{iri}/tree", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + HttpEntity> getSKOSConceptHierarchyByOntologyAndIri( + @Parameter(description = "ontology ID", required = true) + @PathVariable("onto") String ontologyId, + @Parameter(description = "encoded concept IRI", required = true) + @PathVariable("iri") String iri, + @Parameter(description = "infer from narrower or broader relationships", required = true) + @RequestParam(value = "narrower", required = true, defaultValue = "false") boolean narrower, + @Parameter(description = "index value for the root term", required = true) + @RequestParam(value = "index", required = true, defaultValue = "1") String index, + @RequestParam(value = "obsoletes", required = false) Boolean obsoletes, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + Pageable pageable) { + ontologyId = ontologyId.toLowerCase(); + TreeNode topConcept = new TreeNode(new V1Term()); + String decodedIri; + decodedIri = UriUtils.decode(iri, "UTF-8"); + topConcept = termRepository.conceptSubTree(ontologyId, 
decodedIri, narrower, index, obsoletes, lang, pageable); + + if (topConcept.getData().iri == null) + throw new ResourceNotFoundException("No roots could be found for " + ontologyId ); + return new ResponseEntity<>(topConcept, HttpStatus.OK); + } + + @Operation(description = "Display partial SKOS concept hierarchy based on the encoded iri of the designated top concept") + @RequestMapping(path = "/{onto}/skos/{iri}/displaytree", produces = {MediaType.TEXT_PLAIN_VALUE}, method = RequestMethod.GET) + @ResponseBody + HttpEntity displaySKOSConceptHierarchyByOntologyAndIri( + @Parameter(description = "ontology ID", required = true) + @PathVariable("onto") String ontologyId, + @Parameter(description = "encoded concept IRI", required = true) + @PathVariable("iri") String iri, + @Parameter(description = "infer from narrower or broader relationships", required = true) + @RequestParam(value = "narrower", required = true, defaultValue = "false") boolean narrower, + @Parameter(description = "display related concepts", required = true) + @RequestParam(value = "display_related", required = true, defaultValue = "false") boolean displayRelated, + @Parameter(description = "index value for the root term", required = true) + @RequestParam(value = "index", required = true, defaultValue = "1") String index, + @RequestParam(value = "obsoletes", required = false) Boolean obsoletes, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + Pageable pageable) { + ontologyId = ontologyId.toLowerCase(); + TreeNode topConcept = new TreeNode(new V1Term()); + String decodedIri; + StringBuilder sb = new StringBuilder(); + decodedIri = UriUtils.decode(iri, "UTF-8"); + topConcept = termRepository.conceptSubTree(ontologyId, decodedIri, narrower, index, obsoletes, lang, pageable); + + sb.append(topConcept.getIndex() + " , "+ topConcept.getData().label + " , " + topConcept.getData().iri).append("\n"); + sb.append(generateConceptHierarchyTextByOntology(topConcept, displayRelated)); + + return new HttpEntity(sb.toString()); + } + + @Operation(description = "Broader, Narrower and Related concept relations of a concept are listed in JSON if the concept iri is provided in encoded format.") + @RequestMapping(path = "/{onto}/skos/{iri}/relations", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + public HttpEntity> findRelatedConcepts( + @Parameter(description = "ontology ID", required = true) + @PathVariable("onto") String ontologyId, + @Parameter(description = "encoded concept IRI", required = true) + @PathVariable("iri") String iri, + @Parameter(description = "skos based concept relation type", required = true) + @RequestParam(value = "relation_type", required = true, defaultValue = "broader") + @Schema(type = "string", allowableValues = { "broader", "narrower", "related" }) String relationType, + @RequestParam(value = "obsoletes", required = false) Boolean obsoletes, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + Pageable pageable, + PagedResourcesAssembler assembler) { + + ontologyId = ontologyId.toLowerCase(); + List related = new ArrayList(); + String decodedIri = UriUtils.decode(iri, "UTF-8"); + related = termRepository.findRelated(ontologyId, decodedIri, relationType,lang); + + + final int start = (int)pageable.getOffset(); + final int end = Math.min((start + pageable.getPageSize()), related.size()); + Page conceptPage = new PageImpl<>(related.subList(start, end), pageable, related.size()); + + return new 
ResponseEntity<>( assembler.toModel(conceptPage), HttpStatus.OK); + + } + + @Operation(description = "Broader, Narrower and Related concept relations of a concept are displayed as text if the concept iri is provided in encoded format.") + @RequestMapping(path = "/{onto}/skos/{iri}/displayrelations", produces = {MediaType.TEXT_PLAIN_VALUE}, method = RequestMethod.GET) + @ResponseBody + public HttpEntity displayRelatedConcepts( + @Parameter(description = "ontology ID", required = true) + @PathVariable("onto") String ontologyId, + @Parameter(description = "encoded concept IRI", required = true) + @PathVariable("iri") String iri, + @Parameter(description = "skos based concept relation type", required = true) + @RequestParam(value = "relation_type", required = true, defaultValue = "broader") + @Schema(type = "string", allowableValues = { "broader", "narrower", "related" }) String relationType, + @RequestParam(value = "obsoletes", required = false) Boolean obsoletes, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + Pageable pageable, + PagedResourcesAssembler assembler) { + StringBuilder sb = new StringBuilder(); + ontologyId = ontologyId.toLowerCase(); + List related = new ArrayList(); + String decodedIri = UriUtils.decode(iri, "UTF-8"); + related = termRepository.findRelated(ontologyId, decodedIri, relationType,lang); + + final int start = (int)pageable.getOffset(); + final int end = Math.min((start + pageable.getPageSize()), related.size()); + Page conceptPage = new PageImpl<>(related.subList(start, end), pageable, related.size()); + int count = 0; + for (V1Term individual : conceptPage.getContent()) + sb.append(++count).append(" , ").append(individual.label).append(" , ").append(individual.iri).append("\n"); + + return new HttpEntity<>( sb.toString()); + + } + + @Operation(description = "Broader, Narrower and Related concept relations of a concept are listed in JSON if the concept iri is provided in encoded format. The relationship is identified indirectly based on the related concept's relation to the concept in question. This requires traversing all the available concepts and checking if they are related to the concept in question. For this reason, this method is relatively slower than the displayconceptrelations method. 
Nevertheless, it makes it possible to identify unforeseen relations of the concept in question")
+    @RequestMapping(path = "/{onto}/skos/{iri}/indirectrelations", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET)
+    public HttpEntity<List<V1Term>> findRelatedConceptsIndirectly(
+            @Parameter(description = "ontology ID", required = true)
+            @PathVariable("onto") String ontologyId,
+            @Parameter(description = "encoded concept IRI", required = true)
+            @PathVariable("iri") String iri,
+            @Parameter(description = "skos based concept relation type", required = true)
+            @RequestParam(value = "relation_type", required = true, defaultValue = "broader")
+            @Schema(type = "string", allowableValues = { "broader", "narrower", "related" }) String relationType,
+            @RequestParam(value = "obsoletes", required = false) Boolean obsoletes,
+            @RequestParam(value = "lang", required = false, defaultValue = "en") String lang,
+            Pageable pageable) {
+
+        ontologyId = ontologyId.toLowerCase();
+        List<V1Term> related = new ArrayList<V1Term>();
+        String decodedIri = UriUtils.decode(iri, "UTF-8");
+        related = termRepository.findRelatedIndirectly(ontologyId, decodedIri, relationType, obsoletes, lang, pageable);
+
+        return new ResponseEntity<>(related, HttpStatus.OK);
+    }
+
+    @Operation(description = "Broader, Narrower and Related concept relations of a concept are listed in JSON if the concept iri is provided in encoded format. The relationship is identified indirectly based on the related concept's relation to the concept in question. This requires traversing all the available concepts and checking if they are related to the concept in question. For this reason, this method is relatively slower than the displayconceptrelations method. Nevertheless, it makes it possible to identify unforeseen relations of the concept in question")
+    @RequestMapping(path = "/{onto}/skos/{iri}/displayindirectrelations", produces = {MediaType.TEXT_PLAIN_VALUE}, method = RequestMethod.GET)
+    @ResponseBody
+    public HttpEntity displayRelatedConceptsIndirectly(
+            @Parameter(description = "ontology ID", required = true)
+            @PathVariable("onto") String ontologyId,
+            @Parameter(description = "encoded concept IRI", required = true)
+            @PathVariable("iri") String iri,
+            @Parameter(description = "skos based concept relation type", required = true)
+            @RequestParam(value = "relation_type", required = true, defaultValue = "broader")
+            @Schema(type = "string", allowableValues = { "broader", "narrower", "related" }) String relationType,
+            @RequestParam(value = "obsoletes", required = false) Boolean obsoletes,
+            @RequestParam(value = "lang", required = false, defaultValue = "en") String lang,
+            Pageable pageable) {
+        StringBuilder sb = new StringBuilder();
+        ontologyId = ontologyId.toLowerCase();
+        List<V1Term> related = new ArrayList<V1Term>();
+        String decodedIri = UriUtils.decode(iri, "UTF-8");
+        related = termRepository.findRelatedIndirectly(ontologyId, decodedIri, relationType, obsoletes, lang, pageable);
+
+        int count = 0;
+        for (V1Term individual : related)
+            sb.append(++count).append(" , ").append(individual.label).append(" , ").append(individual.iri).append("\n");
+
+        return new ResponseEntity<>(sb.toString(), HttpStatus.OK);
+    }
+
+    @Operation(description = "Node and Edge definitions needed to visualize the nodes that are directly related to the subject term. Ontology ID and encoded iri are required. 
") + @RequestMapping(path = "/{onto}/skos/{iri}/graph", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + public HttpEntity retrieveImmediateGraph( + @Parameter(description = "ontology ID", required = true) + @PathVariable("onto") String ontologyId, + @Parameter(description = "encoded concept IRI", required = true) + @PathVariable("iri") String iri, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang){ + + List related = new ArrayList(); + String decodedIri = UriUtils.decode(iri, "UTF-8"); + + V1Term subjectTerm = termRepository.findByOntologyAndIri(ontologyId, decodedIri, lang); + + related = termRepository.findRelated(ontologyId, decodedIri, "related",lang); + + List narrower = new ArrayList(); + narrower = termRepository.findRelated(ontologyId, decodedIri, "narrower",lang); + + List broader = new ArrayList(); + broader = termRepository.findRelated(ontologyId, decodedIri, "broader",lang); + + Set relatedNodes = new HashSet(); + related.forEach(term -> relatedNodes.add(new Node(term.iri, term.label))); + Set narrowerNodes = new HashSet(); + narrower.forEach(term -> narrowerNodes.add(new Node(term.iri, term.label))); + Set broaderNodes = new HashSet(); + broader.forEach(term -> broaderNodes.add(new Node(term.iri, term.label))); + + Set edges = new HashSet(); + relatedNodes.forEach(node -> edges.add(new Edge(decodedIri, node.getIri(), "related", SKOSRelation.related.getPropertyName()))); + narrowerNodes.forEach(node -> edges.add(new Edge(decodedIri, node.getIri(), "narrower",SKOSRelation.narrower.getPropertyName()))); + broaderNodes.forEach(node -> edges.add(new Edge(decodedIri, node.getIri(), "broader",SKOSRelation.broader.getPropertyName()))); + + Set nodes = new HashSet(); + nodes.add(new Node(decodedIri,subjectTerm.label)); + nodes.addAll(relatedNodes); + nodes.addAll(broaderNodes); + nodes.addAll(narrowerNodes); + + + Map graph = new HashMap(); + graph.put("nodes", nodes); + graph.put("edges", edges); + ObjectWriter ow = new ObjectMapper().writer().withDefaultPrettyPrinter(); + try { + return new ResponseEntity<>(ow.writeValueAsString(graph),HttpStatus.OK); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + } + + public StringBuilder generateConceptHierarchyTextByOntology(TreeNode rootConcept, boolean displayRelated) { + StringBuilder sb = new StringBuilder(); + for (TreeNode childConcept : rootConcept.getChildren()) { + sb.append(childConcept.getIndex() + " , "+ childConcept.getData().label + " , " + childConcept.getData().iri).append("\n"); + sb.append(generateConceptHierarchyTextByOntology(childConcept,displayRelated)); + } + if(displayRelated) + for (TreeNode relatedConcept : rootConcept.getRelated()) { + sb.append(relatedConcept.getIndex() + " , "+ relatedConcept.getData().label + " , " + relatedConcept.getData().iri).append("\n"); + sb.append(generateConceptHierarchyTextByOntology(relatedConcept,displayRelated)); + } + return sb; + } + + @ResponseStatus(value = HttpStatus.NOT_FOUND, reason = "Resource not found") + @ExceptionHandler(ResourceNotFoundException.class) + public void handleError(HttpServletRequest req, Exception exception) { + } +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyTermController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyTermController.java index 297594482..a6fafc9df 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyTermController.java +++ 
b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1OntologyTermController.java @@ -22,6 +22,7 @@ import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.*; import org.springframework.web.util.UriUtils; +import uk.ac.ebi.spot.ols.model.v1.V1Individual; import uk.ac.ebi.spot.ols.model.v1.V1Term; import uk.ac.ebi.spot.ols.repository.v1.V1GraphRepository; import uk.ac.ebi.spot.ols.repository.v1.V1JsTreeRepository; @@ -31,6 +32,12 @@ import javax.servlet.http.HttpServletRequest; import java.util.Arrays; import java.util.Collections; +import java.util.List; +import java.util.Map; + +import java.nio.charset.StandardCharsets; +import uk.ac.ebi.spot.ols.repository.v1.V1JsTreeRepositoryExtn; +import uk.ac.ebi.spot.ols.service.ViewMode; /** * @author Simon Jupp @@ -51,11 +58,17 @@ public class V1OntologyTermController { @Autowired V1TermAssembler termAssembler; + @Autowired + V1IndividualAssembler individualAssembler; + @Autowired V1PreferredRootTermAssembler preferredRootTermAssembler; @Autowired V1JsTreeRepository jsTreeRepository; + + @Autowired + V1JsTreeRepositoryExtn jsTreeRepositoryExtn; @Autowired V1GraphRepository graphRepository; @@ -406,6 +419,139 @@ HttpEntity> ancestors(@PathVariable("onto") return new ResponseEntity<>( assembler.toModel(ancestors, termAssembler), HttpStatus.OK); } + @RequestMapping(path = "/{onto}/terms/{iri}/superclasses", produces = {MediaType.APPLICATION_JSON_VALUE, + MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + HttpEntity> getSuperClasses( + @PathVariable("onto") + @Parameter(name = "onto", + description = "The ID of the ontology. For example for Data Use Ontology, the ID is duo.", + example = "duo") String ontologyId, + @PathVariable("iri") + @Parameter(name = "iri", + description = "The IRI of the term, this value must be single URL encoded", + example = "http%3A%2F%2Fpurl.obolibrary.org%2Fobo%2FDUO_0000017") String termId, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + @Parameter(hidden = true) Pageable pageable, + @Parameter(hidden = true) PagedResourcesAssembler assembler) { + + ontologyId = ontologyId.toLowerCase(); + + String decoded = UriUtils.decode(termId, "UTF-8"); + String entityId = ontologyId+"+class+"+decoded; + Page superClasses = graphRepository.getSuperClassPaginated(entityId, lang, pageable); + if (superClasses == null) + throw new ResourceNotFoundException("No super classes could be found for " + ontologyId + + " and " + termId); + + return new ResponseEntity<>( assembler.toModel(superClasses, termAssembler), HttpStatus.OK); + } + + @RequestMapping(path = "/{onto}/terms/{iri}/equivalentclasses", produces = {MediaType.APPLICATION_JSON_VALUE, + MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + HttpEntity> getEquivalentClasses( + @PathVariable("onto") + @Parameter(name = "onto", + description = "The ID of the ontology. 
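A hedged client-side sketch for the new superclasses endpoint above: host and port are assumptions, while the duo ontology ID and the encoded DUO_0000017 IRI are taken from the parameter examples in the diff.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class SuperClassesExample {
    public static void main(String[] args) throws Exception {
        // The IRI is sent URL-encoded as a single path segment.
        String encodedIri = "http%3A%2F%2Fpurl.obolibrary.org%2Fobo%2FDUO_0000017";
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/api/ontologies/duo/terms/"
                        + encodedIri + "/superclasses?lang=en"))
                .header("Accept", "application/json")
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.body());
    }
}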
For example for Data Use Ontology, the ID is duo.", + example = "duo") String ontologyId, + @PathVariable("iri") + @Parameter(name = "iri", + description = "The IRI of the term, this value must be single URL encoded", + example = "http%3A%2F%2Fpurl.obolibrary.org%2Fobo%2FDUO_0000017") String termId, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + @Parameter(hidden = true) Pageable pageable, + @Parameter(hidden = true) PagedResourcesAssembler assembler) { + + ontologyId = ontologyId.toLowerCase(); + + String decoded = UriUtils.decode(termId, "UTF-8"); + String entityId = ontologyId+"+class+"+decoded; + Page equivalentClasses = graphRepository.getEquivalentClassPaginated(entityId, lang, pageable); + if (equivalentClasses == null) + throw new ResourceNotFoundException("No equivalent classes could be found for " + ontologyId + + " and " + termId); + + return new ResponseEntity<>( assembler.toModel(equivalentClasses, termAssembler), HttpStatus.OK); + } + + + @RequestMapping(path = "/{onto}/terms/{iri}/relatedfrom", produces = {MediaType.APPLICATION_JSON_VALUE, + MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + HttpEntity> getRelatedFrom( + @PathVariable("onto") + @Parameter(name = "onto", + description = "The ID of the ontology. For example for Data Use Ontology, the ID is duo.", + example = "duo") String ontologyId, + @PathVariable("iri") + @Parameter(name = "iri", + description = "The IRI of the term, this value must be single URL encoded", + example = "http%3A%2F%2Fpurl.obolibrary.org%2Fobo%2FDUO_0000017") String termId, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + @Parameter(hidden = true) Pageable pageable, + @Parameter(hidden = true) PagedResourcesAssembler assembler) { + + ontologyId = ontologyId.toLowerCase(); + + String decoded = UriUtils.decode(termId, "UTF-8"); + String entityId = ontologyId+"+class+"+decoded; + Page relatedFroms = graphRepository.getRelatedFromPaginated(entityId, lang, pageable); + if (relatedFroms == null) + throw new ResourceNotFoundException("No related from terms could be found for " + ontologyId + + " and " + termId); + + return new ResponseEntity<>( assembler.toModel(relatedFroms, termAssembler), HttpStatus.OK); + } + + @RequestMapping(path = "/{onto}/terms/{iri}/instances", produces = {MediaType.APPLICATION_JSON_VALUE, + MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + HttpEntity> getInstances( + @PathVariable("onto") + @Parameter(name = "onto", + description = "The ID of the ontology. 
For example for Data Use Ontology, the ID is duo.", + example = "duo") String ontologyId, + @PathVariable("iri") + @Parameter(name = "iri", + description = "The IRI of the term, this value must be single URL encoded", + example = "http%3A%2F%2Fpurl.obolibrary.org%2Fobo%2FDUO_0000017") String termId, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + @Parameter(hidden = true) Pageable pageable, + @Parameter(hidden = true) PagedResourcesAssembler assembler) { + + ontologyId = ontologyId.toLowerCase(); + + String decoded = UriUtils.decode(termId, "UTF-8"); + String entityId = ontologyId+"+class+"+decoded; + Page instances = graphRepository.getTermInstancesPaginated(entityId, lang, pageable); + if (instances == null) + throw new ResourceNotFoundException("No instances could be found for " + ontologyId + + " and " + termId); + + return new ResponseEntity<>( assembler.toModel(instances, individualAssembler), HttpStatus.OK); + } + + @RequestMapping(path = "/{onto}/terms/{iri}/json", produces = {MediaType.APPLICATION_JSON_VALUE, + MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + HttpEntity getJson( + @PathVariable("onto") + @Parameter(name = "onto", + description = "The ID of the ontology. For example for Data Use Ontology, the ID is duo.", + example = "duo") String ontologyId, + @PathVariable("iri") + @Parameter(name = "iri", + description = "The IRI of the term, this value must be single URL encoded", + example = "http%3A%2F%2Fpurl.obolibrary.org%2Fobo%2FDUO_0000017") String termId) { + + ontologyId = ontologyId.toLowerCase(); + + String decoded = UriUtils.decode(termId, "UTF-8"); + String entityId = ontologyId+"+class+"+decoded; + String json = graphRepository.getTermJson(entityId); + if (json == null) + throw new ResourceNotFoundException("No _json could be found for " + ontologyId + + " and " + termId); + + return new ResponseEntity<>( json, HttpStatus.OK); + } + @RequestMapping(path = "/{onto}/terms/{iri}/jstree", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) @@ -425,8 +571,8 @@ HttpEntity graphJsTree( ontologyId = ontologyId.toLowerCase(); try { - String decodedTermId = UriUtils.decode(termId, "UTF-8"); - Object object= jsTreeRepository.getJsTreeForClass(decodedTermId, ontologyId, lang); + String decodedTermId = decodeUrl(termId); + Object object= jsTreeRepositoryExtn.getJsTreeForClassByViewMode(decodedTermId, ontologyId, lang, ViewMode.getFromShortName(viewMode), siblings); ObjectWriter ow = new ObjectMapper().writer().withDefaultPrettyPrinter(); return new HttpEntity(ow.writeValueAsString(object)); } catch (JsonProcessingException e) { @@ -485,7 +631,7 @@ HttpEntity graphJson( Object object= graphRepository.getGraphForClass(decoded, ontologyId, lang); ObjectWriter ow = new ObjectMapper().writer().withDefaultPrettyPrinter(); return new HttpEntity(ow.writeValueAsString(object)); - } catch (JsonProcessingException e) { + } catch (Exception e) { e.printStackTrace(); } throw new ResourceNotFoundException(); @@ -787,5 +933,13 @@ HttpEntity> termHierarchicalAncestorsByOntology( public void handleError(HttpServletRequest req, Exception exception) { } + + private static String decodeUrl(String url) { + if(url.contains("%") || url.contains("+")) + { + return decodeUrl(java.net.URLDecoder.decode(url, StandardCharsets.UTF_8)); + } + return url; + } } diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1PropertyController.java 
b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1PropertyController.java index 2435371b7..15e1ae04f 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1PropertyController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1PropertyController.java @@ -112,8 +112,8 @@ HttpEntity> getPropertiesByIriAndIsDefiningOntology(@Path String decoded = null; decoded = UriUtils.decode(termId, "UTF-8"); return getPropertiesByIdAndIsDefiningOntology(decoded, null, null, lang, pageable, assembler); - } - + } + @RequestMapping(path = "/findByIdAndIsDefiningOntology", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) HttpEntity> getPropertiesByIdAndIsDefiningOntology( @RequestParam(value = "iri", required = false) @@ -149,7 +149,7 @@ else if (oboId != null) { return new ResponseEntity<>( assembler.toModel(terms, termAssembler), HttpStatus.OK); } - + @ResponseStatus(value = HttpStatus.NOT_FOUND, reason = "EntityModel not found") @ExceptionHandler(ResourceNotFoundException.class) public void handleError(HttpServletRequest req, Exception exception) { diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java index 55896fb00..139d27429 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SearchController.java @@ -20,8 +20,10 @@ import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.response.FacetField; import org.apache.solr.client.solrj.response.FacetField.Count; +import io.swagger.v3.oas.annotations.Parameter; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.common.SolrDocument; +import org.apache.solr.common.SolrDocumentList; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -29,11 +31,10 @@ import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; - import com.google.gson.Gson; import com.google.gson.JsonObject; import com.google.gson.JsonParser; - +import uk.ac.ebi.spot.ols.model.FilterOption; import org.springframework.web.bind.annotation.RestController; import uk.ac.ebi.spot.ols.repository.Validation; import uk.ac.ebi.spot.ols.repository.solr.OlsSolrClient; @@ -43,6 +44,7 @@ import uk.ac.ebi.spot.ols.repository.v1.V1OntologyRepository; import uk.ac.ebi.spot.ols.repository.v1.mappers.AnnotationExtractor; + import static uk.ac.ebi.ols.shared.DefinedFields.*; @@ -67,10 +69,18 @@ public void search( @Parameter(name = "q", description = "The terms to search. By default the search is performed over term labels, synonyms, descriptions, identifiers and annotation properties.", example = "disease or liver+disease") String query, + @RequestParam(value = "schema", required = false) Collection schemas, + @RequestParam(value = "classification", required = false) Collection classifications, @RequestParam(value = "ontology", required = false) @Parameter(name = "ontology", description = "Restrict a search to a set of ontologies e.g. 
ontology=efo,bfo", example = "efo,bfo") Collection ontologies, + @Parameter(description = "Set to true (default setting is false) for intersection (default behavior is union) of classifications.") + @RequestParam(value = "exclusive", required = false, defaultValue = "false") boolean exclusive, + @Parameter(description = "Use License option to filter based on license.label, license.logo and license.url variables. " + + "Use Composite Option to filter based on the objects (i.e. collection, subject) within the classifications variable. " + + "Use Linear option to filter based on String and Collection based variables.") + @RequestParam(value = "option", required = false, defaultValue = "LINEAR") FilterOption filterOption, @RequestParam(value = "type", required = false) @Parameter(name = "type", description = "Restrict a search to an entity type, one of {class,property,individual,ontology}", @@ -122,6 +132,8 @@ public void search( HttpServletResponse response ) throws IOException, SolrServerException { + ontologies = ontologyRepository.filterOntologyIDs(schemas,classifications,ontologies,exclusive,filterOption,lang); + final SolrQuery solrQuery = new SolrQuery(); // 1 if (queryFields == null) { @@ -195,7 +207,7 @@ public void search( if (groupField != null) { solrQuery.addFilterQuery("{!collapse field=iri}"); - solrQuery.add("expand=true", "true"); + solrQuery.add("expand", "true"); solrQuery.add("expand.rows", "100"); } @@ -262,7 +274,8 @@ public void search( QueryResponse qr = solrClient.dispatchSearch(solrQuery, "ols4_entities"); - List docs = new ArrayList<>(); + List docs = parseSolrDocs(qr.getResults(), fieldList, lang); + /*List docs = new ArrayList<>(); for(SolrDocument res : qr.getResults()) { String _json = (String)res.get("_json"); if(_json == null) { @@ -331,7 +344,7 @@ public void search( } docs.add(outDoc); - } + }*/ Map responseHeader = new HashMap<>(); responseHeader.put("status", 0); @@ -369,12 +382,109 @@ public void search( * Fix: End */ + /** + * Fix: Start + * issue - https://github.com/TIBHannover/ols4/issues/78 + * + */ + if(qr.getExpandedResults() != null && qr.getExpandedResults().size() > 0) + responseObj.put("expanded", parseExpandedSolrResults(qr.getExpandedResults(), fieldList, lang)); + + /** + * Fix: End + */ response.setContentType(MediaType.APPLICATION_JSON_VALUE); response.setCharacterEncoding(StandardCharsets.UTF_8.name()); response.getOutputStream().write(gson.toJson(responseObj).getBytes(StandardCharsets.UTF_8)); response.flushBuffer(); } + private Map parseExpandedSolrResults(Map expandedResults, Collection fieldList, + String lang) { + Map result = new HashMap<>(); + expandedResults.entrySet().parallelStream().forEach((entry) -> { + Map expandedResult = new HashMap<>(); + expandedResult.put("numFound", entry.getValue().getNumFound()); + expandedResult.put("start", entry.getValue().getStart()); + expandedResult.put("docs", parseSolrDocs(entry.getValue(), fieldList, lang)); + result.put(entry.getKey(), expandedResult); + }); + return result; + } + + private List parseSolrDocs(SolrDocumentList results, Collection fieldList, String lang) { + List docs = new ArrayList<>(); + for(SolrDocument res : results) { + String _json = (String)res.get("_json"); + if(_json == null) { + throw new RuntimeException("_json was null"); + } + + JsonObject json = RemoveLiteralDatatypesTransform.transform( + LocalizationTransform.transform( JsonParser.parseString( _json ), lang) + ).getAsJsonObject(); + + Map outDoc = new HashMap<>(); + + if (fieldList == null) { + fieldList = new 
HashSet<>(); + } + // default fields + if (fieldList.isEmpty()) { + fieldList.add("id"); + fieldList.add("iri"); + fieldList.add("ontology_name"); + fieldList.add("label"); + fieldList.add("description"); + fieldList.add("short_form"); + fieldList.add("obo_id"); + fieldList.add("type"); + fieldList.add("ontology_prefix"); + } + + if (fieldList.contains("id")) outDoc.put("id", JsonHelper.getString(json, "id")); + if (fieldList.contains("iri")) outDoc.put("iri", JsonHelper.getString(json, "iri")); + if (fieldList.contains("ontology_name")) outDoc.put("ontology_name", JsonHelper.getString(json, "ontologyId")); + if (fieldList.contains("label")) { + var label = outDoc.put("label", JsonHelper.getString(json, "label")); + if(label!=null) { + outDoc.put("label", label); + } + } + if (fieldList.contains("description")) outDoc.put("description", JsonHelper.getStrings(json, "definition")); + if (fieldList.contains("short_form")) outDoc.put("short_form", JsonHelper.getString(json, "shortForm")); + if (fieldList.contains("obo_id")) outDoc.put("obo_id", JsonHelper.getString(json, "curie")); + if (fieldList.contains(IS_DEFINING_ONTOLOGY.getOls3Text())) outDoc.put(IS_DEFINING_ONTOLOGY.getOls3Text(), + JsonHelper.getString(json, IS_DEFINING_ONTOLOGY.getText()) != null && + JsonHelper.getString(json, IS_DEFINING_ONTOLOGY.getText()).equals("true")); + if (fieldList.contains("type")) { + outDoc.put("type", JsonHelper.getType(json, "type")); + } + if (fieldList.contains("synonym")) outDoc.put("synonym", JsonHelper.getStrings(json, "synonym")); + if (fieldList.contains("ontology_prefix")) outDoc.put("ontology_prefix", JsonHelper.getString(json, "ontologyPreferredPrefix")); + if (fieldList.contains("subset")) outDoc.put("subset", JsonHelper.getStrings(json, "http://www.geneontology.org/formats/oboInOwl#inSubset")); + if (fieldList.contains("ontology_iri")) outDoc.put("ontology_iri", JsonHelper.getStrings(json, "ontologyIri").get(0)); + if (fieldList.contains("score")) outDoc.put("score", res.get("score")); + + // Include annotations that were specified with _annotation + boolean anyAnnotations = fieldList.stream() + .anyMatch(s -> s.endsWith("_annotation")); + if (anyAnnotations) { + Stream annotationFields = fieldList.stream().filter(s -> s.endsWith("_annotation")); + Map termAnnotations = AnnotationExtractor.extractAnnotations(json); + + annotationFields.forEach(annotationName -> { + // Remove _annotation suffix to get plain annotation name + String fieldName = annotationName.replaceFirst("_annotation$", ""); + outDoc.put(annotationName, termAnnotations.get(fieldName)); + }); + } + + docs.add(outDoc); + } + return docs; + } + private Map> parseFacetFields(List facetFields) { Map> facetFieldsMap = new HashMap<>(); List newFacetFields; @@ -425,9 +535,4 @@ private String createUnionQuery(String query, String[] fields, boolean exact) { } return builder.toString(); } - - - - - } diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java index 8aad57cba..31e547b3f 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SelectController.java @@ -13,6 +13,7 @@ import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.MediaType; +import uk.ac.ebi.spot.ols.model.FilterOption; import 
org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; @@ -52,10 +53,18 @@ public void select( @Parameter(name = "q", description = "The terms to search. By default the search is performed over term labels, synonyms, descriptions, identifiers and annotation properties.", example = "disease or liver+disease") String query, + @RequestParam(value = "schema", required = false) Collection schemas, + @RequestParam(value = "classification", required = false) Collection classifications, @RequestParam(value = "ontology", required = false) @Parameter(name = "ontology", description = "Restrict a search to a set of ontologies e.g. ontology=efo,bfo", example = "efo,bfo") Collection ontologies, + @Parameter(description = "Set to true (default setting is false) for intersection (default behavior is union) of classifications.") + @RequestParam(value = "exclusive", required = false, defaultValue = "false") boolean exclusive, + @Parameter(description = "Use License option to filter based on license.label, license.logo and license.url variables. " + + "Use Composite Option to filter based on the objects (i.e. collection, subject) within the classifications variable. " + + "Use Linear option to filter based on String and Collection based variables.") + @RequestParam(value = "option", required = false, defaultValue = "LINEAR") FilterOption filterOption, @RequestParam(value = "type", required = false) @Parameter(name = "type", description = "Restrict a search to an entity type, one of {class,property,individual,ontology}", @@ -89,6 +98,8 @@ public void select( HttpServletResponse response ) throws IOException, SolrServerException { + ontologies = ontologyRepository.filterOntologyIDs(schemas,classifications,ontologies,exclusive,filterOption,lang); + final SolrQuery solrQuery = new SolrQuery(); // 1 String queryLc = query.toLowerCase(); @@ -199,7 +210,10 @@ public void select( outDoc.put("type", JsonHelper.getType(json, "type")); } if (fieldList.contains(SYNONYM.getText())) outDoc.put(SYNONYM.getText(), JsonHelper.getStrings(json, SYNONYM.getText())); - if (fieldList.contains("ontology_prefix")) outDoc.put("ontology_prefix", JsonHelper.getString(json, "ontologyPreferredPrefix")); + if (fieldList.contains("ontology_prefix") && JsonHelper.getString(json, "ontologyPreferredPrefix") != null) + outDoc.put("ontology_prefix", JsonHelper.getString(json, "ontologyPreferredPrefix")); + else if (fieldList.contains("ontology_prefix")) + outDoc.put("ontology_prefix", JsonHelper.getString(json, "ontologyId").toUpperCase()); docs.add(outDoc); } diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SuggestController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SuggestController.java index 256fbbb7f..f76e4c49d 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SuggestController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v1/V1SuggestController.java @@ -1,18 +1,16 @@ package uk.ac.ebi.spot.ols.controller.api.v1; import com.google.gson.Gson; +import io.swagger.v3.oas.annotations.Parameter; import io.swagger.v3.oas.annotations.tags.Tag; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrServerException; -import org.apache.solr.client.solrj.impl.HttpSolrClient; import org.apache.solr.client.solrj.response.QueryResponse; -import org.apache.solr.client.solrj.response.Suggestion; import 
org.apache.solr.client.solrj.util.ClientUtils; import org.apache.solr.common.SolrDocument; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.hateoas.server.ExposesResourceFor; import org.springframework.http.MediaType; -import org.springframework.stereotype.Controller; +import uk.ac.ebi.spot.ols.model.FilterOption; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; @@ -43,12 +41,23 @@ public class V1SuggestController { @RequestMapping(path = "/api/suggest", produces = {MediaType.APPLICATION_JSON_VALUE}, method = RequestMethod.GET) public void suggest( @RequestParam("q") String query, + @RequestParam(value = "schema", required = false) Collection schemas, + @RequestParam(value = "classification", required = false) Collection classifications, @RequestParam(value = "ontology", required = false) Collection ontologies, + @Parameter(description = "Set to true (default setting is false) for intersection (default behavior is union) of classifications.") + @RequestParam(value = "exclusive", required = false, defaultValue = "false") boolean exclusive, + @Parameter(description = "Use License option to filter based on license.label, license.logo and license.url variables. " + + "Use Composite Option to filter based on the objects (i.e. collection, subject) within the classifications variable. " + + "Use Linear option to filter based on String and Collection based variables.") + @RequestParam(value = "option", required = false, defaultValue = "LINEAR") FilterOption filterOption, @RequestParam(value = "rows", defaultValue = "10") Integer rows, @RequestParam(value = "start", defaultValue = "0") Integer start, + @RequestParam(value = "lang", defaultValue = "en") String lang, HttpServletResponse response ) throws IOException, SolrServerException { + ontologies = ontologyRepository.filterOntologyIDs(schemas,classifications,ontologies,exclusive,filterOption,lang); + final SolrQuery solrQuery = new SolrQuery(); String queryLc = query.toLowerCase(); diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/HealthCheckController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/HealthCheckController.java index 6d188d5e0..4a88bbc6c 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/HealthCheckController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/HealthCheckController.java @@ -9,10 +9,13 @@ import org.springframework.http.ResponseEntity; import org.springframework.http.HttpStatus; import uk.ac.ebi.spot.ols.controller.api.v2.responses.V2PagedAndFacetedResponse; +import uk.ac.ebi.spot.ols.model.FilterOption; import uk.ac.ebi.spot.ols.model.v2.V2Entity; import uk.ac.ebi.spot.ols.repository.neo4j.OlsNeo4jClient; import uk.ac.ebi.spot.ols.repository.v2.V2OntologyRepository; +import java.util.Collection; +import java.util.Collections; import java.util.Map; @RestController @@ -56,7 +59,7 @@ private boolean checkSolr() { try { V2PagedAndFacetedResponse result = new V2PagedAndFacetedResponse<>( ontologyRepository.find(pageable, "en", null, null, null, - false, Map.of())); + false, Map.of(), Collections.emptyList(),Collections.emptyList(),Collections.emptyList(),false, FilterOption.LINEAR)); if (result.totalElements > 0) { logger.info("Solr is initialized."); return true; diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2ClassController.java 
b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2ClassController.java index 4c40859bc..b80c86a4f 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2ClassController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2ClassController.java @@ -8,12 +8,8 @@ import org.springframework.http.HttpStatus; import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; -import org.springframework.stereotype.Controller; import org.springframework.util.MultiValueMap; -import org.springframework.web.bind.annotation.PathVariable; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; -import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.*; import org.springframework.web.util.UriUtils; import uk.ac.ebi.spot.ols.controller.api.v2.helpers.DynamicQueryHelper; import uk.ac.ebi.spot.ols.controller.api.v2.responses.V2PagedAndFacetedResponse; @@ -27,10 +23,9 @@ import java.util.HashMap; import java.util.List; import java.util.Map; - import static uk.ac.ebi.ols.shared.DefinedFields.*; -@Controller +@RestController @RequestMapping("/api/v2") public class V2ClassController { @@ -96,7 +91,7 @@ public HttpEntity getClass( iri = UriUtils.decode(iri, "UTF-8"); - V2Entity entity = classRepository.getByOntologyIdAndIri(ontologyId, iri, lang); + V2Entity entity = classRepository.findByOntologyAndIri(ontologyId, iri, lang); if (entity == null) throw new ResourceNotFoundException(); return new ResponseEntity<>( entity, HttpStatus.OK); } diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2EntityController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2EntityController.java index 764520710..a2ce3fba9 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2EntityController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2EntityController.java @@ -10,10 +10,7 @@ import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Controller; import org.springframework.util.MultiValueMap; -import org.springframework.web.bind.annotation.PathVariable; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; -import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.*; import org.springframework.web.util.UriUtils; import uk.ac.ebi.spot.ols.controller.api.v2.helpers.DynamicQueryHelper; import uk.ac.ebi.spot.ols.controller.api.v2.responses.V2PagedAndFacetedResponse; @@ -26,10 +23,9 @@ import java.util.HashMap; import java.util.List; import java.util.Map; - import static uk.ac.ebi.ols.shared.DefinedFields.*; -@Controller +@RestController @RequestMapping("/api/v2") public class V2EntityController { diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2IndividualController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2IndividualController.java index 00feeb842..b5df5afca 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2IndividualController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2IndividualController.java @@ -11,10 +11,7 @@ import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Controller; import org.springframework.util.MultiValueMap; -import 
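The @Controller to @RestController swaps in these v2 controllers change how return values are handled: @RestController is @Controller plus an implied @ResponseBody on every handler method, so results are serialized directly to the response body instead of being interpreted as view names. A minimal contrast (sketch):

    @Controller
    class ViewStyle {
        @RequestMapping("/x") @ResponseBody String x() { return "body"; } // explicit @ResponseBody required
    }

    @RestController
    class RestStyle {
        @RequestMapping("/x") String x() { return "body"; } // @ResponseBody implied
    }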
org.springframework.web.bind.annotation.PathVariable; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; -import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.*; import org.springframework.web.util.UriUtils; import uk.ac.ebi.spot.ols.controller.api.v2.helpers.DynamicQueryHelper; import uk.ac.ebi.spot.ols.controller.api.v2.responses.V2PagedAndFacetedResponse; @@ -30,7 +27,7 @@ import java.util.List; import java.util.Map; -@Controller +@RestController @RequestMapping("/api/v2") public class V2IndividualController { diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologyController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologyController.java index d84c8a82d..33f214980 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologyController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologyController.java @@ -1,34 +1,37 @@ package uk.ac.ebi.spot.ols.controller.api.v2; import com.google.gson.Gson; +import io.swagger.v3.oas.annotations.Parameter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.PageImpl; import org.springframework.data.domain.Pageable; import org.springframework.data.rest.webmvc.ResourceNotFoundException; import org.springframework.data.web.PageableDefault; +import org.springframework.data.web.PagedResourcesAssembler; +import org.springframework.hateoas.Link; +import org.springframework.hateoas.MediaTypes; +import org.springframework.hateoas.PagedModel; import org.springframework.http.HttpEntity; import org.springframework.http.HttpStatus; import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Controller; -import org.springframework.web.bind.annotation.PathVariable; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; -import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.*; import uk.ac.ebi.spot.ols.controller.api.v2.helpers.DynamicQueryHelper; import uk.ac.ebi.spot.ols.controller.api.v2.responses.V2PagedAndFacetedResponse; +import uk.ac.ebi.spot.ols.model.FilterOption; +import uk.ac.ebi.spot.ols.model.v1.V1Ontology; import uk.ac.ebi.spot.ols.model.v2.V2Entity; import uk.ac.ebi.spot.ols.repository.v2.V2OntologyRepository; import static uk.ac.ebi.ols.shared.DefinedFields.*; import java.io.IOException; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; +import java.util.*; -@Controller +@RestController @RequestMapping("/api/v2/ontologies") public class V2OntologyController { @@ -48,9 +51,17 @@ public HttpEntity> getOntologies( @RequestParam(value = "boostFields", required = false) String boostFields, @RequestParam(value = "exactMatch", required = false, defaultValue = "false") boolean exactMatch, @RequestParam(value = "includeObsoleteEntities", required = false, defaultValue = "false") boolean includeObsoleteEntities, - @RequestParam Map> searchProperties + @RequestParam Map> searchProperties, + @RequestParam(value = "schema", required = false) List schemas, + @RequestParam(value = "classification", required = false) List classifications, + 
@RequestParam(value = "ontology", required = false) List ontologies, + @Parameter(description = "Set to true (default setting is false) for intersection (default behavior is union) of classifications.") + @RequestParam(value = "exclusive", required = false, defaultValue = "false") boolean exclusive, + @Parameter(description = "Use License option to filter based on license.label, license.logo and license.url variables. " + + "Use Composite Option to filter based on the objects (i.e. collection, subject) within the classifications variable. " + + "Use Linear option to filter based on String and Collection based variables.") + @RequestParam(value = "option", required = false, defaultValue = "LINEAR") FilterOption filterOption ) throws ResourceNotFoundException, IOException { - Map> properties = new HashMap<>(); if(!includeObsoleteEntities) properties.put(IS_OBSOLETE.getText(), List.of("false")); @@ -58,7 +69,7 @@ public HttpEntity> getOntologies( return new ResponseEntity<>( new V2PagedAndFacetedResponse<>( - ontologyRepository.find(pageable, lang, search, searchFields, boostFields, exactMatch, DynamicQueryHelper.filterProperties(properties)) + ontologyRepository.find(pageable, lang, search, searchFields, boostFields, exactMatch, DynamicQueryHelper.filterProperties(properties),schemas,classifications,ontologies,exclusive,filterOption) ), HttpStatus.OK); } @@ -73,4 +84,34 @@ public HttpEntity getOntology( if (entity == null) throw new ResourceNotFoundException(); return new ResponseEntity<>( entity, HttpStatus.OK); } + + @RequestMapping(path = "/schemakeys", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + HttpEntity> filterKeys( + @PageableDefault(size = 100, page = 0) Pageable pageable, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + PagedResourcesAssembler assembler){ + Set tempSet = ontologyRepository.getSchemaKeys(lang); + List tempList = new ArrayList(); + tempList.addAll(tempSet); + final int start = (int)pageable.getOffset(); + final int end = Math.min((start + pageable.getPageSize()), tempSet.size()); + Page document = new PageImpl<>(tempList.subList(start, end), pageable, tempSet.size()); + return new ResponseEntity<>(document, HttpStatus.OK); + } + + @RequestMapping(path = "/schemavalues", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + HttpEntity> filterValues( + @RequestParam(value = "schema", required = true) Collection schemas, + @PageableDefault(size = 100, page = 0) Pageable pageable, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + PagedResourcesAssembler assembler){ + Set tempSet = ontologyRepository.getSchemaValues(schemas,lang); + List tempList = new ArrayList(); + tempList.addAll(tempSet); + final int start = (int)pageable.getOffset(); + final int end = Math.min((start + pageable.getPageSize()), tempSet.size()); + Page document = new PageImpl<>(tempList.subList(start, end), pageable, tempSet.size()); + return new ResponseEntity<>(document, HttpStatus.OK); + } + } diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologySKOSConceptController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologySKOSConceptController.java new file mode 100644 index 000000000..a8d102245 --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2OntologySKOSConceptController.java @@ -0,0 +1,341 @@ +package 
uk.ac.ebi.spot.ols.controller.api.v2; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.ObjectWriter; +import com.google.gson.JsonObject; +import io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.media.Schema; +import io.swagger.v3.oas.annotations.tags.Tag; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.PageImpl; +import org.springframework.data.domain.Pageable; +import org.springframework.data.rest.webmvc.ResourceNotFoundException; +import org.springframework.data.web.PagedResourcesAssembler; +import org.springframework.hateoas.MediaTypes; +import org.springframework.hateoas.PagedModel; +import org.springframework.http.HttpEntity; +import org.springframework.http.HttpStatus; +import org.springframework.http.MediaType; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.*; +import org.springframework.web.util.UriUtils; +import uk.ac.ebi.spot.ols.controller.api.v1.TopConceptEnum; +import uk.ac.ebi.spot.ols.model.Edge; +import uk.ac.ebi.spot.ols.model.Node; +import uk.ac.ebi.spot.ols.model.SKOSRelation; +import uk.ac.ebi.spot.ols.model.v2.V2Entity; +import uk.ac.ebi.spot.ols.repository.v1.TreeNode; +import uk.ac.ebi.spot.ols.repository.v2.V2SKOSRepository; + +import javax.servlet.http.HttpServletRequest; +import java.io.IOException; +import java.util.*; + +/** + * @author Erhun Giray TUNCAY + * @email giray.tuncay@tib.eu + * TIB-Leibniz Information Center for Science and Technology + */ +@RestController +@RequestMapping("/api/v2/ontologies") +@Tag(name = "v2-ontology-skos-controller", description = "SKOS concept hierarchies and relations extracted from individuals (instances) from a particular ontology in this service") +public class V2OntologySKOSConceptController { + + @Autowired + V2SKOSRepository skosRepository; + + @Operation(description = "Get complete SKOS concept hierarchy or only top concepts based on alternative top concept identification methods and concept relations. 
If only top concepts are identified, they can be used to extract the following levels of the concept tree one by one using the /{onto}/conceptrelations/{iri} method with broader or narrower concept relations.") + @RequestMapping(path = "/{onto}/skos/tree", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + HttpEntity>> getSKOSConceptHierarchyByOntology( + @Parameter(description = "ontology ID", required = true) + @PathVariable("onto") String ontologyId, + @Parameter(description = "infer top concepts by schema (hasTopConcept) or TopConceptOf property or broader/narrower relationships", required = true) + @RequestParam(value = "find_roots", required = true, defaultValue = "SCHEMA") TopConceptEnum topConceptIdentification, + @Parameter(description = "infer from narrower or broader relationships", required = true) + @RequestParam(value = "narrower", required = true, defaultValue = "false") boolean narrower, + @Parameter(description = "Extract the whole tree with children or only the top concepts", required = true) + @RequestParam(value = "with_children", required = true, defaultValue = "false") boolean withChildren, + @RequestParam(value = "obsoletes", required = false, defaultValue = "false") Boolean obsoletes, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + Pageable pageable) throws IOException { + ontologyId = ontologyId.toLowerCase(); + if (TopConceptEnum.RELATIONSHIPS == topConceptIdentification) + return new ResponseEntity<>(skosRepository.conceptTreeWithoutTop(ontologyId,narrower,withChildren,obsoletes,lang,pageable), HttpStatus.OK); + else + return new ResponseEntity<>(skosRepository.conceptTree(ontologyId,TopConceptEnum.SCHEMA == topConceptIdentification,narrower, withChildren,obsoletes,lang,pageable), HttpStatus.OK); + } + + @Operation(description = "Display complete SKOS concept hierarchy or only top concepts based on alternative top concept identification methods and concept relations. 
If only top concepts are identified, they can be used to extract the following levels of the concept tree one by one using the /{onto}/displayconceptrelations/{iri} method with broader or narrower concept relations.") + @RequestMapping(path = "/{onto}/skos/displaytree", produces = {MediaType.TEXT_PLAIN_VALUE}, method = RequestMethod.GET) + @ResponseBody + HttpEntity displaySKOSConceptHierarchyByOntology( + @Parameter(description = "ontology ID", required = true) + @PathVariable("onto") String ontologyId, + @Parameter(description = "infer top concepts by schema (hasTopConcept) or TopConceptOf property or broader/narrower relationships", required = true) + @RequestParam(value = "find_roots", required = true, defaultValue = "SCHEMA") TopConceptEnum topConceptIdentification, + @Parameter(description = "infer from narrower or broader relationships", required = true) + @RequestParam(value = "narrower", required = true, defaultValue = "false") boolean narrower, + @Parameter(description = "Extract the whole tree with children or only the top concepts", required = true) + @RequestParam(value = "with_children", required = true, defaultValue = "false") boolean withChildren, + @Parameter(description = "display related concepts", required = true) + @RequestParam(value = "display_related", required = true, defaultValue = "false") boolean displayRelated, + @RequestParam(value = "obsoletes", required = false, defaultValue = "false") Boolean obsoletes, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + Pageable pageable) throws IOException { + ontologyId = ontologyId.toLowerCase(); + List> rootIndividuals = null; + if(TopConceptEnum.RELATIONSHIPS == topConceptIdentification) + rootIndividuals = skosRepository.conceptTreeWithoutTop(ontologyId,narrower,withChildren,obsoletes,lang,pageable); + else + rootIndividuals = skosRepository.conceptTree(ontologyId,TopConceptEnum.SCHEMA == topConceptIdentification,narrower, withChildren,obsoletes,lang,pageable); + StringBuilder sb = new StringBuilder(); + for (TreeNode root : rootIndividuals) { + sb.append(root.getIndex() + " , "+ root.getData().any().get("label").toString() + " , " + root.getData().any().get("iri").toString()).append("\n"); + sb.append(generateConceptHierarchyTextByOntology(root, displayRelated)); + } + + return new HttpEntity(sb.toString()); + } + + @Operation(description = "Get partial SKOS concept hierarchy based on the encoded iri of the designated top concept") + @RequestMapping(path = "/{onto}/skos/{iri}/tree", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + HttpEntity> getSKOSConceptHierarchyByOntologyAndIri( + @Parameter(description = "ontology ID", required = true) + @PathVariable("onto") String ontologyId, + @Parameter(description = "encoded concept IRI", required = true) + @PathVariable("iri") String iri, + @Parameter(description = "infer from narrower or broader relationships", required = true) + @RequestParam(value = "narrower", required = true, defaultValue = "false") boolean narrower, + @Parameter(description = "index value for the root term", required = true) + @RequestParam(value = "index", required = true, defaultValue = "1") String index, + @RequestParam(value = "obsoletes", required = false, defaultValue = "false") Boolean obsoletes, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + Pageable pageable) throws IOException { + ontologyId = ontologyId.toLowerCase(); + TreeNode topConcept = new TreeNode(new 
V2Entity(new JsonObject())); + String decodedIri; + decodedIri = UriUtils.decode(iri, "UTF-8"); + topConcept = skosRepository.conceptSubTree(ontologyId, decodedIri, narrower, index, obsoletes, lang, pageable); + + if (topConcept.getData().any().get("iri") == null) + throw new ResourceNotFoundException("No roots could be found for " + ontologyId ); + return new ResponseEntity<>(topConcept, HttpStatus.OK); + } + + @Operation(description = "Display partial SKOS concept hierarchy based on the encoded iri of the designated top concept") + @RequestMapping(path = "/{onto}/skos/{iri}/displaytree", produces = {MediaType.TEXT_PLAIN_VALUE}, method = RequestMethod.GET) + @ResponseBody + HttpEntity displaySKOSConceptHierarchyByOntologyAndIri( + @Parameter(description = "ontology ID", required = true) + @PathVariable("onto") String ontologyId, + @Parameter(description = "encoded concept IRI", required = true) + @PathVariable("iri") String iri, + @Parameter(description = "infer from narrower or broader relationships", required = true) + @RequestParam(value = "narrower", required = true, defaultValue = "false") boolean narrower, + @Parameter(description = "display related concepts", required = true) + @RequestParam(value = "display_related", required = true, defaultValue = "false") boolean displayRelated, + @Parameter(description = "index value for the root term", required = true) + @RequestParam(value = "index", required = true, defaultValue = "1") String index, + @RequestParam(value = "obsoletes", required = false, defaultValue = "false") Boolean obsoletes, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + Pageable pageable) throws IOException { + ontologyId = ontologyId.toLowerCase(); + TreeNode topConcept = new TreeNode(new V2Entity(new JsonObject())); + String decodedIri; + StringBuilder sb = new StringBuilder(); + decodedIri = UriUtils.decode(iri, "UTF-8"); + topConcept = skosRepository.conceptSubTree(ontologyId, decodedIri, narrower, index, obsoletes, lang, pageable); + + sb.append(topConcept.getIndex() + " , "+ topConcept.getData().any().get("label").toString() + " , " + topConcept.getData().any().get("iri").toString()).append("\n"); + sb.append(generateConceptHierarchyTextByOntology(topConcept, displayRelated)); + + return new HttpEntity(sb.toString()); + } + + @Operation(description = "Broader, Narrower and Related concept relations of a concept are listed in JSON if the concept iri is provided in encoded format.") + @RequestMapping(path = "/{onto}/skos/{iri}/relations", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + public HttpEntity> findRelatedConcepts( + @Parameter(description = "ontology ID", required = true) + @PathVariable("onto") String ontologyId, + @Parameter(description = "encoded concept IRI", required = true) + @PathVariable("iri") String iri, + @Parameter(description = "skos based concept relation type", required = true) + @RequestParam(value = "relation_type", required = true, defaultValue = "broader") + @Schema(type = "string", allowableValues = { "broader", "narrower", "related" }) SKOSRelation relationType, + @RequestParam(value = "obsoletes", required = false, defaultValue = "false") Boolean obsoletes, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + Pageable pageable, + PagedResourcesAssembler assembler) { + + ontologyId = ontologyId.toLowerCase(); + List related = new ArrayList(); + String decodedIri = UriUtils.decode(iri, "UTF-8"); +
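For the relations endpoint above, the concept IRI travels as a path segment: it is decoded once in transit and once more by UriUtils.decode, so per the usual OLS convention it should be double URL-encoded on the wire. A request with a hypothetical ontology and concept IRI:

    GET /api/v2/ontologies/myonto/skos/http%253A%252F%252Fexample.org%252FC0001/relations?relation_type=broader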
related = skosRepository.findRelated(ontologyId, decodedIri, relationType.getPropertyName(),lang); + + final int start = (int)pageable.getOffset(); + final int end = Math.min((start + pageable.getPageSize()), related.size()); + Page conceptPage = new PageImpl<>(related.subList(start, end), pageable, related.size()); + + return new ResponseEntity<>( assembler.toModel(conceptPage), HttpStatus.OK); + + } + + @Operation(description = "Broader, Narrower and Related concept relations of a concept are displayed as text if the concept iri is provided in encoded format.") + @RequestMapping(path = "/{onto}/skos/{iri}/displayrelations", produces = {MediaType.TEXT_PLAIN_VALUE}, method = RequestMethod.GET) + @ResponseBody + public HttpEntity displayRelatedConcepts( + @Parameter(description = "ontology ID", required = true) + @PathVariable("onto") String ontologyId, + @Parameter(description = "encoded concept IRI", required = true) + @PathVariable("iri") String iri, + @Parameter(description = "skos based concept relation type", required = true) + @RequestParam(value = "relation_type", required = true, defaultValue = "broader") + @Schema(type = "string", allowableValues = { "broader", "narrower", "related" }) SKOSRelation relationType, + @RequestParam(value = "obsoletes", required = false, defaultValue = "false") Boolean obsoletes, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + Pageable pageable, + PagedResourcesAssembler assembler) { + StringBuilder sb = new StringBuilder(); + ontologyId = ontologyId.toLowerCase(); + List related = new ArrayList(); + String decodedIri = UriUtils.decode(iri, "UTF-8"); + related = skosRepository.findRelated(ontologyId, decodedIri, relationType.getPropertyName(),lang); + + final int start = (int)pageable.getOffset(); + final int end = Math.min((start + pageable.getPageSize()), related.size()); + Page conceptPage = new PageImpl<>(related.subList(start, end), pageable, related.size()); + int count = 0; + for (V2Entity individual : conceptPage.getContent()) + sb.append(++count).append(" , ").append(individual.any().get("label").toString()).append(" , ").append(individual.any().get("iri").toString()).append("\n"); + + return new HttpEntity<>( sb.toString()); + + } + + @Operation(description = "Broader, Narrower and Related concept relations of a concept are listed in JSON if the concept iri is provided in encoded format. The relationship is identified indirectly based on the related concept's relation to the concept in question. This requires traversing all the available concepts and checking if they are related to the concept in question. For this reason, this method is relatively slower than the displayconceptrelations method. 
Nevertheless, it makes it possible to identify unforeseen relations of the concept in question") + @RequestMapping(path = "/{onto}/skos/{iri}/indirectrelations", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + public HttpEntity> findRelatedConceptsIndirectly( + @Parameter(description = "ontology ID", required = true) + @PathVariable("onto") String ontologyId, + @Parameter(description = "encoded concept IRI", required = true) + @PathVariable("iri") String iri, + @Parameter(description = "skos based concept relation type", required = true) + @RequestParam(value = "relation_type", required = true, defaultValue = "broader") + @Schema(type = "string", allowableValues = { "broader", "narrower", "related" }) SKOSRelation relationType, + @RequestParam(value = "obsoletes", required = false, defaultValue = "false") Boolean obsoletes, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + Pageable pageable) throws IOException { + ontologyId = ontologyId.toLowerCase(); + List related = new ArrayList(); + String decodedIri = UriUtils.decode(iri, "UTF-8"); + related = skosRepository.findRelatedIndirectly(ontologyId, decodedIri, relationType.getPropertyName(), obsoletes,lang,pageable); + + return new ResponseEntity<>( related, HttpStatus.OK); + + } +
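The display variants in this controller emit one concept per line in the form "index , label , iri". With hypothetical concepts, a displaytree response looks like:

    1 , cardiovascular disease , http://example.org/C100
    1.1 , hypertension , http://example.org/C101
    1.2 , myocarditis , http://example.org/C102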
+ @Operation(description = "Broader, Narrower and Related concept relations of a concept are displayed as plain text if the concept iri is provided in encoded format. The relationship is identified indirectly based on the related concept's relation to the concept in question. This requires traversing all the available concepts and checking if they are related to the concept in question. For this reason, this method is relatively slower than the displayconceptrelations method. Nevertheless, it makes it possible to identify unforeseen relations of the concept in question") + @RequestMapping(path = "/{onto}/skos/{iri}/displayindirectrelations", produces = {MediaType.TEXT_PLAIN_VALUE}, method = RequestMethod.GET) + @ResponseBody + public HttpEntity displayRelatedConceptsIndirectly( + @Parameter(description = "ontology ID", required = true) + @PathVariable("onto") String ontologyId, + @Parameter(description = "encoded concept IRI", required = true) + @PathVariable("iri") String iri, + @Parameter(description = "skos based concept relation type", required = true) + @RequestParam(value = "relation_type", required = true, defaultValue = "broader") + @Schema(type = "string", allowableValues = { "broader", "narrower", "related" }) SKOSRelation relationType, + @RequestParam(value = "obsoletes", required = false, defaultValue = "false") Boolean obsoletes, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang, + Pageable pageable) throws IOException { + StringBuilder sb = new StringBuilder(); + ontologyId = ontologyId.toLowerCase(); + List related = new ArrayList(); + String decodedIri = UriUtils.decode(iri, "UTF-8"); + related = skosRepository.findRelatedIndirectly(ontologyId, decodedIri, relationType.getPropertyName(), obsoletes,lang,pageable); + + int count = 0; + for (V2Entity individual : related) + sb.append(++count).append(" , ").append(individual.any().get("label").toString()).append(" , ").append(individual.any().get("iri").toString()).append("\n"); + + + return new ResponseEntity<>( sb.toString(), HttpStatus.OK); + + } + + @Operation(description = "Node and Edge definitions needed to visualize the nodes that are directly related with the subject term. Ontology ID and encoded iri are required. 
") + @RequestMapping(path = "/{onto}/skos/{iri}/graph", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + public HttpEntity retrieveImmediateGraph( + @Parameter(description = "ontology ID", required = true) + @PathVariable("onto") String ontologyId, + @Parameter(description = "encoded concept IRI", required = true) + @PathVariable("iri") String iri, + @RequestParam(value = "lang", required = false, defaultValue = "en") String lang){ + + List related = new ArrayList(); + String decodedIri = UriUtils.decode(iri, "UTF-8"); + + V2Entity subjectTerm = skosRepository.findByOntologyAndIri(ontologyId, decodedIri, lang); + + related = skosRepository.findRelated(ontologyId, decodedIri, SKOSRelation.related.getPropertyName(), lang); + + List narrower = new ArrayList(); + narrower = skosRepository.findRelated(ontologyId, decodedIri, SKOSRelation.narrower.getPropertyName(), lang); + + List broader = new ArrayList(); + broader = skosRepository.findRelated(ontologyId, decodedIri, SKOSRelation.broader.getPropertyName(), lang); + + Set relatedNodes = new HashSet(); + related.forEach(term -> relatedNodes.add(new Node(term.any().get("iri").toString(), term.any().get("label").toString()))); + Set narrowerNodes = new HashSet(); + narrower.forEach(term -> narrowerNodes.add(new Node(term.any().get("iri").toString(), term.any().get("label").toString()))); + Set broaderNodes = new HashSet(); + broader.forEach(term -> broaderNodes.add(new Node(term.any().get("iri").toString(), term.any().get("label").toString()))); + + Set edges = new HashSet(); + relatedNodes.forEach(node -> edges.add(new Edge(decodedIri, node.getIri(), "related",SKOSRelation.related.getPropertyName()))); + narrowerNodes.forEach(node -> edges.add(new Edge(decodedIri, node.getIri(), "narrower",SKOSRelation.narrower.getPropertyName()))); + broaderNodes.forEach(node -> edges.add(new Edge(decodedIri, node.getIri(), "broader",SKOSRelation.broader.getPropertyName()))); + + Set nodes = new HashSet(); + nodes.add(new Node(decodedIri,subjectTerm.any().get("label").toString())); + nodes.addAll(relatedNodes); + nodes.addAll(broaderNodes); + nodes.addAll(narrowerNodes); + + + Map graph = new HashMap(); + graph.put("nodes", nodes); + graph.put("edges", edges); + ObjectWriter ow = new ObjectMapper().writer().withDefaultPrettyPrinter(); + try { + return new ResponseEntity<>(ow.writeValueAsString(graph),HttpStatus.OK); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + } + + public StringBuilder generateConceptHierarchyTextByOntology(TreeNode rootConcept, boolean displayRelated) { + StringBuilder sb = new StringBuilder(); + for (TreeNode childConcept : rootConcept.getChildren()) { + sb.append(childConcept.getIndex() + " , "+ childConcept.getData().any().get("label").toString() + " , " + childConcept.getData().any().get("iri").toString()).append("\n"); + sb.append(generateConceptHierarchyTextByOntology(childConcept,displayRelated)); + } + if(displayRelated) + for (TreeNode relatedConcept : rootConcept.getRelated()) { + sb.append(relatedConcept.getIndex() + " , "+ relatedConcept.getData().any().get("label").toString() + " , " + relatedConcept.getData().any().get("iri").toString()).append("\n"); + sb.append(generateConceptHierarchyTextByOntology(relatedConcept,displayRelated)); + } + return sb; + } + + @ResponseStatus(value = HttpStatus.NOT_FOUND, reason = "Resource not found") + @ExceptionHandler(ResourceNotFoundException.class) + public void handleError(HttpServletRequest req, 
Exception exception) { + } + +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2PropertyController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2PropertyController.java index b613fc0dc..dde474cea 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2PropertyController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2PropertyController.java @@ -28,7 +28,7 @@ import java.util.List; import java.util.Map; -@Controller +@RestController @RequestMapping("/api/v2") public class V2PropertyController { diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java index 0af7b2460..503d8fba3 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/V2StatisticsController.java @@ -1,5 +1,9 @@ package uk.ac.ebi.spot.ols.controller.api.v2; +import com.google.common.collect.Sets; +import io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.Parameter; +import org.apache.commons.collections4.map.MultiKeyMap; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.response.FacetField; import org.apache.solr.client.solrj.response.QueryResponse; @@ -10,33 +14,83 @@ import org.springframework.http.HttpStatus; import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; -import org.springframework.stereotype.Controller; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.*; +import uk.ac.ebi.spot.ols.model.FilterOption; import uk.ac.ebi.spot.ols.model.v2.V2Statistics; import uk.ac.ebi.spot.ols.repository.solr.OlsSolrClient; +import uk.ac.ebi.spot.ols.repository.v2.V2OntologyRepository; import java.io.IOException; -import java.util.HashMap; -import java.util.Map; +import java.util.*; -@Controller -@RequestMapping("/api/v2/stats") +@RestController +@RequestMapping("/api/v2") public class V2StatisticsController { @Autowired OlsSolrClient solrClient; - @RequestMapping(produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + @Autowired + private V2OntologyRepository ontologyRepository; + + @Operation(description = "Get Whole System Statistics. Components in all ontologies are taken into consideration") + @RequestMapping(path = "/stats", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) public HttpEntity getStatistics() throws ResourceNotFoundException, IOException { + return new ResponseEntity<>( computeStats("*:*", null), HttpStatus.OK); + } + + @Operation(description = "Get Schema and Classification based Statistics. 
Possible schema keys and possible classification values of particular keys can be inquired with the /api/v2/ontologies/schemakeys and /api/v2/ontologies/schemavalues methods respectively.") + @RequestMapping(path = "/statsby", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + public HttpEntity getStatistics( + @RequestParam(value = "schema", required = false) Collection schemas, + @RequestParam(value = "classification", required = false) Collection classifications, + @RequestParam(value = "ontologyIds", required = false) Collection ontologyIds, + @Parameter(description = "Set to true (default setting is false) for intersection (default behavior is union) of classifications.") + @RequestParam(value = "exclusive", required = false, defaultValue = "false") boolean exclusive, + @Parameter(description = "Use License option to filter based on license.label, license.logo and license.url variables. " + + "Use Composite Option to filter based on the objects (i.e. collection, subject) within the classifications variable. " + + "Use Linear option to filter based on String and Collection based variables.") + @RequestParam(value = "option", required = false, defaultValue = "COMPOSITE") FilterOption filterOption, + @RequestParam(value = "lang", defaultValue = "en") String lang) throws ResourceNotFoundException, IOException{ + + ontologyIds = ontologyRepository.filterOntologyIDs(schemas,classifications,ontologyIds,exclusive,filterOption,lang); + String lastLoaded = ontologyRepository.getLastLoaded(ontologyIds,lang).toString(); + StringBuilder sb = new StringBuilder(); + String queryString = "none"; + if(ontologyIds != null){ + for (String id : ontologyIds){ + sb.append("ontologyId:").append(id).append(" OR "); + } + queryString = sb.toString().substring(0,sb.toString().lastIndexOf(" OR ")); + } + return new ResponseEntity<>( computeStats(queryString, lastLoaded), HttpStatus.OK); + } + @Operation(description = "Get Composite Schema based Statistics. All schemas with their respective classifications under the classifications variable will be computed.") + @RequestMapping(path = "/allstatsbyschema", produces = {MediaType.APPLICATION_JSON_VALUE, MediaTypes.HAL_JSON_VALUE}, method = RequestMethod.GET) + HttpEntity getStatisticsBySchema( + @RequestParam(value = "schema", required = false) Set schemas, + @RequestParam(value = "lang", defaultValue = "en") String lang + ) throws IOException { + MultiKeyMap summaries = new MultiKeyMap(); + Set keys = schemas == null || schemas.isEmpty() ? ontologyRepository.getSchemaKeys(lang) : Sets.intersection(ontologyRepository.getSchemaKeys(lang),schemas); + for (String key : keys) { + Set values = ontologyRepository.getSchemaValues(Collections.singleton(key),lang); + for (String value : values) { + summaries.put(key,value, getStatistics(Collections.singleton(key),Collections.singleton(value), Collections.emptySet(),false,FilterOption.COMPOSITE,lang)); + } + } + + return new ResponseEntity<>( summaries, HttpStatus.OK); + } + + private V2Statistics computeStats(String queryString, String lastLoaded) throws IOException { Map coreStatus = solrClient.getCoreStatus(); Map indexStatus = (Map) coreStatus.get("index"); - String lastModified = (String) indexStatus.get("lastModified"); + String lastModified = lastLoaded == null ? 
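A hypothetical invocation of the new schema-filtered statistics endpoint (the schema key and classification value are invented for illustration):

    GET /api/v2/statsby?schema=collection&classification=NFDI4CHEM&option=COMPOSITE

The matching ontology IDs are resolved first and the statistics are then computed over a Solr query of the form ontologyId:id1 OR ontologyId:id2.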
(String) indexStatus.get("lastModified") : lastLoaded; SolrQuery query = new SolrQuery(); - - query.setQuery("*:*"); + query.setQuery(queryString); query.setFacet(true); query.addFacetField("type"); query.setRows(0); @@ -56,7 +110,6 @@ public HttpEntity getStatistics() throws ResourceNotFoundException stats.numberOfIndividuals = counts.containsKey("individual") ? counts.get("individual") : 0; stats.numberOfProperties = counts.containsKey("property") ? counts.get("property") : 0; - return new ResponseEntity<>( stats, HttpStatus.OK); + return stats; } - } diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/helpers/DynamicQueryHelper.java b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/helpers/DynamicQueryHelper.java index 80b1ade32..684084742 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/helpers/DynamicQueryHelper.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/controller/api/v2/helpers/DynamicQueryHelper.java @@ -20,8 +20,8 @@ public static Map> filterProperties(Map creators; + + public Collection collection; + public Collection subject; //public Map> annotations; + + public Object classifications; + + public String repoUrl; + + public License license; public Object annotations; public String fileLocation; @@ -47,5 +58,9 @@ public class V1OntologyConfig { public Collection preferredRootTerms = new HashSet<>(); public boolean isSkos; + public boolean skosNarrower; + + public TopConceptEnum skosRoot; + public boolean allowDownload; } diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/neo4j/OlsNeo4jClient.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/neo4j/OlsNeo4jClient.java index c0a142d76..7279c28fa 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/neo4j/OlsNeo4jClient.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/neo4j/OlsNeo4jClient.java @@ -155,6 +155,39 @@ public Page recursivelyTraverseIncomingEdges(String type, String id return neo4jClient.queryPaginated(query, "c", countQuery, parameters("id", id), pageable); } + public Page recursivelyTraverseOutgoingEdgesWithSiblings(String type, String id, String ontologyId, List edgeIRIs, Map edgeProps, Pageable pageable) { + + String edge = makeEdgesList(edgeIRIs, Map.of()); + + String query = + "MATCH (c:" + type + ") WHERE c.id = $id " + + "WITH c " + + "OPTIONAL MATCH (c)-[edge:" + edge + " *]->(ancestor) " + + "RETURN DISTINCT ancestor as a " + + "UNION " + + "MATCH (c:" + type + ") WHERE c.id = $id " + + "WITH c " + + "OPTIONAL MATCH (c)-[edge:" + edge + " *]->(ancestor) " + + "OPTIONAL MATCH (ancestor)<-[:" + edge + "]-(descendant) " + + "RETURN DISTINCT descendant as a "; + + String countQuery = + "CALL {" + + "MATCH (a:" + type + ") WHERE a.id = $id " + + "WITH a " + + "OPTIONAL MATCH (a)-[edge:" + edge + " *]->(ancestor) " + + "RETURN DISTINCT ancestor as a " + + "UNION " + + "MATCH (c:" + type + ") WHERE c.id = $id " + + "WITH c " + + "OPTIONAL MATCH (c)-[edge:" + edge + " *]->(ancestor) " + + "OPTIONAL MATCH (ancestor)<-[:" + edge + "]-(descendant) " + + "RETURN DISTINCT descendant as a " + + "}" + + "RETURN count(*)"; + + return neo4jClient.queryPaginated(query, "a", countQuery, parameters("type", type, "id", id, "ontologyId", ontologyId), pageable); + } private static String makeEdgesList(List edgeIRIs, Map edgeProperties) { diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/solr/OlsSolrClient.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/solr/OlsSolrClient.java index 
a1cb659e4..75ba375dd 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/solr/OlsSolrClient.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/solr/OlsSolrClient.java @@ -2,6 +2,7 @@ import com.google.gson.Gson; import com.google.gson.JsonElement; +import com.google.gson.JsonObject; import com.google.gson.JsonParser; import org.apache.http.HttpEntity; import org.apache.http.client.methods.CloseableHttpResponse; @@ -17,14 +18,17 @@ import org.apache.solr.common.SolrDocument; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.Pageable; import org.springframework.stereotype.Component; import javax.validation.constraints.NotNull; import java.io.IOException; import java.net.URLDecoder; +import java.util.HashSet; import java.util.LinkedHashMap; import java.util.Map; +import java.util.Set; import java.util.stream.Collectors; @@ -95,12 +99,34 @@ public JsonElement getFirst(OlsSolrQuery query) { if(qr.getResults().getNumFound() < 1) { logger.debug("Expected at least 1 result for solr getFirst for solr query = {}", query.constructQuery().jsonStr()); + //return new JsonObject(); throw new RuntimeException("Expected at least 1 result for solr getFirst"); } return getOlsEntityFromSolrResult(qr.getResults().get(0)); } + public JsonElement getByIndex(OlsSolrQuery query, int i) { + + QueryResponse qr = runSolrQuery(query, null); + + if(qr.getResults().getNumFound() < 1) { + logger.debug("Expected at least 1 result for solr getFirst for solr query = {}", query.constructQuery().jsonStr()); + throw new RuntimeException("Expected at least 1 result for solr getFirst"); + } + + return getOlsEntityFromSolrResult(qr.getResults().get(i)); + } + + public Set getSet(OlsSolrQuery query){ + Set tempSet = new HashSet<>(); + QueryResponse qr = runSolrQuery(query, PageRequest.of(0, MAX_ROWS)); + for (int i = 0; i getValues(JsonObject json, String predicate) { return List.of(value); } + public static JsonObject getValue(JsonObject json, String predicate) { + JsonElement value = json.get(predicate); + if (value != null) + if (value.isJsonObject()) { + return value.getAsJsonObject(); + } + return null; + } + public static List getStrings(JsonObject json, String predicate) { return getValues(json, predicate).stream().map(JsonHelper::objectToString).collect(Collectors.toList()); } diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/TreeNode.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/TreeNode.java new file mode 100644 index 000000000..1ca07e9b3 --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/TreeNode.java @@ -0,0 +1,108 @@ +package uk.ac.ebi.spot.ols.repository.v1; + +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Collection; + +public class TreeNode implements Serializable { + + /** + * + */ + private static final long serialVersionUID = -343190255910189166L; + private Collection> children = new ArrayList>(); + private Collection> related = new ArrayList>(); + private Collection> parent = new ArrayList>(); + private String index; + private T data = null; + + public TreeNode(T data) { + this.data = data; + } + + public TreeNode(T data, Collection> parent) { + this.data = data; + this.parent = parent; + } + + public Collection> getChildren() { + return children; + } + public void setChildren(Collection> children) { + this.children = children; + } + + public void addChild(T data) { + TreeNode child = new 
TreeNode(data); + this.children.add(child); + } + + public void addChild(TreeNode child) { + this.children.add(child); + } + + public void addRelated(T data) { + TreeNode related = new TreeNode(data); + this.related.add(related); + } + + public void addRelated(TreeNode related) { + this.related.add(related); + } + + public void addParent(T data) { + TreeNode parent = new TreeNode(data); + this.parent.add(parent); + } + + public void addParent(TreeNode parent) { + this.parent.add(parent); + } + + public Collection> getRelated() { + return related; + } + public void setRelated(Collection> related) { + this.related = related; + } + public Collection> getParent() { + return parent; + } + public void setParent(Collection> parent) { + this.parent = parent; + } + public String getIndex() { + return index; + } + public void setIndex(String index) { + this.index = index; + } + + public T getData() { + return this.data; + } + + public void setData(T data) { + this.data = data; + } + + public boolean isRoot() { + return this.parent.size() == 0; + } + + public boolean isLeaf() { + return this.children.size() == 0; + } + + public void resetParent() { + this.parent = new ArrayList>(); + } + + public void resetChildren() { + this.children = new ArrayList>(); + } + + public void resetRelated() { + this.related = new ArrayList>(); + } +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1FullJsTreeBuilder.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1FullJsTreeBuilder.java new file mode 100644 index 000000000..a27af17ae --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1FullJsTreeBuilder.java @@ -0,0 +1,250 @@ +package uk.ac.ebi.spot.ols.repository.v1; + +import static uk.ac.ebi.ols.shared.DefinedFields.HAS_DIRECT_CHILDREN; + +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Base64; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Collectors; + +import com.google.common.collect.HashMultimap; +import com.google.common.collect.Multimap; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; + +/** + * @author Deepan Anbalagan + * @email deepan.anbalagan@tib.eu + * TIB-Leibniz Information Center for Science and Technology + */ +public class V1FullJsTreeBuilder { + + JsonObject thisEntity; + List parentRelationIRIs; + Set entities = new LinkedHashSet<>(); + Map entityIriToEntity = new HashMap<>(); + Multimap entityIriToChildIris = HashMultimap.create(); + Set toBeOpenedIris = new HashSet<>(); + + public V1FullJsTreeBuilder(JsonElement thisEntity, List ancestors, List parentRelationIRIs) { + + this.thisEntity = thisEntity.getAsJsonObject(); + this.parentRelationIRIs = parentRelationIRIs; + + // 1. put all entities (this entity + all ancestors) into an ordered set + + entities.add(thisEntity); + String thisEntityIri = (String) thisEntity.getAsJsonObject().getAsJsonPrimitive("iri").getAsString(); + ancestors.parallelStream() + .filter(element -> { + return !((String) element.getAsJsonObject().getAsJsonPrimitive("iri").getAsString()).equals(thisEntityIri); + }) + .forEach(entities::add); + + // 2. 
establish map of IRI -> entity + + for(JsonElement entity : entities) { + entityIriToEntity.put((String) entity.getAsJsonObject().getAsJsonPrimitive("iri").getAsString(), entity); + } + + // 3. establish map of IRI -> children + + for(String entityIri : entityIriToEntity.keySet()) { + + JsonElement entity = entityIriToEntity.get(entityIri); + + for (String parentIri : getEntityParentIRIs(entity)) { + entityIriToChildIris.put(parentIri, entity.getAsJsonObject().get("iri").getAsString()); + } + } + + // 4. Get all Iri which needs to be opened + getAllIrisToBeOpen(); + } + + private void getAllIrisToBeOpen() { + Set unVisitedKeys = entityIriToChildIris.keySet(); + String selectedEntityIri = JsonHelper.getString(thisEntity, "iri"); + + for(String key : unVisitedKeys) { + // Check if the current key or any of its descendants contain the selectedEntityIri + if (checkIrisTobeOpen(key, selectedEntityIri)) { + toBeOpenedIris.add(key); + } + } + } + + private boolean checkIrisTobeOpen(String key, String selectedEntityIri) { + + // Check if the current key directly contains the selectedEntityIri + if (entityIriToChildIris.get(key).contains(selectedEntityIri)) { + toBeOpenedIris.add(key); + return true; + } + + // Recursively check children for the selectedEntityIri + for (String childKey : entityIriToChildIris.get(key)) { + if (checkIrisTobeOpen(childKey, selectedEntityIri)) { + toBeOpenedIris.add(key); + return true; + } + } + + return false; + } + + List> buildJsTree() { + + // 1. establish roots (entities with no parents) + + List roots = entities.stream() + .filter(entity -> getEntityParentIRIs(entity).size() == 0) + .collect(Collectors.toList()); + + // 2. build jstree entries starting with roots + + List> jstree = new ArrayList<>(); + + for(JsonElement root : roots) { + createJsTreeEntries(jstree, root.getAsJsonObject(), null); + } + + // 3. Retrieve parentIds which are not opened but has children nodes + Set parentIdsToBeRemoved = new HashSet<>(); + for (Map tree : jstree) { + // Check if the current tree map has a "parent" key that is not "#" + if (tree.containsKey("parent") && !"#".equals(tree.get("parent"))) { + String parentValue = (String) tree.get("parent"); + + // Find entries with matching "id" and where "opened" is false + jstree.stream() + .filter(tmpTree -> parentValue.equals(tmpTree.get("id"))) + .filter(tmpTree -> { + Map state = (Map) tmpTree.get("state"); + return state != null && Boolean.FALSE.equals(state.get("opened")); + }) + .map(tmpTree -> (String) tmpTree.get("id")) + .forEach(parentIdsToBeRemoved::add); + } + } + + // 4. 
Remove nodes whose parent ids were collected in the previous step (step 3) + jstree.removeIf(map -> map.entrySet() + .stream() + .anyMatch(entry -> "parent".equals(entry.getKey()) && parentIdsToBeRemoved.contains(entry.getValue())) + ); + + return jstree; + } + + private void createJsTreeEntries(List> jstree, JsonObject entity, String concatenatedParentIris) { + + String entityIri = JsonHelper.getString(entity, "iri"); + + Map jstreeEntry = new LinkedHashMap<>(); + + if(concatenatedParentIris != null) { + jstreeEntry.put("id", base64Encode(concatenatedParentIris + ";" + entityIri)); + jstreeEntry.put("parent", base64Encode(concatenatedParentIris)); + } else { + jstreeEntry.put("id", base64Encode(entityIri)); + jstreeEntry.put("parent", "#"); + } + + jstreeEntry.put("iri", entityIri); + jstreeEntry.put("text", JsonHelper.getString(entity, "label")); + + Collection childIris = entityIriToChildIris.get(entityIri); + + // only the leaf node is selected (= highlighted in the tree) + boolean selected = JsonHelper.getString(thisEntity, "iri").equals(entityIri); + + // nodes on the path to the selected node are marked as opened (expanded) + boolean opened = toBeOpenedIris.contains(entityIri); + + + boolean hasDirectChildren = Objects.equals(JsonHelper.getString(entity, HAS_DIRECT_CHILDREN.getText()), "true"); + // assumes DefinedFields.HAS_HIERARCHICAL_CHILDREN exists in ols-shared; referenced fully qualified as it is not statically imported above + boolean hasHierarchicalChildren = Objects.equals(JsonHelper.getString(entity, uk.ac.ebi.ols.shared.DefinedFields.HAS_HIERARCHICAL_CHILDREN.getText()), "true"); + + // a node is flagged as having children iff it has direct or hierarchical children + boolean children = (hasDirectChildren || hasHierarchicalChildren); + + Map state = new LinkedHashMap<>(); + state.put("opened", opened); + state.put("selected", selected); + + jstreeEntry.put("state", state); + jstreeEntry.put("children", children); + + Map attrObj = new LinkedHashMap<>(); + attrObj.put("iri", JsonHelper.getString(entity, "iri")); + attrObj.put("ontology_name", JsonHelper.getString(entity, "ontologyId")); + attrObj.put("title", JsonHelper.getString(entity, "iri")); + attrObj.put("class", "is_a"); + jstreeEntry.put("a_attr", attrObj); + + jstreeEntry.put("ontology_name", JsonHelper.getString(entity, "ontologyId")); + + jstree.add(jstreeEntry); + + for(String childIri : childIris) { + + JsonElement child = entityIriToEntity.get(childIri); + + if(child == null) { + // child is not in this tree (i.e. 
cousin of the node requested, will not be displayed) + continue; + } + + if(concatenatedParentIris != null) { + createJsTreeEntries(jstree, child.getAsJsonObject(), concatenatedParentIris + ";" + entityIri); + } else { + createJsTreeEntries(jstree, child.getAsJsonObject(), entityIri); + } + } + } + + private Set getEntityParentIRIs(JsonElement entity) { + + List parents = new ArrayList<>(); + + for(String parentRelationIri : parentRelationIRIs) { + parents.addAll( JsonHelper.getValues(entity.getAsJsonObject(), parentRelationIri) ); + } + + Set parentIris = new LinkedHashSet<>(); + + for (JsonElement parent : parents) { + + // extract value from reified parents + while(parent.isJsonObject()) { + parent = parent.getAsJsonObject().get("value"); + } + + String parentIri = parent.getAsString(); + + if(parentIri.equals("http://www.w3.org/2002/07/owl#Thing") + || parentIri.equals("http://www.w3.org/2002/07/owl#TopObjectProperty")) { + continue; + } + + parentIris.add(parentIri); + } + + return parentIris; + } + + static String base64Encode(String str) { + return Base64.getEncoder().encodeToString(str.getBytes(StandardCharsets.UTF_8)); + } +} + diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1GraphRepository.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1GraphRepository.java index 7214e1a7f..f411d6ff1 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1GraphRepository.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1GraphRepository.java @@ -1,15 +1,18 @@ package uk.ac.ebi.spot.ols.repository.v1; -import com.google.gson.Gson; -import com.google.gson.JsonElement; -import com.google.gson.JsonObject; -import com.google.gson.JsonParser; +import com.google.gson.*; import org.neo4j.driver.types.Node; import org.neo4j.driver.types.Relationship; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; import org.springframework.stereotype.Component; +import uk.ac.ebi.spot.ols.model.v1.V1Individual; +import uk.ac.ebi.spot.ols.model.v1.V1Term; import uk.ac.ebi.spot.ols.repository.transforms.LocalizationTransform; import uk.ac.ebi.spot.ols.repository.transforms.RemoveLiteralDatatypesTransform; +import uk.ac.ebi.spot.ols.repository.v1.mappers.V1IndividualMapper; +import uk.ac.ebi.spot.ols.repository.v1.mappers.V1TermMapper; import uk.ac.ebi.spot.ols.service.Neo4jClient; import static uk.ac.ebi.ols.shared.DefinedFields.*; @@ -17,6 +20,8 @@ import java.util.*; import java.util.stream.Collectors; +import static org.neo4j.driver.Values.parameters; + @Component public class V1GraphRepository { @@ -45,6 +50,7 @@ public Map getGraphForIndividual(String iri, String ontologyId, private Map getGraphForEntity(String iri, String type, String neo4jType, String ontologyId, String lang) { String thisEntityId = ontologyId + "+" + type + "+" + iri; + List selectedNode = new ArrayList<>(); // String parentsQuery = // "MATCH path = (n:OntologyClass)-[r:directParent|relatedTo]-(parent)\n" @@ -72,6 +78,10 @@ private Map getGraphForEntity(String iri, String type, String ne List> nodes = allNodes.stream().map(node -> { JsonObject ontologyNodeObject = getOntologyNodeJson(node, lang); + + if(iri.equals(JsonHelper.getString(ontologyNodeObject, "iri"))) { + selectedNode.add(ontologyNodeObject); + } JsonObject linkedEntities = ontologyNodeObject.getAsJsonObject("linkedEntities"); if(linkedEntities != null) { @@ -100,7 +110,7 @@ private Map getGraphForEntity(String 
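For orientation, this is roughly the shape of a single jstree entry the builder above emits: node ids are the Base64 of the full IRI path ("parent1;parent2;...;self"), so the same term can appear under several parents with distinct ids. A minimal, self-contained sketch (the EFO IRIs and label are placeholder examples, not taken from the patch):

```java
import java.nio.charset.StandardCharsets;
import java.util.Base64;
import java.util.LinkedHashMap;
import java.util.Map;

public class JsTreeEntryExample {
    public static void main(String[] args) {
        // hypothetical parent path and node IRI, for illustration only
        String parentPath = "http://www.ebi.ac.uk/efo/EFO_0000001";
        String iri = "http://www.ebi.ac.uk/efo/EFO_0000408";

        Map<String, Object> entry = new LinkedHashMap<>();
        // id = Base64 of the path including self; parent = Base64 of the path without self
        entry.put("id", b64(parentPath + ";" + iri));
        entry.put("parent", b64(parentPath));
        entry.put("iri", iri);
        entry.put("text", "disease");
        entry.put("state", Map.of("opened", false, "selected", true));
        entry.put("children", false);

        System.out.println(entry);
    }

    static String b64(String s) {
        return Base64.getEncoder().encodeToString(s.getBytes(StandardCharsets.UTF_8));
    }
}
```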
diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1GraphRepository.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1GraphRepository.java
index 7214e1a7f..f411d6ff1 100644
--- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1GraphRepository.java
+++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1GraphRepository.java
@@ -1,15 +1,18 @@
 package uk.ac.ebi.spot.ols.repository.v1;
-import com.google.gson.Gson;
-import com.google.gson.JsonElement;
-import com.google.gson.JsonObject;
-import com.google.gson.JsonParser;
+import com.google.gson.*;
 import org.neo4j.driver.types.Node;
 import org.neo4j.driver.types.Relationship;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.data.domain.Page;
+import org.springframework.data.domain.Pageable;
 import org.springframework.stereotype.Component;
+import uk.ac.ebi.spot.ols.model.v1.V1Individual;
+import uk.ac.ebi.spot.ols.model.v1.V1Term;
 import uk.ac.ebi.spot.ols.repository.transforms.LocalizationTransform;
 import uk.ac.ebi.spot.ols.repository.transforms.RemoveLiteralDatatypesTransform;
+import uk.ac.ebi.spot.ols.repository.v1.mappers.V1IndividualMapper;
+import uk.ac.ebi.spot.ols.repository.v1.mappers.V1TermMapper;
 import uk.ac.ebi.spot.ols.service.Neo4jClient;
 import static uk.ac.ebi.ols.shared.DefinedFields.*;
@@ -17,6 +20,8 @@
 import java.util.*;
 import java.util.stream.Collectors;
+import static org.neo4j.driver.Values.parameters;
+
 @Component
 public class V1GraphRepository {
@@ -45,6 +50,7 @@ public Map<String, Object> getGraphForIndividual(String iri, String ontologyId,
     private Map<String, Object> getGraphForEntity(String iri, String type, String neo4jType, String ontologyId, String lang) {
         String thisEntityId = ontologyId + "+" + type + "+" + iri;
+        List<JsonObject> selectedNode = new ArrayList<>();
 //        String parentsQuery =
 //            "MATCH path = (n:OntologyClass)-[r:directParent|relatedTo]-(parent)\n"
@@ -72,6 +78,10 @@ private Map<String, Object> getGraphForEntity(String iri, String type, String neo4jType, String ontologyId, String lang) {
         List<Map<String, Object>> nodes = allNodes.stream().map(node -> {
             JsonObject ontologyNodeObject = getOntologyNodeJson(node, lang);
+
+            if(iri.equals(JsonHelper.getString(ontologyNodeObject, "iri"))) {
+                selectedNode.add(ontologyNodeObject);
+            }
             JsonObject linkedEntities = ontologyNodeObject.getAsJsonObject("linkedEntities");
             if(linkedEntities != null) {
@@ -100,7 +110,7 @@ private Map<String, Object> getGraphForEntity(String iri, String type, String neo4jType, String ontologyId, String lang) {
         JsonObject ontologyEdgeObject = getOntologyEdgeJson(relationship, lang);
-        String uri = JsonHelper.getString(ontologyEdgeObject, "property");
+        String uri = resolveUri(result, selectedNode, iri);
         if (uri == null) {
             uri = "http://www.w3.org/2000/01/rdf-schema#subClassOf";
         }
@@ -150,10 +160,56 @@ Map<String, Object> getRelatedFrom(String entityId) {
         return (Map<String, Object>) results.get(0).get("result");
     }
+    public Page<V1Term> getRelatedFromPaginated(String entityId, String lang, Pageable pageable) {
+        String query = "MATCH (x:OntologyClass)-[r:relatedTo]->(n:OntologyClass) WHERE n.id= $id RETURN x";
+        String countQuery = "MATCH (x:OntologyClass)-[r:relatedTo]->(n:OntologyClass) WHERE n.id= $id RETURN count(x)";
-    JsonObject getOntologyNodeJson(Node node, String lang) {
+        return neo4jClient.queryPaginated(query, "x", countQuery, parameters("id", entityId), pageable).map(record -> V1TermMapper.mapTerm(record, lang));
+    }
+
+    public Page<V1Term> getSuperClassPaginated(String entityId, String lang, Pageable pageable) {
+        String query =
+                "MATCH (a:OntologyClass)-[r:`http://www.w3.org/2000/01/rdf-schema#subClassOf`]->(b:OntologyClass) "
+                        + "WHERE a.id = $id RETURN b";
+        String countQuery =
+                "MATCH (a:OntologyClass)-[r:`http://www.w3.org/2000/01/rdf-schema#subClassOf`]->(b:OntologyClass) "
+                        + "WHERE a.id = $id RETURN count(b)";
+
+        return neo4jClient.queryPaginated(query, "b", countQuery, parameters("id", entityId), pageable).map(record -> V1TermMapper.mapTerm(record, lang));
+    }
+
+    public Page<V1Term> getEquivalentClassPaginated(String entityId, String lang, Pageable pageable) {
+        String query =
+                "MATCH (a:OntologyClass)-[r:`http://www.w3.org/2002/07/owl#equivalentClass`]-(b:OntologyClass) "
+                        + "WHERE a.id = $id RETURN DISTINCT b";
+        String countQuery =
+                "MATCH (a:OntologyClass)-[r:`http://www.w3.org/2002/07/owl#equivalentClass`]-(b:OntologyClass) "
+                        + "WHERE a.id = $id RETURN count(DISTINCT b)";
-        JsonElement ontologyNodeObject = JsonParser.parseString((String) node.asMap().get("_json"));
+        return neo4jClient.queryPaginated(query, "b", countQuery, parameters("id", entityId), pageable).map(record -> V1TermMapper.mapTerm(record, lang));
+    }
+
+    public Page<V1Individual> getTermInstancesPaginated(String entityId, String lang, Pageable pageable) {
+        String query =
+                "MATCH (a:OntologyClass)<-[r:`http://www.w3.org/1999/02/22-rdf-syntax-ns#type`]-(b:OntologyIndividual) "
+                        + "WHERE a.id = $id RETURN b";
+        String countQuery =
+                "MATCH (a:OntologyClass)<-[r:`http://www.w3.org/1999/02/22-rdf-syntax-ns#type`]-(b:OntologyIndividual) "
+                        + "WHERE a.id = $id RETURN count(b)";
+
+        return neo4jClient.queryPaginated(query, "b", countQuery, parameters("id", entityId), pageable).map(record -> V1IndividualMapper.mapIndividual(record, lang));
+    }
+
+    public String getTermJson(String entityId) {
+        // NOTE: entityId is concatenated directly into the query string here; a parameterised query would be safer
+        String query = "MATCH (a:OntologyClass) WHERE a.id = '" + entityId + "' RETURN a._json AS result";
+        List<Map<String, Object>> results = neo4jClient.rawQuery(query);
+        return results.get(0).get("result").toString();
+    }
+
+    JsonObject getOntologyNodeJson(Node node, String lang) {
+        JsonElement ontologyNodeObject = new JsonObject();
+        if(node.asMap().get("_json") != null && node.asMap().get("_json") instanceof String)
+            ontologyNodeObject = JsonParser.parseString((String) node.asMap().get("_json"));
         return RemoveLiteralDatatypesTransform.transform(
                 LocalizationTransform.transform(ontologyNodeObject, lang)
@@ -161,13 +217,57 @@ JsonObject getOntologyNodeJson(Node node, String lang) {
     }
     JsonObject getOntologyEdgeJson(Relationship r, String lang) {
-
-        JsonElement ontologyEdgeObject = JsonParser.parseString((String) r.asMap().get("_json"));
+        JsonElement ontologyEdgeObject = new JsonObject();
+        if(r.asMap().get("_json") != null && r.asMap().get("_json") instanceof String)
+            ontologyEdgeObject = JsonParser.parseString((String) r.asMap().get("_json"));
         return RemoveLiteralDatatypesTransform.transform(
                 LocalizationTransform.transform(ontologyEdgeObject, lang)
         ).getAsJsonObject();
     }
+
+    private String resolveUri(Map<String, Object> result, List<JsonObject> selectedNode, String iri) {
+        if (selectedNode == null || selectedNode.isEmpty()) {
+            return null;
+        }
+
+        JsonObject selectedOntologyNodeObject = selectedNode.get(0);
+        if (iri.equals(result.get("source"))) {
+            return getRelatedProperty(selectedOntologyNodeObject, "relatedTo", (String) result.get("target"));
+        } else {
+            return getRelatedProperty(selectedOntologyNodeObject, "relatedFrom", (String) result.get("source"));
+        }
+    }
+
+    private String getRelatedProperty(JsonObject ontologyNode, String relationKey, String targetOrSourceIri) {
+        if (ontologyNode == null || !ontologyNode.has(relationKey)) {
+            return null;
+        }
+
+        JsonElement related = ontologyNode.get(relationKey);
+        if (related == null || related instanceof JsonNull) {
+            return null;
+        }
+
+        if (related.isJsonArray()) {
+            for (JsonElement element : related.getAsJsonArray()) {
+                JsonObject relationObject = element.getAsJsonObject();
+                if (relationObject != null
+                        && targetOrSourceIri.equals(JsonHelper.getString(relationObject, "value"))
+                        && relationObject.has("property")) {
+                    return JsonHelper.getString(relationObject, "property");
+                }
+            }
+        } else if (related.isJsonObject()) {
+            JsonObject relationObject = related.getAsJsonObject();
+            if (relationObject != null
+                    && targetOrSourceIri.equals(JsonHelper.getString(relationObject, "value"))
+                    && relationObject.has("property")) {
+                return JsonHelper.getString(relationObject, "property");
+            }
+        }
+
+        return null;
+    }
 }
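The `queryPaginated` calls above go through OLS's own Neo4jClient helper (a content query plus a count query). As a rough illustration of what that boils down to, here is a sketch against the plain Neo4j Java driver with explicit SKIP/LIMIT; the bolt URL, the entity id, and the page arithmetic are assumptions made for the example:

```java
import org.neo4j.driver.Driver;
import org.neo4j.driver.GraphDatabase;
import org.neo4j.driver.Result;
import org.neo4j.driver.Session;

import static org.neo4j.driver.Values.parameters;

public class PaginatedCypherSketch {
    public static void main(String[] args) {
        // assumed local bolt endpoint, for illustration only
        try (Driver driver = GraphDatabase.driver("bolt://localhost:7687");
             Session session = driver.session()) {

            int page = 0, size = 20;
            // same query shape as getRelatedFromPaginated above, with explicit SKIP/LIMIT
            Result result = session.run(
                    "MATCH (x:OntologyClass)-[:relatedTo]->(n:OntologyClass) " +
                    "WHERE n.id = $id RETURN x SKIP $skip LIMIT $limit",
                    parameters("id", "efo+class+http://www.ebi.ac.uk/efo/EFO_0000408",
                               "skip", page * size, "limit", size));

            result.forEachRemaining(record -> System.out.println(record.get("x").asNode().asMap()));
        }
    }
}
```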
diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1JsTreeRepositoryExtn.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1JsTreeRepositoryExtn.java
new file mode 100644
index 000000000..3129f769f
--- /dev/null
+++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1JsTreeRepositoryExtn.java
@@ -0,0 +1,125 @@
+package uk.ac.ebi.spot.ols.repository.v1;
+
+import static uk.ac.ebi.ols.shared.DefinedFields.HAS_DIRECT_PARENTS;
+import static uk.ac.ebi.ols.shared.DefinedFields.HAS_HIERARCHICAL_PARENTS;
+import static uk.ac.ebi.ols.shared.DefinedFields.IS_OBSOLETE;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.data.domain.PageRequest;
+import org.springframework.data.domain.Pageable;
+import org.springframework.stereotype.Component;
+
+import com.google.gson.JsonElement;
+
+import uk.ac.ebi.spot.ols.repository.neo4j.OlsNeo4jClient;
+import uk.ac.ebi.spot.ols.repository.solr.OlsSolrClient;
+import uk.ac.ebi.spot.ols.repository.solr.OlsSolrQuery;
+import uk.ac.ebi.spot.ols.repository.solr.SearchType;
+import uk.ac.ebi.spot.ols.repository.transforms.LocalizationTransform;
+import uk.ac.ebi.spot.ols.service.ViewMode;
+
+/**
+ * @author Deepan Anbalagan
+ * @email deepan.anbalagan@tib.eu
+ * TIB-Leibniz Information Center for Science and Technology
+ */
+@Component
+public class V1JsTreeRepositoryExtn {
+
+    @Autowired
+    OlsNeo4jClient neo4jClient;
+
+    @Autowired
+    OlsSolrClient solrClient;
+
+    @Autowired
+    V1JsTreeRepository v1JsTreeRepository;
+
+    public List<Map<String, Object>> getJsTreeForClassByViewMode(String iri, String ontologyId, String lang, ViewMode viewMode,
+            boolean sibling) {
+
+        return getJSFullTree(iri, "class", "OntologyClass", ontologyId, lang, viewMode, sibling);
+    }
+
+    public List<Map<String, Object>> getJsTreeForPropertyByViewMode(String iri, String ontologyId, String lang, ViewMode viewMode,
+            boolean sibling) {
+
+        return getJSFullTree(iri, "property", "OntologyProperty", ontologyId, lang, viewMode, sibling);
+    }
+
+    private List<Map<String, Object>> getJSFullTree(String iri, String type, String neo4jType,
+            String ontologyId, String lang, ViewMode viewMode, boolean sibling) {
+
+        List<String> parentRelationIRIs = List.of("directParent");
+
+        String thisEntityId = ontologyId + "+" + type + "+" + iri;
+
+        JsonElement thisEntity = neo4jClient.getOne(neo4jType, Map.of("id", thisEntityId));
+        thisEntity = LocalizationTransform.transform(thisEntity, lang);
+
+        switch (viewMode) {
+            case ALL:
+                if (sibling) {
+                    List<JsonElement> ancestorsWithSiblings = neo4jClient
+                            .recursivelyTraverseOutgoingEdgesWithSiblings(neo4jType, thisEntityId, ontologyId,
+                                    parentRelationIRIs, Map.of(), PageRequest.ofSize(100))
+                            .getContent();
+
+                    ancestorsWithSiblings = ancestorsWithSiblings.stream()
+                            .map(ancestor -> LocalizationTransform.transform(ancestor, lang)).collect(Collectors.toList());
+
+                    // 1. collect all "iri" values from ancestorsWithSiblings
+                    Set<String> ancestorIris = ancestorsWithSiblings.parallelStream()
+                            .map(ancestor -> ancestor.getAsJsonObject().getAsJsonPrimitive("iri").getAsString())
+                            .collect(Collectors.toSet());
+                    // 2. get the root elements by ontologyId
+                    List<JsonElement> roots = getRoots(ontologyId, type, false, lang, PageRequest.ofSize(100));
+
+                    // 3. add only those elements from roots that are not already among the ancestors, based on "iri"
+                    ancestorsWithSiblings.addAll(roots.stream().filter(root -> {
+                        String rootIri = root.getAsJsonObject().getAsJsonPrimitive("iri").getAsString();
+                        return !ancestorIris.contains(rootIri);
+                    })
+                            .collect(Collectors.toList()));
+
+                    return (new V1FullJsTreeBuilder(thisEntity, ancestorsWithSiblings, parentRelationIRIs)).buildJsTree();
+                } else {
+                    return getDefaultJsTreeByType(iri, ontologyId, lang, type);
+                }
+
+            default:
+                return getDefaultJsTreeByType(iri, ontologyId, lang, type);
+        }
+    }
+
+    private List<JsonElement> getRoots(String ontologyId, String type, boolean obsolete, String lang, Pageable pageable) {
+
+        OlsSolrQuery query = new OlsSolrQuery();
+        query.addFilter("type", List.of(type), SearchType.WHOLE_FIELD);
+        query.addFilter("ontologyId", List.of(ontologyId), SearchType.WHOLE_FIELD);
+        query.addFilter(HAS_DIRECT_PARENTS.getText(), List.of("false"), SearchType.WHOLE_FIELD);
+        query.addFilter(HAS_HIERARCHICAL_PARENTS.getText(), List.of("false"), SearchType.WHOLE_FIELD);
+
+        if (!obsolete)
+            query.addFilter(IS_OBSOLETE.getText(), List.of("false"), SearchType.WHOLE_FIELD);
+
+        return solrClient.searchSolrPaginated(query, pageable).stream().collect(Collectors.toList());
+    }
+
+    private List<Map<String, Object>> getDefaultJsTreeByType(String iri, String ontologyId, String lang, String type){
+
+        switch (type) {
+            case "class":
+                return v1JsTreeRepository.getJsTreeForClass(iri, ontologyId, lang);
+            case "property":
+                return v1JsTreeRepository.getJsTreeForProperty(iri, ontologyId, lang);
+            default:
+                return null;
+        }
+    }
+}
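Step 3 above deduplicates the Solr roots against the Neo4j ancestors by "iri" before concatenating the two lists. A compact, self-contained sketch of the same pattern, with plain string maps standing in for the JSON entities (the helper name is hypothetical):

```java
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

public class MergeByKeySketch {
    // appends items from `extra` whose "iri" is not already present in `base`
    static List<Map<String, String>> mergeByIri(List<Map<String, String>> base, List<Map<String, String>> extra) {
        Set<String> seen = base.stream().map(e -> e.get("iri")).collect(Collectors.toSet());
        List<Map<String, String>> merged = new ArrayList<>(base);
        extra.stream().filter(e -> !seen.contains(e.get("iri"))).forEach(merged::add);
        return merged;
    }

    public static void main(String[] args) {
        List<Map<String, String>> ancestors = List.of(Map.of("iri", "A"), Map.of("iri", "B"));
        List<Map<String, String>> roots = List.of(Map.of("iri", "B"), Map.of("iri", "C"));
        System.out.println(mergeByIri(ancestors, roots)); // [{iri=A}, {iri=B}, {iri=C}]
    }
}
```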
diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1OntologyRepository.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1OntologyRepository.java
index 61fa1c249..2f5a2449d 100644
--- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1OntologyRepository.java
+++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1OntologyRepository.java
@@ -1,18 +1,23 @@
 package uk.ac.ebi.spot.ols.repository.v1;
+import com.google.common.collect.Sets;
+import com.google.gson.JsonElement;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.data.domain.Page;
 import org.springframework.data.domain.Pageable;
 import org.springframework.stereotype.Component;
+import uk.ac.ebi.spot.ols.model.FilterOption;
 import uk.ac.ebi.spot.ols.model.v1.V1Ontology;
+import uk.ac.ebi.spot.ols.model.License;
 import uk.ac.ebi.spot.ols.repository.solr.SearchType;
 import uk.ac.ebi.spot.ols.repository.solr.OlsSolrQuery;
 import uk.ac.ebi.spot.ols.repository.solr.OlsSolrClient;
 import uk.ac.ebi.spot.ols.repository.Validation;
 import uk.ac.ebi.spot.ols.repository.v1.mappers.V1OntologyMapper;
-import java.util.List;
+import java.lang.reflect.Field;
+import java.util.*;
 @Component
 public class V1OntologyRepository {
@@ -26,12 +31,24 @@ public V1Ontology get(String ontologyId, String lang) {
         Validation.validateOntologyId(ontologyId);
         OlsSolrQuery query = new OlsSolrQuery();
-        query.addFilter("type", List.of("ontology"), SearchType.WHOLE_FIELD);
-        query.addFilter("ontologyId", List.of(ontologyId), SearchType.WHOLE_FIELD);
+        query.addFilter("type", List.of("ontology"), SearchType.WHOLE_FIELD);
+        query.addFilter("ontologyId", List.of(ontologyId), SearchType.WHOLE_FIELD);
         return V1OntologyMapper.mapOntology(solrClient.getFirst(query), lang);
     }
+
+    public Set<V1Ontology> getAll(String lang){
+        Set<V1Ontology> tempSet = new HashSet<>();
+        Validation.validateLang(lang);
+
+        OlsSolrQuery query = new OlsSolrQuery();
+        query.addFilter("type", List.of("ontology"), SearchType.WHOLE_FIELD);
+
+        for (JsonElement element : solrClient.getSet(query))
+            tempSet.add(V1OntologyMapper.mapOntology(element, lang));
+        return tempSet;
+    }
+
 public Page<V1Ontology> getAll(String lang, Pageable pageable) {
     Validation.validateLang(lang);
@@ -42,4 +59,272 @@ public Page<V1Ontology> getAll(String lang, Pageable pageable) {
         return solrClient.searchSolrPaginated(query, pageable)
                 .map(result -> V1OntologyMapper.mapOntology(result, lang));
     }
+
+    public Collection<String> filterOntologyIDs(Collection<String> schemas, Collection<String> classifications, Collection<String> ontologies, boolean exclusiveFilter, FilterOption filterOption, String lang){
+        if (schemas != null)
+            schemas.remove("");
+        if (classifications != null)
+            classifications.remove("");
+        if(ontologies != null)
+            ontologies.remove("");
+        if((schemas == null || schemas.size() == 0) && (classifications == null || classifications.size() == 0) && (ontologies == null || ontologies.size() == 0))
+            return null;
+        if ((schemas == null || schemas.size() == 0) || (classifications == null || classifications.size() == 0))
+            return ontologies;
+        Set<V1Ontology> documents;
+        if(FilterOption.COMPOSITE == filterOption)
+            documents = filterComposite(schemas, classifications, exclusiveFilter, lang);
+        else if (FilterOption.LINEAR == filterOption)
+            documents = filter(schemas, classifications, exclusiveFilter, lang);
+        else
+            documents = filterLicense(schemas, classifications, exclusiveFilter, lang);
+        Set<String> filteredOntologySet = new HashSet<>();
+        for (V1Ontology document : documents){
+            filteredOntologySet.add(document.ontologyId);
+        }
+        System.out.println("filteredOntologySet: " + filteredOntologySet);
+        if ((ontologies == null || ontologies.size() == 0) && filteredOntologySet.size() > 0)
+            return filteredOntologySet;
+        else if (schemas != null)
+            if ((ontologies == null || ontologies.size() == 0) && (schemas.size() > 0 || classifications.size() > 0))
+                return new HashSet<>(Arrays.asList("nosuchontologyfound"));
+
+        Set<String> postFilterOntologySet;
+
+        if(ontologies == null){
+            ontologies = new HashSet<>();
+            System.out.println("ontologies == null");
+        } else {
+            ontologies = new HashSet<>(ontologies);
+            System.out.println("ontologies <> null");
+        }
+
+        System.out.println("ontologies: " + ontologies);
+        if (exclusiveFilter){
+            postFilterOntologySet = Sets.intersection(filteredOntologySet, new HashSet<>(ontologies));
+            System.out.println("intersection");
+        } else {
+            postFilterOntologySet = Sets.union(filteredOntologySet, new HashSet<>(ontologies));
+            System.out.println("union");
+        }
+        if(postFilterOntologySet.size() == 0)
+            postFilterOntologySet = new HashSet<>(Arrays.asList("nosuchontologyfound"));
+        return postFilterOntologySet;
+    }
+
+    public Set<V1Ontology> filter(Collection<String> schemas, Collection<String> classifications, boolean exclusive, String lang){
+        if(exclusive)
+            return exclusiveFilter(schemas, classifications, lang);
+        else
+            return inclusiveFilter(schemas, classifications, lang);
+    }
+
+    public Set<V1Ontology> inclusiveFilter(Collection<String> schemas, Collection<String> classifications, String lang){
+        Set<V1Ontology> tempSet = new HashSet<>();
+        Set<V1Ontology> filteredSet = new HashSet<>();
+        tempSet.addAll(getAll(lang));
+
+        for (V1Ontology ontology : tempSet){
+            for (Field field : ontology.config.getClass().getDeclaredFields()){
+                if (schemas.contains(field.getName())){
+                    try {
+                        if(field.get(ontology.config) != null)
+                            if (Collection.class.isAssignableFrom(field.getType())) {
+                                for (String ontologyClassification : (Collection<String>) field.get(ontology.config)){
+                                    if(classifications.contains(ontologyClassification))
+                                        filteredSet.add(ontology);
+                                }
+                            } else if (String.class.isAssignableFrom(field.getType())) {
+                                if(field.get(ontology.config) != null)
+                                    if(classifications.contains(field.get(ontology.config)))
+                                        filteredSet.add(ontology);
+                            }
+                    } catch (IllegalAccessException e) {
+                        throw new RuntimeException(e);
+                    }
+                }
+            }
+        }
+        return filteredSet;
+    }
+
+    public Set<V1Ontology> exclusiveFilter(Collection<String> schemas, Collection<String> classifications, String lang){
+        Set<V1Ontology> tempSet = new HashSet<>();
+        Set<V1Ontology> filteredSet = new HashSet<>();
+        tempSet.addAll(getAll(lang));
+
+        for (V1Ontology ontology : tempSet){
+            Set<String> fieldSet = new HashSet<>();
+            for (Field field : ontology.config.getClass().getDeclaredFields()){
+                fieldSet.add(field.getName());
+            }
+            if (fieldSet.containsAll(schemas)){
+                Set<String> tempClassifications = new HashSet<>();
+                for (Field field : ontology.config.getClass().getDeclaredFields()){
+                    if (Collection.class.isAssignableFrom(field.getType())){
+                        try {
+                            if(field.get(ontology.config) != null)
+                                for (String classification : classifications){
+                                    if(((Collection<String>) field.get(ontology.config)).contains(classification))
+                                        tempClassifications.add(classification);
+                                }
+
+                        } catch (IllegalAccessException e) {
+                            throw new RuntimeException(e);
+                        }
+                    }
+                    else if (String.class.isAssignableFrom(field.getType())) {
+                        try {
+                            if(field.get(ontology.config) != null)
+                                if(classifications.contains((String) field.get(ontology.config)))
+                                    tempClassifications.add((String) field.get(ontology.config));
+                        } catch (IllegalAccessException e) {
+                            throw new RuntimeException(e);
+                        }
+                    }
+
+                }
+                if(tempClassifications.containsAll(classifications))
+                    filteredSet.add(ontology);
+            }
+        }
+        return filteredSet;
+    }
+
+    public Set<V1Ontology> filterComposite(Collection<String> schemas, Collection<String> classifications, boolean exclusive, String lang){
+        Set<V1Ontology> tempSet = new HashSet<>();
+        if(schemas != null && classifications != null)
+            if(!exclusive) {
+                for (V1Ontology ontologyDocument : getAll(lang)) {
+                    if(ontologyDocument.config.classifications != null)
+                        if (!((Collection<Map<String, Collection<String>>>) ontologyDocument.config.classifications).isEmpty())
+                            for(Map<String, Collection<String>> classificationSchema : (Collection<Map<String, Collection<String>>>) ontologyDocument.config.classifications) {
+                                for (String schema : schemas)
+                                    if(classificationSchema.containsKey(schema))
+                                        for (String classification : classifications) {
+                                            if (classificationSchema.get(schema) != null)
+                                                if (!classificationSchema.get(schema).isEmpty())
+                                                    if (classificationSchema.get(schema).contains(classification)) {
+                                                        tempSet.add(ontologyDocument);
+                                                    }
+                                        }
+
+                            }
+                }
+            } else if (exclusive && schemas != null && schemas.size() == 1 && classifications != null && classifications.size() == 1) {
+                String schema = schemas.iterator().next();
+                String classification = classifications.iterator().next();
+                System.out.println("schema: " + schema);
+                System.out.println("classification: " + classification);
+                for (V1Ontology ontologyDocument : getAll(lang)){
+                    if(ontologyDocument.config.classifications != null)
+                        if (!((Collection<Map<String, Collection<String>>>) ontologyDocument.config.classifications).isEmpty())
+                            for(Map<String, Collection<String>> classificationSchema : (Collection<Map<String, Collection<String>>>) ontologyDocument.config.classifications){
+                                if(classificationSchema.containsKey(schema))
+                                    if (classificationSchema.get(schema) != null)
+                                        if (!classificationSchema.get(schema).isEmpty()){
+                                            for (String s : classificationSchema.get(schema))
+                                                System.out.println(s);
+                                            if(classificationSchema.get(schema).contains(classification))
+                                                tempSet.add(ontologyDocument);
+                                        }
+                            }
+                }
+            } else {
+                for (V1Ontology ontologyDocument : getAll(lang)) {
+                    Set<String> tempClassifications = new HashSet<>();
+                    if(ontologyDocument.config.classifications != null)
+                        if (!((Collection<Map<String, Collection<String>>>) ontologyDocument.config.classifications).isEmpty()) {
+                            for (Map<String, Collection<String>> classificationSchema : (Collection<Map<String, Collection<String>>>) ontologyDocument.config.classifications) {
+                                for (String schema : schemas)
+                                    if (classificationSchema.containsKey(schema)) {
+                                        for (String classification : classifications) {
+                                            if (classificationSchema.get(schema) != null) {
+                                                if (!classificationSchema.get(schema).isEmpty()) {
+                                                    if (classificationSchema.get(schema).contains(classification)) {
+                                                        tempClassifications.add(classification);
+                                                    }
+                                                }
+                                            }
+                                        }
+                                    }
+                            }
+                            if (tempClassifications.containsAll(classifications))
+                                tempSet.add(ontologyDocument);
+                        }
+                }
+            }
+        return tempSet;
+    }
+
+    public Set<V1Ontology> filterLicense(Collection<String> schemas, Collection<String> classifications, boolean exclusive, String lang){
+        Set<V1Ontology> tempSet = new HashSet<>();
+        Set<V1Ontology> filteredSet = new HashSet<>();
+        tempSet.addAll(getAll(lang));
+
+        for (V1Ontology ontology : tempSet){
+            if (ontology.config.license != null){
+                License license = ontology.config.license;
+                String label = license.getLabel() != null ? (String) license.getLabel() : "";
+                String logo = license.getLogo() != null ? (String) license.getLogo() : "";
+                String url = license.getUrl() != null ? (String) license.getUrl() : "";
+                if (exclusive){
+                    Set<String> tempClassifications = new HashSet<>();
+                    if (schemas.contains("license.label") && label.length() > 0 && classifications.contains(label))
+                        tempClassifications.add("license.label");
+                    if (schemas.contains("license.logo") && logo.length() > 0 && classifications.contains(logo))
+                        tempClassifications.add("license.logo");
+                    if (schemas.contains("license.url") && url.length() > 0 && classifications.contains(url))
+                        tempClassifications.add("license.url");
+
+                    if(tempClassifications.containsAll(classifications))
+                        filteredSet.add(ontology);
+
+                } else {
+                    if (schemas.contains("license.label") && label.length() > 0 && classifications.contains(label))
+                        filteredSet.add(ontology);
+                    if (schemas.contains("license.logo") && logo.length() > 0 && classifications.contains(logo))
+                        filteredSet.add(ontology);
+                    if (schemas.contains("license.url") && url.length() > 0 && classifications.contains(url))
+                        filteredSet.add(ontology);
+                }
+            }
+        }
+
+        return filteredSet;
+    }
+
+    public Set<String> getSchemaKeys(String lang){
+        Set<V1Ontology> tempSet = new HashSet<>();
+        tempSet.addAll(getAll(lang));
+        Set<String> keys = new HashSet<>();
+        for (V1Ontology ontology : tempSet){
+            if (ontology.config.classifications != null){
+                Collection temp = (Collection) ontology.config.classifications;
+                for (Object o : temp){
+                    keys.addAll(((Map<String, Collection<String>>) o).keySet());
+                }
+            }
+        }
+        return keys;
+    }
+
+    public Set<String> getSchemaValues(Collection<String> schemas, String lang){
+        Set<V1Ontology> tempSet = new HashSet<>();
+        tempSet.addAll(getAll(lang));
+        Set<String> values = new HashSet<>();
+        for (V1Ontology ontology : tempSet){
+            if (ontology.config.classifications != null){
+                Collection temp = (Collection) ontology.config.classifications;
+                for (Object o : temp){
+                    for (Map.Entry<String, Collection<String>> entry : ((Map<String, Collection<String>>) o).entrySet())
+                        for (String value : entry.getValue())
+                            if(schemas.contains(entry.getKey()))
+                                values.add(value);
+                }
+            }
+        }
+        return values;
+    }
+
 }
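The final combination step in filterOntologyIDs reduces to a set union (inclusive) or intersection (exclusive) over ontology ids, with "nosuchontologyfound" used as a sentinel for an empty result so downstream Solr filters match nothing instead of everything. A toy illustration with Guava; the ontology ids are made up:

```java
import com.google.common.collect.Sets;
import java.util.Set;

public class FilterCombineSketch {
    public static void main(String[] args) {
        Set<String> fromClassifications = Set.of("efo", "go");   // ids matched by schema/classification filters
        Set<String> requested = Set.of("go", "chebi");           // ids passed in explicitly

        System.out.println(Sets.union(fromClassifications, requested));        // inclusive: efo, go, chebi
        System.out.println(Sets.intersection(fromClassifications, requested)); // exclusive: go
        // an empty result would be replaced by the sentinel id "nosuchontologyfound"
    }
}
```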
diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1TermRepository.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1TermRepository.java
index fa84de098..f66e7c264 100644
--- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1TermRepository.java
+++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v1/V1TermRepository.java
@@ -4,6 +4,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.cache.annotation.Cacheable;
 import org.springframework.data.domain.Page;
 import org.springframework.data.domain.Pageable;
 import org.springframework.stereotype.Component;
@@ -17,9 +18,13 @@
 import uk.ac.ebi.spot.ols.repository.v1.mappers.V1TermMapper;
 import static uk.ac.ebi.ols.shared.DefinedFields.*;
+import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.HashSet;
+import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 @Component
 public class V1TermRepository {
@@ -307,4 +312,225 @@ public Page getInstances(String ontologyId, String iri, Pageable pageable) {
         throw new RuntimeException();
     }
+
+
+    @Cacheable(value = "concepttree", key="#ontologyId.concat('-').concat(#schema).concat('-').concat(#narrower).concat('-').concat(#withChildren)")
+    public List<TreeNode<V1Term>> conceptTree (String ontologyId, boolean schema, boolean narrower, boolean withChildren, Boolean obsoletes, String lang, Pageable pageable){
+        Page<V1Term> terms = this.findAllByOntology(ontologyId, obsoletes, lang, pageable);
+        List<V1Term> listOfTerms = new ArrayList<>();
+        listOfTerms.addAll(terms.getContent());
+
+        while(terms.hasNext()) {
+            terms = this.findAllByOntology(ontologyId, obsoletes, lang, terms.nextPageable());
+            listOfTerms.addAll(terms.getContent());
+        }
+
+        List<TreeNode<V1Term>> rootTerms = new ArrayList<>();
+        int count = 0;
+
+        if(schema) {
+            for (V1Term term : listOfTerms)
+                if (term.annotation.get("hasTopConcept") != null) {
+                    for (String iriTopConcept : (LinkedHashSet<String>) term.annotation.get("hasTopConcept")) {
+                        V1Term topConceptTerm = findTerm(listOfTerms, iriTopConcept);
+                        TreeNode<V1Term> topConcept = new TreeNode<>(topConceptTerm);
+                        topConcept.setIndex(String.valueOf(++count));
+                        if(withChildren) {
+                            if(narrower)
+                                populateChildrenandRelatedByNarrower(topConceptTerm, topConcept, listOfTerms);
+                            else
+                                populateChildrenandRelatedByBroader(topConceptTerm, topConcept, listOfTerms);
+                        }
+                        rootTerms.add(topConcept);
+                    }
+                }
+        } else for (V1Term term : listOfTerms) {
+            TreeNode<V1Term> tree = new TreeNode<>(term);
+
+            if (tree.isRoot() && term.annotation.get("topConceptOf") != null) {
+                tree.setIndex(String.valueOf(++count));
+                if(withChildren) {
+                    if(narrower)
+                        populateChildrenandRelatedByNarrower(term, tree, listOfTerms);
+                    else
+                        populateChildrenandRelatedByBroader(term, tree, listOfTerms);
+                }
+                rootTerms.add(tree);
+            }
+        }
+
+        return rootTerms;
+    }
+
+    @Cacheable(value = "concepttree", key="#ontologyId.concat('-').concat(#narrower).concat('-').concat(#withChildren)")
+    public List<TreeNode<V1Term>> conceptTreeWithoutTop (String ontologyId, boolean narrower, boolean withChildren, Boolean obsoletes, String lang, Pageable pageable){
+        Page<V1Term> terms = this.findAllByOntology(ontologyId, obsoletes, lang, pageable);
+        List<V1Term> listOfTerms = new ArrayList<>();
+        listOfTerms.addAll(terms.getContent());
+
+        while(terms.hasNext()) {
+            terms = this.findAllByOntology(ontologyId, obsoletes, lang, terms.nextPageable());
+            listOfTerms.addAll(terms.getContent());
+        }
+
+        Set<String> rootIRIs = new HashSet<>();
+        List<TreeNode<V1Term>> rootTerms = new ArrayList<>();
+        int count = 0;
+        if(!narrower) {
+            for (V1Term term : listOfTerms) {
+                if(term.annotation != null && term.annotation.get("broader") != null) {
+                    for (String iriBroader : (LinkedHashSet<String>) term.annotation.get("broader")) {
+                        V1Term broaderTerm = findTerm(listOfTerms, iriBroader);
+                        if (broaderTerm.annotation != null && broaderTerm.annotation.get("broader") == null) {
+                            rootIRIs.add(iriBroader);
+                        }
+
+                    }
+                }
+            }
+
+            for (String iri : rootIRIs) {
+                V1Term topConceptTerm = findTerm(listOfTerms, iri);
+                TreeNode<V1Term> topConcept = new TreeNode<>(topConceptTerm);
+                topConcept.setIndex(String.valueOf(++count));
+                if(withChildren)
+                    populateChildrenandRelatedByBroader(topConceptTerm, topConcept, listOfTerms);
+                rootTerms.add(topConcept);
+            }
+
+        } else {
+            for (V1Term term : listOfTerms) {
+                if (term.annotation != null && term.annotation.get("narrower") != null) {
+                    boolean root = true;
+                    for (V1Term v1Term : listOfTerms) {
+                        if (v1Term.annotation != null && v1Term.annotation.get("narrower") != null) {
+                            for (String iriNarrower : (LinkedHashSet<String>) v1Term.annotation.get("narrower")) {
+                                if (term.iri.equals(iriNarrower))
+                                    root = false;
+                            }
+                        }
+                    }
+
+                    if (root) {
+                        TreeNode<V1Term> topConcept = new TreeNode<>(term);
+                        topConcept.setIndex(String.valueOf(++count));
+                        if (withChildren)
+                            populateChildrenandRelatedByNarrower(term, topConcept, listOfTerms);
+                        rootTerms.add(topConcept);
+                    }
+                }
+            }
+        }
+
+        return rootTerms;
+    }
+
+    @Cacheable(value = "concepttree", key="#ontologyId.concat('-').concat('s').concat('-').concat(#iri).concat('-').concat(#narrower).concat('-').concat(#index)")
+    public TreeNode<V1Term> conceptSubTree(String ontologyId, String iri, boolean narrower, String index, Boolean obsoletes, String lang, Pageable pageable){
+        Page<V1Term> terms = this.findAllByOntology(ontologyId, obsoletes, lang, pageable);
+        List<V1Term> listOfTerms = new ArrayList<>();
+        listOfTerms.addAll(terms.getContent());
+
+        while(terms.hasNext()) {
+            terms = this.findAllByOntology(ontologyId, obsoletes, lang, terms.nextPageable());
+            listOfTerms.addAll(terms.getContent());
+        }
+
+        V1Term topConceptTerm = findTerm(listOfTerms, iri);
+        TreeNode<V1Term> topConcept = new TreeNode<>(topConceptTerm);
+        topConcept.setIndex(index);
+        if(narrower)
+            populateChildrenandRelatedByNarrower(topConceptTerm, topConcept, listOfTerms);
+        else
+            populateChildrenandRelatedByBroader(topConceptTerm, topConcept, listOfTerms);
+
+        return topConcept;
+    }
+
+    public V1Term findTerm(List<V1Term> wholeList, String iri) {
+        for (V1Term term : wholeList)
+            if(term.iri.equals(iri))
+                return term;
+        return new V1Term();
+    }
+
+    public List<V1Term> findRelated(String ontologyId, String iri, String relationType, String lang) {
+        List<V1Term> related = new ArrayList<>();
+        V1Term term = this.findByOntologyAndIri(ontologyId, iri, lang);
+        if (term != null)
+            if (term.annotation.get(relationType) != null)
+                for (String iriBroader : (LinkedHashSet<String>) term.annotation.get(relationType))
+                    related.add(this.findByOntologyAndIri(ontologyId, iriBroader, lang));
+
+        return related;
+    }
+
+    public List<V1Term> findRelatedIndirectly(String ontologyId, String iri, String relationType, Boolean obsoletes, String lang, Pageable pageable){
+        List<V1Term> related = new ArrayList<>();
+
+        V1Term v1Term = this.findByOntologyAndIri(ontologyId, iri, lang);
+        if(v1Term == null)
+            return related;
+        if(v1Term.iri == null)
+            return related;
+
+        Page<V1Term> terms = this.findAllByOntology(ontologyId, obsoletes, lang, pageable);
+        List<V1Term> listOfTerms = new ArrayList<>();
+        listOfTerms.addAll(terms.getContent());
+
+        while(terms.hasNext()) {
+            terms = this.findAllByOntology(ontologyId, obsoletes, lang, terms.nextPageable());
+            listOfTerms.addAll(terms.getContent());
+        }
+
+        for (V1Term term : listOfTerms) {
+            if (term != null)
+                if (term.annotation.get(relationType) != null)
+                    for (String iriRelated : (LinkedHashSet<String>) term.annotation.get(relationType))
+                        if(iriRelated.equals(iri))
+                            related.add(term);
+        }
+
+        return related;
+    }
+
+    public void populateChildrenandRelatedByNarrower(V1Term term, TreeNode<V1Term> tree, List<V1Term> listOfTerms) {
+
+        if (term.annotation != null)
+            for (String iriRelated : (LinkedHashSet<String>) term.annotation.getOrDefault("related", new LinkedHashSet<String>())) {
+                TreeNode<V1Term> related = new TreeNode<>(findTerm(listOfTerms, iriRelated));
+                related.setIndex(tree.getIndex() + ".related");
+                tree.addRelated(related);
+            }
+        int count = 0;
+        if (term.annotation != null)
+            for (String iriChild : (LinkedHashSet<String>) term.annotation.getOrDefault("narrower", new LinkedHashSet<String>())) {
+                V1Term childTerm = findTerm(listOfTerms, iriChild);
+                TreeNode<V1Term> child = new TreeNode<>(childTerm);
+                child.setIndex(tree.getIndex() + "." + ++count);
+                populateChildrenandRelatedByNarrower(childTerm, child, listOfTerms);
+                tree.addChild(child);
+            }
+    }
+
+    public void populateChildrenandRelatedByBroader(V1Term term, TreeNode<V1Term> tree, List<V1Term> listOfTerms) {
+        if (term.annotation != null)
+            for (String iriRelated : (LinkedHashSet<String>) term.annotation.getOrDefault("related", new LinkedHashSet<String>())) {
+                TreeNode<V1Term> related = new TreeNode<>(findTerm(listOfTerms, iriRelated));
+                related.setIndex(tree.getIndex() + ".related");
+                tree.addRelated(related);
+            }
+        int count = 0;
+        for (V1Term v1Term : listOfTerms) {
+            if (v1Term.annotation != null)
+                for (String iriBroader : (LinkedHashSet<String>) v1Term.annotation.getOrDefault("broader", new LinkedHashSet<String>()))
+                    if(term.iri != null)
+                        if (term.iri.equals(iriBroader)) {
+                            TreeNode<V1Term> child = new TreeNode<>(v1Term);
+                            child.setIndex(tree.getIndex() + "." + ++count);
+                            populateChildrenandRelatedByBroader(v1Term, child, listOfTerms);
+                            tree.addChild(child);
+                        }
+        }
+    }
 }
localizedJson.has("isSkos") && localizedJson.get("isSkos").getAsBoolean(); + ontology.config.repoUrl = JsonHelper.getString(localizedJson, "repo_url"); + if(ontology.config.isSkos) { + ontology.config.skosNarrower = localizedJson.has("skosNarrower") && localizedJson.get("skosNarrower").getAsBoolean(); + if (localizedJson.has("skosRoot")) + ontology.config.skosRoot = TopConceptEnum.valueOf(localizedJson.get("skosRoot").getAsString()); + } ontology.config.allowDownload = localizedJson.has("allowDownload") && localizedJson.get("allowDownload").getAsBoolean(); - - ontology.status = "LOADED"; ontology.numberOfTerms = Integer.parseInt(JsonHelper.getString(localizedJson, "numberOfClasses")); diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2ClassRepository.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2ClassRepository.java index eed89cb17..9caba91e7 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2ClassRepository.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2ClassRepository.java @@ -2,10 +2,12 @@ package uk.ac.ebi.spot.ols.repository.v2; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Primary; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.data.rest.webmvc.ResourceNotFoundException; import org.springframework.stereotype.Component; +import uk.ac.ebi.spot.ols.controller.api.v2.helpers.DynamicQueryHelper; import uk.ac.ebi.spot.ols.model.v2.V2Entity; import uk.ac.ebi.spot.ols.repository.neo4j.OlsNeo4jClient; import uk.ac.ebi.spot.ols.repository.solr.SearchType; @@ -21,11 +23,8 @@ import static uk.ac.ebi.ols.shared.DefinedFields.*; import java.io.IOException; -import java.util.Arrays; -import java.util.Collection; -import java.util.List; -import java.util.Map; - +import java.util.*; +@Primary @Component public class V2ClassRepository { @@ -35,6 +34,7 @@ public class V2ClassRepository { @Autowired OlsNeo4jClient neo4jClient; + public OlsFacetedResultsPage find( Pageable pageable, String lang, String search, String searchFields, String boostFields, boolean exactMatch, Map> properties) throws IOException { @@ -84,7 +84,7 @@ public OlsFacetedResultsPage findByOntologyId( .map(V2Entity::new); } - public V2Entity getByOntologyIdAndIri(String ontologyId, String iri, String lang) throws ResourceNotFoundException { + public V2Entity findByOntologyAndIri(String ontologyId, String iri, String lang) throws ResourceNotFoundException { Validation.validateOntologyId(ontologyId); Validation.validateLang(lang); @@ -176,4 +176,29 @@ public Page getIndividualAncestorsByOntologyId(String ontologyId, Page .map(RemoveLiteralDatatypesTransform::transform) .map(V2Entity::new); } + + + public List allClassesOfOntology(String ontologyId, Boolean obsoletes, Pageable pageable, String lang) throws IOException { + Map> properties = new HashMap<>(); + if(!obsoletes) + properties.put("isObsolete", List.of("false")); + + Page terms = this.findByOntologyId(ontologyId, pageable, lang, null, null, null, false, DynamicQueryHelper.filterProperties(properties)); + List listOfTerms = new ArrayList(); + listOfTerms.addAll(terms.getContent()); + + while(terms.hasNext()) { + terms = findByOntologyId(ontologyId, terms.nextPageable(), lang, null, null, null, false, DynamicQueryHelper.filterProperties(properties)); + listOfTerms.addAll(terms.getContent()); + } + + return listOfTerms; + } + + public List getRelationsAsList(V2Entity 
entity, String relationType){ + if(entity.any().get(relationType) instanceof String) + return Arrays.asList((String) entity.any().get(relationType)); + else + return (ArrayList) entity.any().getOrDefault(relationType, new ArrayList()); + } } diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2OntologyRepository.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2OntologyRepository.java index 283a867f2..2be74cbc7 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2OntologyRepository.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2OntologyRepository.java @@ -1,10 +1,14 @@ package uk.ac.ebi.spot.ols.repository.v2; +import com.google.common.collect.Sets; +import com.google.gson.JsonElement; +import com.google.gson.internal.LinkedTreeMap; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Pageable; import org.springframework.data.rest.webmvc.ResourceNotFoundException; import org.springframework.stereotype.Component; +import uk.ac.ebi.spot.ols.model.FilterOption; import uk.ac.ebi.spot.ols.model.v2.V2Entity; import uk.ac.ebi.spot.ols.repository.neo4j.OlsNeo4jClient; import uk.ac.ebi.spot.ols.repository.solr.SearchType; @@ -17,11 +21,11 @@ import uk.ac.ebi.spot.ols.repository.v2.helpers.V2DynamicFilterParser; import uk.ac.ebi.spot.ols.repository.v2.helpers.V2SearchFieldsParser; -import java.util.Collection; -import java.util.List; - +import java.time.LocalDateTime; +import java.time.format.DateTimeFormatter; +import java.util.*; import java.io.IOException; -import java.util.Map; + import static uk.ac.ebi.ols.shared.DefinedFields.*; @@ -36,7 +40,7 @@ public class V2OntologyRepository { public OlsFacetedResultsPage find( - Pageable pageable, String lang, String search, String searchFields, String boostFields, boolean exactMatch, Map> properties) throws IOException { + Pageable pageable, String lang, String search, String searchFields, String boostFields, boolean exactMatch, Map> properties, Collection schemas,Collection classifications,Collection ontologies,boolean exclusive,FilterOption filterOption) throws IOException { Validation.validateLang(lang); @@ -49,6 +53,14 @@ public OlsFacetedResultsPage find( query.setSearchText(search); query.setExactMatch(exactMatch); query.addFilter("type", List.of("ontology"), SearchType.WHOLE_FIELD); + System.out.println("0"); + Collection filteredOntologies = filterOntologyIDs(schemas,classifications, ontologies, exclusive, filterOption, lang); + if(filteredOntologies != null){ + for (String ontologyId : filteredOntologies) + Validation.validateOntologyId(ontologyId); + query.addFilter("ontologyId",filteredOntologies, SearchType.CASE_INSENSITIVE_TOKENS); + } + V2SearchFieldsParser.addSearchFieldsToQuery(query, searchFields); V2SearchFieldsParser.addBoostFieldsToQuery(query, boostFields); V2DynamicFilterParser.addDynamicFiltersToQuery(query, properties); @@ -79,7 +91,285 @@ public V2Entity getById(String ontologyId, String lang) throws ResourceNotFoundE ); } + public Set getOntologies(String lang){ + Set entities = new HashSet<>(); + OlsSolrQuery query = new OlsSolrQuery(); + + query.addFilter("type", List.of("ontology"), SearchType.WHOLE_FIELD); + for (JsonElement element : solrClient.getSet(query)) + entities.add(new V2Entity( + LocalizationTransform.transform( + RemoveLiteralDatatypesTransform.transform( + element + ), + lang + ) + )); + return entities; + } + + public LocalDateTime getLastLoaded(Collection ontologies,String lang){ + 
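getRelationsAsList papers over the fact that single-valued JSON properties arrive as a bare String while multi-valued ones arrive as a list. A standalone sketch of the same normalisation (the map values are made up for illustration):

```java
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

public class RelationListSketch {
    @SuppressWarnings("unchecked")
    static List<String> asList(Map<String, Object> any, String key) {
        Object value = any.getOrDefault(key, new ArrayList<String>());
        // one parent serialised as a bare string -> wrap it; several parents -> already a list
        return value instanceof String ? Arrays.asList((String) value) : (List<String>) value;
    }

    public static void main(String[] args) {
        System.out.println(asList(Map.of("broader", "iri:A"), "broader"));                   // [iri:A]
        System.out.println(asList(Map.of("broader", List.of("iri:A", "iri:B")), "broader")); // [iri:A, iri:B]
        System.out.println(asList(Map.of(), "broader"));                                     // []
    }
}
```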
diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2OntologyRepository.java b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2OntologyRepository.java
index 283a867f2..2be74cbc7 100644
--- a/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2OntologyRepository.java
+++ b/backend/src/main/java/uk/ac/ebi/spot/ols/repository/v2/V2OntologyRepository.java
@@ -1,10 +1,14 @@
 package uk.ac.ebi.spot.ols.repository.v2;
+import com.google.common.collect.Sets;
+import com.google.gson.JsonElement;
+import com.google.gson.internal.LinkedTreeMap;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.data.domain.Pageable;
 import org.springframework.data.rest.webmvc.ResourceNotFoundException;
 import org.springframework.stereotype.Component;
+import uk.ac.ebi.spot.ols.model.FilterOption;
 import uk.ac.ebi.spot.ols.model.v2.V2Entity;
 import uk.ac.ebi.spot.ols.repository.neo4j.OlsNeo4jClient;
 import uk.ac.ebi.spot.ols.repository.solr.SearchType;
@@ -17,11 +21,11 @@
 import uk.ac.ebi.spot.ols.repository.v2.helpers.V2DynamicFilterParser;
 import uk.ac.ebi.spot.ols.repository.v2.helpers.V2SearchFieldsParser;
-import java.util.Collection;
-import java.util.List;
-
+import java.time.LocalDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.*;
 import java.io.IOException;
-import java.util.Map;
+
 import static uk.ac.ebi.ols.shared.DefinedFields.*;
@@ -36,7 +40,7 @@ public class V2OntologyRepository {
     public OlsFacetedResultsPage<V2Entity> find(
-            Pageable pageable, String lang, String search, String searchFields, String boostFields, boolean exactMatch, Map<String, Collection<String>> properties) throws IOException {
+            Pageable pageable, String lang, String search, String searchFields, String boostFields, boolean exactMatch, Map<String, Collection<String>> properties, Collection<String> schemas, Collection<String> classifications, Collection<String> ontologies, boolean exclusive, FilterOption filterOption) throws IOException {
         Validation.validateLang(lang);
@@ -49,6 +53,14 @@ public OlsFacetedResultsPage<V2Entity> find(
         query.setSearchText(search);
         query.setExactMatch(exactMatch);
         query.addFilter("type", List.of("ontology"), SearchType.WHOLE_FIELD);
+        System.out.println("0");
+        Collection<String> filteredOntologies = filterOntologyIDs(schemas, classifications, ontologies, exclusive, filterOption, lang);
+        if(filteredOntologies != null){
+            for (String ontologyId : filteredOntologies)
+                Validation.validateOntologyId(ontologyId);
+            query.addFilter("ontologyId", filteredOntologies, SearchType.CASE_INSENSITIVE_TOKENS);
+        }
+
         V2SearchFieldsParser.addSearchFieldsToQuery(query, searchFields);
         V2SearchFieldsParser.addBoostFieldsToQuery(query, boostFields);
         V2DynamicFilterParser.addDynamicFiltersToQuery(query, properties);
@@ -79,7 +91,285 @@ public V2Entity getById(String ontologyId, String lang) throws ResourceNotFoundException {
         );
     }
+    public Set<V2Entity> getOntologies(String lang){
+        Set<V2Entity> entities = new HashSet<>();
+        OlsSolrQuery query = new OlsSolrQuery();
+
+        query.addFilter("type", List.of("ontology"), SearchType.WHOLE_FIELD);
+        for (JsonElement element : solrClient.getSet(query))
+            entities.add(new V2Entity(
+                    LocalizationTransform.transform(
+                            RemoveLiteralDatatypesTransform.transform(
+                                    element
+                            ),
+                            lang
+                    )
+            ));
+        return entities;
+    }
+
+    public LocalDateTime getLastLoaded(Collection<String> ontologies, String lang){
+        LocalDateTime lastLoaded = LocalDateTime.MIN;
+        DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSSSSSSS");
+        for (V2Entity entity : getOntologies(lang)){
+            if (ontologies.contains(entity.any().get("ontologyId").toString())){
+                LocalDateTime dateTime = entity.any().get("loaded") != null ? LocalDateTime.parse(entity.any().get("loaded").toString(), formatter) : LocalDateTime.MIN;
+                if (dateTime.isAfter(lastLoaded))
+                    lastLoaded = dateTime;
+            }
+        }
+        return lastLoaded;
+    }
+
+    public Collection<String> filterOntologyIDs(Collection<String> schemas, Collection<String> classifications, Collection<String> ontologies, boolean exclusiveFilter, FilterOption filterOption, String lang){
+        if (schemas != null)
+            schemas.remove("");
+        if (classifications != null)
+            classifications.remove("");
+        if(ontologies != null)
+            ontologies.remove("");
+        if((schemas == null || schemas.size() == 0) && (classifications == null || classifications.size() == 0) && (ontologies == null || ontologies.size() == 0))
+            return null;
+        if ((schemas == null || schemas.size() == 0) || (classifications == null || classifications.size() == 0))
+            return ontologies;
+        Set<V2Entity> documents;
+        if(FilterOption.COMPOSITE == filterOption)
+            documents = filterComposite(schemas, classifications, exclusiveFilter, lang);
+        else if (FilterOption.LINEAR == filterOption)
+            documents = filter(schemas, classifications, exclusiveFilter, lang);
+        else
+            documents = filterLicense(schemas, classifications, exclusiveFilter, lang);
+        Set<String> filteredOntologySet = new HashSet<>();
+        for (V2Entity document : documents){
+            filteredOntologySet.add(document.any().get("ontologyId").toString());
+        }
+        System.out.println("filteredOntologySet: " + filteredOntologySet);
+        if ((ontologies == null || ontologies.size() == 0) && filteredOntologySet.size() > 0)
+            return filteredOntologySet;
+        else if (schemas != null)
+            if ((ontologies == null || ontologies.size() == 0) && (schemas.size() > 0 || classifications.size() > 0))
+                return new HashSet<>(Arrays.asList("nosuchontologyfound"));
+
+        Set<String> postFilterOntologySet;
+
+        if(ontologies == null){
+            ontologies = new HashSet<>();
+            System.out.println("ontologies == null");
+        } else {
+            ontologies = new HashSet<>(ontologies);
+            System.out.println("ontologies <> null");
+        }
+
+        System.out.println("ontologies: " + ontologies);
+        if (exclusiveFilter){
+            postFilterOntologySet = Sets.intersection(filteredOntologySet, new HashSet<>(ontologies));
+            System.out.println("intersection");
+        } else {
+            postFilterOntologySet = Sets.union(filteredOntologySet, new HashSet<>(ontologies));
+            System.out.println("union");
+        }
+        if(postFilterOntologySet.size() == 0)
+            postFilterOntologySet = new HashSet<>(Arrays.asList("nosuchontologyfound"));
+        return postFilterOntologySet;
+    }
+
+    public Set<V2Entity> filterComposite(Collection<String> schemas, Collection<String> classifications, boolean exclusive, String lang){
+        Set<V2Entity> tempSet = new HashSet<>();
+        if(schemas != null && classifications != null)
+            if(!exclusive) {
+                for (V2Entity ontologyDocument : getOntologies(lang)) {
+                    if(ontologyDocument.any().get("classifications") != null)
+                        if (!((Collection<Map<String, Collection<String>>>) ontologyDocument.any().get("classifications")).isEmpty())
+                            for(Map<String, Collection<String>> classificationSchema : (Collection<Map<String, Collection<String>>>) ontologyDocument.any().get("classifications")) {
+                                for (String schema : schemas)
+                                    if(classificationSchema.containsKey(schema))
+                                        for (String classification : classifications) {
+                                            if (classificationSchema.get(schema) != null)
+                                                if (!classificationSchema.get(schema).isEmpty())
+                                                    if (classificationSchema.get(schema).contains(classification)) {
+                                                        tempSet.add(ontologyDocument);
+                                                    }
+                                        }
+
+                            }
+                }
+            } else if (exclusive && schemas != null && schemas.size() == 1 && classifications != null && classifications.size() == 1) {
+                String schema = schemas.iterator().next();
+                String classification = classifications.iterator().next();
+                System.out.println("schema: " + schema);
+                System.out.println("classification: " + classification);
+                for (V2Entity ontologyDocument : getOntologies(lang)){
+                    if(ontologyDocument.any().get("classifications") != null)
+                        if (!((Collection<Map<String, Collection<String>>>) ontologyDocument.any().get("classifications")).isEmpty())
+                            for(Map<String, Collection<String>> classificationSchema : (Collection<Map<String, Collection<String>>>) ontologyDocument.any().get("classifications")){
+                                if(classificationSchema.containsKey(schema))
+                                    if (classificationSchema.get(schema) != null)
+                                        if (!classificationSchema.get(schema).isEmpty()){
+                                            for (String s : classificationSchema.get(schema))
+                                                System.out.println(s);
+                                            if(classificationSchema.get(schema).contains(classification))
+                                                tempSet.add(ontologyDocument);
+                                        }
+
+                            }
+                }
+            } else {
+                for (V2Entity ontologyDocument : getOntologies(lang)) {
+                    Set<String> tempClassifications = new HashSet<>();
+                    if(ontologyDocument.any().get("classifications") != null)
+                        if (!((Collection<Map<String, Collection<String>>>) ontologyDocument.any().get("classifications")).isEmpty()) {
+                            for (Map<String, Collection<String>> classificationSchema : (Collection<Map<String, Collection<String>>>) ontologyDocument.any().get("classifications")) {
+                                for (String schema : schemas)
+                                    if (classificationSchema.containsKey(schema)) {
+                                        for (String classification : classifications) {
+                                            if (classificationSchema.get(schema) != null) {
+                                                if (!classificationSchema.get(schema).isEmpty()) {
+                                                    if (classificationSchema.get(schema).contains(classification)) {
+                                                        tempClassifications.add(classification);
+                                                    }
+                                                }
+                                            }
+                                        }
+                                    }
+                            }
+                            if (tempClassifications.containsAll(classifications))
+                                tempSet.add(ontologyDocument);
+                        }
+                }
+            }
+        return tempSet;
+    }
+    public Set<V2Entity> filter(Collection<String> schemas, Collection<String> classifications, boolean exclusive, String lang){
+        if(exclusive)
+            return exclusiveFilter(schemas, classifications, lang);
+        else
+            return inclusiveFilter(schemas, classifications, lang);
+    }
+
+    public Set<V2Entity> inclusiveFilter(Collection<String> schemas, Collection<String> classifications, String lang){
+        Set<V2Entity> tempSet = new HashSet<>();
+        Set<V2Entity> filteredSet = new HashSet<>();
+        tempSet.addAll(getOntologies(lang));
+
+        for (V2Entity ontology : tempSet){
+            for (String key : ontology.any().keySet()){
+                if (schemas.contains(key)){
+                    if(ontology.any().get(key) != null)
+                        if (ontology.any().get(key) instanceof Collection) {
+                            for (String ontologyClassification : (Collection<String>) ontology.any().get(key)){
+                                if(classifications.contains(ontologyClassification))
+                                    filteredSet.add(ontology);
+                            }
+                        } else if (ontology.any().get(key) instanceof String) {
+                            if(ontology.any().get(key) != null)
+                                if(classifications.contains(ontology.any().get(key)))
+                                    filteredSet.add(ontology);
+                        }
+                }
+            }
+        }
+        return filteredSet;
+    }
+
+    public Set<V2Entity> exclusiveFilter(Collection<String> schemas, Collection<String> classifications, String lang){
+        Set<V2Entity> tempSet = new HashSet<>();
+        Set<V2Entity> filteredSet = new HashSet<>();
+        tempSet.addAll(getOntologies(lang));
+
+        for (V2Entity ontology : tempSet){
+            Set<String> fieldSet = ontology.any().keySet();
+            if (fieldSet.containsAll(schemas)){
+                Set<String> tempClassifications = new HashSet<>();
+                for (String key : ontology.any().keySet()){
+                    if (ontology.any().get(key) instanceof Collection){
+                        if(ontology.any().get(key) != null)
+                            for (String classification : classifications){
+                                if(((Collection<String>) ontology.any().get(key)).contains(classification))
+                                    tempClassifications.add(classification);
+                            }
+                    } else if (ontology.any().get(key) instanceof String) {
+                        if(ontology.any().get(key) != null)
+                            if(classifications.contains((String) ontology.any().get(key)))
+                                tempClassifications.add((String) ontology.any().get(key));
+                    }
+                }
+                if(tempClassifications.containsAll(classifications))
+                    filteredSet.add(ontology);
+            }
+        }
+        return filteredSet;
+    }
+
+    public Set<V2Entity> filterLicense(Collection<String> schemas, Collection<String> classifications, boolean exclusive, String lang){
+        Set<V2Entity> tempSet = new HashSet<>();
+        Set<V2Entity> filteredSet = new HashSet<>();
+        tempSet.addAll(getOntologies(lang));
+
+        for (V2Entity ontology : tempSet){
+            if (ontology.any().keySet().contains("license")){
+                LinkedTreeMap license = (LinkedTreeMap) ontology.any().get("license");
+                String label = license.get("label") != null ? (String) license.get("label") : "";
+                String logo = license.get("logo") != null ? (String) license.get("logo") : "";
+                String url = license.get("url") != null ? (String) license.get("url") : "";
+                if (exclusive){
+                    Set<String> tempClassifications = new HashSet<>();
+                    if (schemas.contains("license.label") && label.length() > 0 && classifications.contains(label))
+                        tempClassifications.add("license.label");
+                    if (schemas.contains("license.logo") && logo.length() > 0 && classifications.contains(logo))
+                        tempClassifications.add("license.logo");
+                    if (schemas.contains("license.url") && url.length() > 0 && classifications.contains(url))
+                        tempClassifications.add("license.url");
+
+                    if(tempClassifications.containsAll(classifications))
+                        filteredSet.add(ontology);
+
+                } else {
+                    if (schemas.contains("license.label") && label.length() > 0 && classifications.contains(label))
+                        filteredSet.add(ontology);
+                    if (schemas.contains("license.logo") && logo.length() > 0 && classifications.contains(logo))
+                        filteredSet.add(ontology);
+                    if (schemas.contains("license.url") && url.length() > 0 && classifications.contains(url))
+                        filteredSet.add(ontology);
+                }
+            }
+        }
+
+        return filteredSet;
+    }
+
+    public Set<String> getSchemaKeys(String lang){
+        Set<V2Entity> tempSet = new HashSet<>();
+        tempSet.addAll(getOntologies(lang));
+        Set<String> keys = new HashSet<>();
+        for (V2Entity ontology : tempSet){
+            if (ontology.any().containsKey("classifications")){
+                Collection temp = (Collection) ontology.any().get("classifications");
+                for (Object o : temp){
+                    keys.addAll(((Map<String, Collection<String>>) o).keySet());
+                }
+            }
+        }
+        return keys;
+    }
+
+    public Set<String> getSchemaValues(Collection<String> schemas, String lang){
+        Set<V2Entity> tempSet = new HashSet<>();
+        tempSet.addAll(getOntologies(lang));
+        Set<String> values = new HashSet<>();
+        for (V2Entity ontology : tempSet){
+            if (ontology.any().containsKey("classifications")){
+                Collection temp = (Collection) ontology.any().get("classifications");
+                for (Object o : temp){
+                    for (Map.Entry<String, Collection<String>> entry : ((Map<String, Collection<String>>) o).entrySet())
+                        for (String value : entry.getValue())
+                            if(schemas.contains(entry.getKey()))
+                                values.add(value);
+                }
+            }
+        }
+        return values;
+    }
 }
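getLastLoaded compares "loaded" timestamps parsed with a fixed nine-digit-nanosecond pattern. A quick sketch of that parse; the timestamp literal is made up:

```java
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;

public class LoadedTimestampSketch {
    public static void main(String[] args) {
        DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSSSSSSS");
        // the pattern expects exactly nine fractional digits, as stored by the loader
        LocalDateTime loaded = LocalDateTime.parse("2024-03-01T12:34:56.123456789", formatter);
        System.out.println(loaded.isAfter(LocalDateTime.MIN)); // true
    }
}
```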
org.springframework.stereotype.Component; +import uk.ac.ebi.spot.ols.model.v2.V2Entity; +import uk.ac.ebi.spot.ols.repository.v1.TreeNode; + +import java.io.IOException; +import java.util.*; + +import static uk.ac.ebi.spot.ols.model.SKOSRelation.*; + +/** + * @author Erhun Giray TUNCAY + * @email giray.tuncay@tib.eu + * TIB-Leibniz Information Center for Science and Technology + */ +@Component +public class V2SKOSRepository extends V2ClassRepository { + + @Cacheable(value = "concepttree", key="#ontologyId.concat('-').concat(#schema).concat('-').concat(#narrower).concat('-').concat(#withChildren)") + public List> conceptTree (String ontologyId, boolean schema, boolean narrower, boolean withChildren, Boolean obsoletes, String lang, Pageable pageable) throws IOException { + + Map> properties = new HashMap<>(); + if(!obsoletes) + properties.put("isObsolete", List.of("false")); + + List listOfTerms = allClassesOfOntology(ontologyId, obsoletes, pageable, lang); + List> rootTerms = new ArrayList>(); + int count = 0; + + if(schema) { + for (V2Entity term : listOfTerms) + if (term.any().get(hasTopConcept.getPropertyName()) != null) { + for (String iriTopConcept : (ArrayList) term.any().get(hasTopConcept.getPropertyName())) { + V2Entity topConceptTerm = findTerm(listOfTerms,iriTopConcept); + TreeNode topConcept = new TreeNode(topConceptTerm); + topConcept.setIndex(String.valueOf(++count)); + if(withChildren) { + if(narrower) + populateChildrenandRelatedByNarrower(topConceptTerm,topConcept,listOfTerms); + else + populateChildrenandRelatedByBroader(topConceptTerm,topConcept,listOfTerms); + } + rootTerms.add(topConcept); + } + } + } else for (V2Entity term : listOfTerms) { + TreeNode tree = new TreeNode(term); + + if (tree.isRoot() && term.any().get(topConceptOf.getPropertyName()) != null) { + tree.setIndex(String.valueOf(++count)); + if(withChildren) { + if(narrower) + populateChildrenandRelatedByNarrower(term,tree,listOfTerms); + else + populateChildrenandRelatedByBroader(term,tree,listOfTerms); + } + rootTerms.add(tree); + } + } + + return rootTerms; + } + + @Cacheable(value = "concepttree", key="#ontologyId.concat('-').concat(#narrower).concat('-').concat(#withChildren)") + public List> conceptTreeWithoutTop (String ontologyId, boolean isNarrower, boolean withChildren, Boolean obsoletes, String lang, Pageable pageable) throws IOException { + + List listOfTerms = allClassesOfOntology(ontologyId, obsoletes, pageable, lang); + + Set rootIRIs = new HashSet(); + List> rootTerms = new ArrayList>(); + int count = 0; + if(!isNarrower) { + for (V2Entity term : listOfTerms) { + if(term.any() != null && term.any().get(broader.getPropertyName()) != null) { + for (String iriBroader : getRelationsAsList(term,broader.getPropertyName())) { + V2Entity broaderTerm = findTerm(listOfTerms, iriBroader); + if (broaderTerm.any() != null && broaderTerm.any().get(broader.getPropertyName()) == null) { + rootIRIs.add(iriBroader); + } + + } + } + } + + for (String iri : rootIRIs) { + V2Entity topConceptTerm = findTerm(listOfTerms, iri); + TreeNode topConcept = new TreeNode(topConceptTerm); + topConcept.setIndex(String.valueOf(++count)); + if(withChildren) + populateChildrenandRelatedByBroader(topConceptTerm,topConcept,listOfTerms); + rootTerms.add(topConcept); + } + + } else { + for (V2Entity term : listOfTerms) { + if (term.any() != null && term.any().get(narrower) != null) { + boolean root = true; + for (V2Entity V2Entity : listOfTerms) { + if (V2Entity.any() != null && V2Entity.any().get(narrower) != null) { + for (String 
+    @Cacheable(value = "concepttree", key = "#ontologyId.concat('-').concat(#isNarrower).concat('-').concat(#withChildren)")
+    public List<TreeNode<V2Entity>> conceptTreeWithoutTop(String ontologyId, boolean isNarrower, boolean withChildren, Boolean obsoletes, String lang, Pageable pageable) throws IOException {
+
+        List<V2Entity> listOfTerms = allClassesOfOntology(ontologyId, obsoletes, pageable, lang);
+
+        Set<String> rootIRIs = new HashSet<>();
+        List<TreeNode<V2Entity>> rootTerms = new ArrayList<>();
+        int count = 0;
+        if (!isNarrower) {
+            for (V2Entity term : listOfTerms) {
+                if (term.any() != null && term.any().get(broader.getPropertyName()) != null) {
+                    for (String iriBroader : getRelationsAsList(term, broader.getPropertyName())) {
+                        V2Entity broaderTerm = findTerm(listOfTerms, iriBroader);
+                        if (broaderTerm.any() != null && broaderTerm.any().get(broader.getPropertyName()) == null) {
+                            rootIRIs.add(iriBroader);
+                        }
+                    }
+                }
+            }
+
+            for (String iri : rootIRIs) {
+                V2Entity topConceptTerm = findTerm(listOfTerms, iri);
+                TreeNode<V2Entity> topConcept = new TreeNode<>(topConceptTerm);
+                topConcept.setIndex(String.valueOf(++count));
+                if (withChildren)
+                    populateChildrenandRelatedByBroader(topConceptTerm, topConcept, listOfTerms);
+                rootTerms.add(topConcept);
+            }
+
+        } else {
+            for (V2Entity term : listOfTerms) {
+                if (term.any() != null && term.any().get(narrower.getPropertyName()) != null) {
+                    boolean root = true;
+                    for (V2Entity other : listOfTerms) {
+                        if (other.any() != null && other.any().get(narrower.getPropertyName()) != null) {
+                            for (String iriNarrower : getRelationsAsList(other, narrower.getPropertyName())) {
+                                if (term.any().get("iri").equals(iriNarrower))
+                                    root = false;
+                            }
+                        }
+                    }
+
+                    if (root) {
+                        TreeNode<V2Entity> topConcept = new TreeNode<>(term);
+                        topConcept.setIndex(String.valueOf(++count));
+                        if (withChildren)
+                            populateChildrenandRelatedByNarrower(term, topConcept, listOfTerms);
+                        rootTerms.add(topConcept);
+                    }
+                }
+            }
+        }
+
+        return rootTerms;
+    }
+
+    @Cacheable(value = "concepttree", key = "#ontologyId.concat('-').concat('s').concat('-').concat(#iri).concat('-').concat(#narrower).concat('-').concat(#index)")
+    public TreeNode<V2Entity> conceptSubTree(String ontologyId, String iri, boolean narrower, String index, Boolean obsoletes, String lang, Pageable pageable) throws IOException {
+        List<V2Entity> listOfTerms = allClassesOfOntology(ontologyId, obsoletes, pageable, lang);
+        V2Entity topConceptTerm = findTerm(listOfTerms, iri);
+        TreeNode<V2Entity> topConcept = new TreeNode<>(topConceptTerm);
+        topConcept.setIndex(index);
+        if (narrower)
+            populateChildrenandRelatedByNarrower(topConceptTerm, topConcept, listOfTerms);
+        else
+            populateChildrenandRelatedByBroader(topConceptTerm, topConcept, listOfTerms);
+
+        return topConcept;
+    }
+
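Every lookup in these tree builders goes through findTerm (defined just below), which scans the whole term list on each call, so constructing a tree is quadratic in the number of concepts. A possible optimisation, sketched here under the assumption that `any().get("iri")` is the canonical IRI key (as the code itself uses), would be a one-off IRI index; this is an illustration, not part of the patch:

    // Sketch only: builds an IRI -> entity index once, so each subsequent
    // lookup is O(1) instead of a linear scan over the full term list.
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    import uk.ac.ebi.spot.ols.model.v2.V2Entity;

    final class TermIndex {
        private final Map<String, V2Entity> byIri = new HashMap<>();

        TermIndex(List<V2Entity> terms) {
            for (V2Entity term : terms) {
                Object iri = term.any().get("iri");  // same key the repository uses
                if (iri != null)
                    byIri.put(iri.toString(), term);
            }
        }

        /** Drop-in replacement for findTerm(listOfTerms, iri); null when absent. */
        V2Entity find(String iri) {
            return byIri.get(iri);
        }
    }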
+    public V2Entity findTerm(List<V2Entity> wholeList, String iri) {
+        for (V2Entity term : wholeList)
+            if (term.any().get("iri").equals(iri))
+                return term;
+        return new V2Entity(new JsonObject());
+    }
+
+    public List<V2Entity> findRelated(String ontologyId, String iri, String relationType, String lang) {
+        List<V2Entity> related = new ArrayList<>();
+        V2Entity term = this.findByOntologyAndIri(ontologyId, iri, lang);
+        if (term != null)
+            if (term.any().get(relationType) != null)
+                for (String iriBroader : getRelationsAsList(term, relationType))
+                    related.add(this.findByOntologyAndIri(ontologyId, iriBroader, lang));
+        return related;
+    }
+
+    public List<V2Entity> findRelatedIndirectly(String ontologyId, String iri, String relationType, Boolean obsoletes, String lang, Pageable pageable) throws IOException {
+        List<V2Entity> related = new ArrayList<>();
+
+        V2Entity entity = this.findByOntologyAndIri(ontologyId, iri, lang);
+        if (entity == null)
+            return related;
+        if (entity.any().get("iri") == null)
+            return related;
+
+        List<V2Entity> listOfTerms = allClassesOfOntology(ontologyId, obsoletes, pageable, lang);
+
+        for (V2Entity term : listOfTerms) {
+            if (term != null)
+                if (term.any().get(relationType) != null)
+                    for (String iriRelated : getRelationsAsList(term, relationType))
+                        if (iriRelated.equals(iri))
+                            related.add(term);
+        }
+
+        return related;
+    }
+
+    public void populateChildrenandRelatedByNarrower(V2Entity term, TreeNode<V2Entity> tree, List<V2Entity> listOfTerms) {
+
+        if (term.any() != null)
+            for (String iriRelated : getRelationsAsList(term, related.getPropertyName())) {
+                TreeNode<V2Entity> relatedNode = new TreeNode<>(findTerm(listOfTerms, iriRelated));
+                relatedNode.setIndex(tree.getIndex() + ".related");
+                tree.addRelated(relatedNode);
+            }
+        int count = 0;
+        if (term.any() != null)
+            for (String iriChild : getRelationsAsList(term, narrower.getPropertyName())) {
+                V2Entity childTerm = findTerm(listOfTerms, iriChild);
+                TreeNode<V2Entity> child = new TreeNode<>(childTerm);
+                child.setIndex(tree.getIndex() + "." + ++count);
+                populateChildrenandRelatedByNarrower(childTerm, child, listOfTerms);
+                tree.addChild(child);
+            }
+    }
+
+    public void populateChildrenandRelatedByBroader(V2Entity term, TreeNode<V2Entity> tree, List<V2Entity> listOfTerms) {
+        if (term.any() != null)
+            for (String iriRelated : getRelationsAsList(term, related.getPropertyName())) {
+                TreeNode<V2Entity> relatedNode = new TreeNode<>(findTerm(listOfTerms, iriRelated));
+                relatedNode.setIndex(tree.getIndex() + ".related");
+                tree.addRelated(relatedNode);
+            }
+        int count = 0;
+        for (V2Entity other : listOfTerms) {
+            if (other.any() != null)
+                for (String iriBroader : getRelationsAsList(other, broader.getPropertyName()))
+                    if (term.any().get("iri") != null)
+                        if (term.any().get("iri").equals(iriBroader)) {
+                            TreeNode<V2Entity> child = new TreeNode<>(other);
+                            child.setIndex(tree.getIndex() + "." + ++count);
+                            populateChildrenandRelatedByBroader(other, child, listOfTerms);
+                            tree.addChild(child);
+                        }
+        }
+    }
+}
diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/controller/KeyValueResultAssembler.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/controller/KeyValueResultAssembler.java
new file mode 100644
index 000000000..01632862e
--- /dev/null
+++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/controller/KeyValueResultAssembler.java
@@ -0,0 +1,18 @@
+package uk.ac.ebi.spot.ols.reststatistics.controller;
+
+import org.springframework.hateoas.EntityModel;
+import org.springframework.hateoas.server.RepresentationModelAssembler;
+import org.springframework.stereotype.Component;
+
+import uk.ac.ebi.spot.ols.reststatistics.dto.KeyValueResultDto;
+
+@Component
+public class KeyValueResultAssembler implements RepresentationModelAssembler<KeyValueResultDto, EntityModel<KeyValueResultDto>> {
+
+    @Override
+    public EntityModel<KeyValueResultDto> toModel(KeyValueResultDto document) {
+        EntityModel<KeyValueResultDto> resource = EntityModel.of(document);
+
+        return resource;
+    }
+}
\ No newline at end of file
diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/controller/RestCallAssembler.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/controller/RestCallAssembler.java
new file mode 100644
index 000000000..9f95b24a9
--- /dev/null
+++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/controller/RestCallAssembler.java
@@ -0,0 +1,18 @@
+package uk.ac.ebi.spot.ols.reststatistics.controller;
+
+import org.springframework.hateoas.EntityModel;
+import org.springframework.hateoas.server.RepresentationModelAssembler;
+import org.springframework.stereotype.Component;
+
+import uk.ac.ebi.spot.ols.reststatistics.dto.RestCallDto;
+
+@Component
+public class RestCallAssembler implements RepresentationModelAssembler<RestCallDto, EntityModel<RestCallDto>> {
+
+    @Override
+    public EntityModel<RestCallDto> toModel(RestCallDto document) {
+        EntityModel<RestCallDto> resource = EntityModel.of(document);
+
+        return resource;
+    }
+}
diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/controller/RestCallStatistics.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/controller/RestCallStatistics.java
new file mode 100644
index 000000000..f740d2307
--- /dev/null
+++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/controller/RestCallStatistics.java
@@ -0,0 +1,213 @@
+package uk.ac.ebi.spot.ols.reststatistics.controller;
+
+import java.time.LocalDateTime;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Optional;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.data.domain.Page;
+import org.springframework.data.domain.Pageable;
+import org.springframework.data.web.PageableDefault;
+import
org.springframework.data.web.PagedResourcesAssembler; +import org.springframework.format.annotation.DateTimeFormat; +import org.springframework.hateoas.PagedModel; +import org.springframework.http.HttpEntity; +import org.springframework.http.HttpStatus; +import org.springframework.http.MediaType; +import org.springframework.http.ResponseEntity; +import org.springframework.web.bind.annotation.*; + +import io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.Parameter; +import uk.ac.ebi.spot.ols.reststatistics.service.RestCallService; +import uk.ac.ebi.spot.ols.reststatistics.service.RestCallStatisticsService; +import uk.ac.ebi.spot.ols.reststatistics.dto.KeyValueResultDto; +import uk.ac.ebi.spot.ols.reststatistics.dto.RestCallDto; +import uk.ac.ebi.spot.ols.reststatistics.dto.RestCallRequest; +import uk.ac.ebi.spot.ols.reststatistics.entity.RestCallParameter; +import uk.ac.ebi.spot.ols.reststatistics.entity.RestCallParameterType; + +@RestController +@RequestMapping("/api/rest/statistics") +public class RestCallStatistics { + public static final int DEFAULT_PAGE_SIZE = 20; + + private final RestCallService restCallService; + private final RestCallStatisticsService restCallStatisticsService; + private final RestCallAssembler restCallAssembler; + private final KeyValueResultAssembler keyValueResultAssembler; + + @Autowired + public RestCallStatistics(RestCallService restCallService, + RestCallStatisticsService restCallStatisticsService, + RestCallAssembler restCallAssembler, + KeyValueResultAssembler keyValueResultAssembler) { + this.restCallService = restCallService; + this.restCallStatisticsService = restCallStatisticsService; + this.restCallAssembler = restCallAssembler; + this.keyValueResultAssembler = keyValueResultAssembler; + } + + @Operation(summary = "REST Calls List") + @RequestMapping(value = "", produces = MediaType.APPLICATION_JSON_VALUE, method = RequestMethod.POST) + public HttpEntity> getList( + @RequestParam(name = "url", required = false) String url, + @RequestParam(name = "dateTimeFrom", required = false) + @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime dateTimeFrom, + @RequestParam(name = "dateTimeTo", required = false) + @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime dateTimeTo, + @RequestParam(name="intersection", required=true,defaultValue="false") boolean intersection, + @RequestBody(required = false) List parameterList, + @Parameter(hidden = true) PagedResourcesAssembler assembler, + @PageableDefault(size = DEFAULT_PAGE_SIZE) + @Parameter(hidden = true) Pageable pageable + ) { + + RestCallRequest request = new RestCallRequest(url, dateTimeFrom, dateTimeTo); + + List parameters = new ArrayList(); + + if(parameterList != null) { + parameters.addAll(parameterList); + } + + Page page = restCallService.getList(request, parameters, intersection, pageable); + + return new ResponseEntity<>(assembler.toModel(page, restCallAssembler), HttpStatus.OK); + } + + @Operation(summary = "REST Calls statistics by URL") + @RequestMapping(value = "/byUrl", produces = MediaType.APPLICATION_JSON_VALUE, method = RequestMethod.POST) + public HttpEntity> getStatisticsByUrl( + @RequestParam(name = "url", required = false) String url, + @RequestParam(name = "dateTimeFrom", required = false) + @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime dateTimeFrom, + @RequestParam(name = "dateTimeTo", required = false) + @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime dateTimeTo, + 
@RequestParam(name="intersection", required=true,defaultValue="false") boolean intersection, + @RequestBody(required = false) List parameterList, + @Parameter(hidden = true) PagedResourcesAssembler assembler, + @PageableDefault(size = DEFAULT_PAGE_SIZE) + @Parameter(hidden = true) Pageable pageable + ) { + RestCallRequest request = new RestCallRequest( + url, + dateTimeFrom, + dateTimeTo + ); + + List parameters = new ArrayList(); + + if(parameterList != null) { + parameters.addAll(parameterList); + } + + Page page = restCallStatisticsService.getRestCallsCountsByAddress(request, parameters, intersection, pageable); + + return new ResponseEntity<>(assembler.toModel(page, keyValueResultAssembler), HttpStatus.OK); + } + + @Operation(summary = "REST Calls total count") + @RequestMapping(value = "/count", produces = MediaType.APPLICATION_JSON_VALUE, method = RequestMethod.POST) + public HttpEntity getTotalCount( + @RequestParam(name = "url", required = false) String url, + @RequestParam(name = "dateTimeFrom", required = false) + @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime dateTimeFrom, + @RequestParam(name = "dateTimeTo", required = false) + @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime dateTimeTo, + @RequestParam(name="intersection", required=true,defaultValue="false") boolean intersection, + @RequestBody(required = false) List parameterList + ) { + RestCallRequest request = new RestCallRequest( + url, + dateTimeFrom, + dateTimeTo + ); + + + List parameters = new ArrayList(); + + if(parameterList != null) { + parameters.addAll(parameterList); + } + + KeyValueResultDto counts = restCallStatisticsService.getRestCallsTotalCount(request,parameters,intersection); + + return new ResponseEntity<>(counts, HttpStatus.OK); + } + + @Operation(summary = "REST Calls statistics by query parameters and path variables") + @RequestMapping(value = "/byParameter", produces = MediaType.APPLICATION_JSON_VALUE, method = RequestMethod.POST) + public HttpEntity> getStatisticsByParameter( + @Parameter(description = "Parameter type") + @RequestParam(name = "type", required = false) RestCallParameterType type, + @RequestParam(name = "url", required = false) String url, + @Parameter(description = "Parameter name") + @RequestParam(name = "parameter", required = false) String parameter, + @RequestParam(name = "dateTimeFrom", required = false) + @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime dateTimeFrom, + @RequestParam(name = "dateTimeTo", required = false) + @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime dateTimeTo, + @RequestParam(name="intersection", required=true,defaultValue="false") boolean intersection, + @RequestBody(required = false) List parameterList, + @Parameter(hidden = true) PagedResourcesAssembler assembler, + @PageableDefault(size = DEFAULT_PAGE_SIZE) + @Parameter(hidden = true) Pageable pageable + ) { + RestCallRequest request = new RestCallRequest( + url, + Optional.ofNullable(type), + Optional.ofNullable(parameter), + dateTimeFrom, + dateTimeTo + ); + + List parameters = new ArrayList(); + + if(parameterList != null) { + parameters.addAll(parameterList); + } + + Page page = restCallStatisticsService.getStatisticsByParameter(request, parameters, intersection,pageable); + + return new ResponseEntity<>(assembler.toModel(page, keyValueResultAssembler), HttpStatus.OK); + } + + @Operation(summary = "REST Calls statistics by date") + @RequestMapping(value = "/byDate", produces = MediaType.APPLICATION_JSON_VALUE, method = 
RequestMethod.POST) + public HttpEntity> getStatisticsByDate( + @Parameter(description = "Parameter type") + @RequestParam(name = "type", required = false) RestCallParameterType type, + @RequestParam(name = "url", required = false) String url, + @Parameter(description = "Parameter name") + @RequestParam(name = "parameter", required = false) String parameter, + @RequestParam(name = "dateTimeFrom", required = false) + @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime dateTimeFrom, + @RequestParam(name = "dateTimeTo", required = false) + @DateTimeFormat(iso = DateTimeFormat.ISO.DATE_TIME) LocalDateTime dateTimeTo, + @RequestParam(name="intersection", required=true,defaultValue="false") boolean intersection, + @RequestBody(required = false) List parameterList, + @Parameter(hidden = true) PagedResourcesAssembler assembler, + @PageableDefault(size = DEFAULT_PAGE_SIZE) + @Parameter(hidden = true) Pageable pageable + ) { + RestCallRequest request = new RestCallRequest( + url, + Optional.ofNullable(type), + Optional.ofNullable(parameter), + dateTimeFrom, + dateTimeTo + ); + + List parameters = new ArrayList(); + + if(parameterList != null) { + parameters.addAll(parameterList); + } + + Page page = restCallStatisticsService.getStatisticsByDate(request, parameters, intersection, pageable); + + return new ResponseEntity<>(assembler.toModel(page, keyValueResultAssembler), HttpStatus.OK); + } +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/dto/KeyValueResultDto.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/dto/KeyValueResultDto.java new file mode 100644 index 000000000..f6c24fe7e --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/dto/KeyValueResultDto.java @@ -0,0 +1,30 @@ +package uk.ac.ebi.spot.ols.reststatistics.dto; + +public class KeyValueResultDto { + private String key; + private long value; + + public KeyValueResultDto() { + } + + public KeyValueResultDto(String key, long value) { + this.key = key; + this.value = value; + } + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + public long getValue() { + return value; + } + + public void setValue(long value) { + this.value = value; + } +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/dto/RestCallCountResultDto.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/dto/RestCallCountResultDto.java new file mode 100644 index 000000000..c9afdf130 --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/dto/RestCallCountResultDto.java @@ -0,0 +1,22 @@ +package uk.ac.ebi.spot.ols.reststatistics.dto; + +import java.util.List; + +public class RestCallCountResultDto { + List result; + + public RestCallCountResultDto() { + } + + public RestCallCountResultDto(List result) { + this.result = result; + } + + public List getResult() { + return result; + } + + public void setResult(List result) { + this.result = result; + } +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/dto/RestCallDto.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/dto/RestCallDto.java new file mode 100644 index 000000000..32a909af3 --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/dto/RestCallDto.java @@ -0,0 +1,68 @@ +package uk.ac.ebi.spot.ols.reststatistics.dto; + +import java.time.LocalDateTime; +import java.util.Set; + +import uk.ac.ebi.spot.ols.reststatistics.entity.RestCall; +import 
uk.ac.ebi.spot.ols.reststatistics.entity.RestCallParameter; + +public class RestCallDto { + private String id; + private String url; + private Set parameters; + private LocalDateTime createdAt; + + public RestCallDto() { + } + + public RestCallDto(String id, + String url, + Set parameters, + LocalDateTime createdAt) { + this.id = id; + this.url = url; + this.parameters = parameters; + this.createdAt = createdAt; + } + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getUrl() { + return url; + } + + public void setUrl(String url) { + this.url = url; + } + + public Set getParameters() { + return parameters; + } + + public void setParameters(Set parameters) { + this.parameters = parameters; + } + + public LocalDateTime getCreatedAt() { + return createdAt; + } + + public void setCreatedAt(LocalDateTime createdAt) { + this.createdAt = createdAt; + } + + public static RestCallDto of(RestCall restCall) { + return new RestCallDto( + restCall.getId(), + restCall.getUrl(), + restCall.getParameters(), + restCall.getCreatedAt() + ); + } +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/dto/RestCallRequest.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/dto/RestCallRequest.java new file mode 100644 index 000000000..96d184662 --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/dto/RestCallRequest.java @@ -0,0 +1,90 @@ +package uk.ac.ebi.spot.ols.reststatistics.dto; + +import java.time.LocalDateTime; +import java.util.Optional; +import java.util.function.Predicate; + +import uk.ac.ebi.spot.ols.reststatistics.entity.RestCallParameter; +import uk.ac.ebi.spot.ols.reststatistics.entity.RestCallParameterType; + +public class RestCallRequest { + private String url; + private Optional type; + private Optional parameterName; + + private LocalDateTime dateTimeFrom; + private LocalDateTime dateTimeTo; + + public RestCallRequest() { + } + + public RestCallRequest(String url, LocalDateTime dateTimeFrom, LocalDateTime dateTimeTo) { + this.url = url; + this.dateTimeFrom = dateTimeFrom; + this.dateTimeTo = dateTimeTo; + } + + public RestCallRequest(String url, + Optional type, + Optional parameterName, + LocalDateTime dateTimeFrom, + LocalDateTime dateTimeTo) { + this.url = url; + this.type = type; + this.parameterName = parameterName; + this.dateTimeFrom = dateTimeFrom; + this.dateTimeTo = dateTimeTo; + } + + public String getUrl() { + return url; + } + + public void setUrl(String url) { + this.url = url; + } + + public LocalDateTime getDateTimeFrom() { + return dateTimeFrom; + } + + public void setDateTimeFrom(LocalDateTime dateTimeFrom) { + this.dateTimeFrom = dateTimeFrom; + } + + public LocalDateTime getDateTimeTo() { + return dateTimeTo; + } + + public void setDateTimeTo(LocalDateTime dateTimeTo) { + this.dateTimeTo = dateTimeTo; + } + + public Optional getType() { + return type; + } + + public void setType(Optional type) { + this.type = type; + } + + public Optional getParameterName() { + return parameterName; + } + + public void setParameterName(Optional parameterName) { + this.parameterName = parameterName; + } + + public Predicate getParameterNamePredicate() { + return parameterName.isPresent() + ? parameter -> parameterName.get().equalsIgnoreCase(parameter.getName()) + : parameter -> true; + } + + public Predicate getParameterTypePredicate() { + return type.isPresent() + ? 
type.get().getRestCallParameterPredicate() + : parameter -> true; + } +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/entity/HttpServletRequestInfo.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/entity/HttpServletRequestInfo.java new file mode 100644 index 000000000..7536fcd5c --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/entity/HttpServletRequestInfo.java @@ -0,0 +1,69 @@ +package uk.ac.ebi.spot.ols.reststatistics.entity; + +import java.util.Objects; +import java.util.Set; + +public class HttpServletRequestInfo { + private String url; + private Set pathVariables; + private Set queryParameters; + private Set headers; + + public HttpServletRequestInfo() { + } + + public HttpServletRequestInfo(String url, + Set pathVariables, + Set queryParameters, + Set headers) { + this.url = url; + this.pathVariables = pathVariables; + this.queryParameters = queryParameters; + this.headers = headers; + } + + public String getUrl() { + return url; + } + + public void setUrl(String url) { + this.url = url; + } + + public Set getPathVariables() { + return pathVariables; + } + + public void setPathVariables(Set pathVariables) { + this.pathVariables = pathVariables; + } + + public Set getQueryParameters() { + return queryParameters; + } + + public void setQueryParameters(Set queryParameters) { + this.queryParameters = queryParameters; + } + + public Set getHeaders() { + return headers; + } + + public void setHeaders(Set headers) { + this.headers = headers; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + HttpServletRequestInfo that = (HttpServletRequestInfo) o; + return url.equals(that.url) && Objects.equals(pathVariables, that.pathVariables) && Objects.equals(queryParameters, that.queryParameters) && Objects.equals(headers, that.headers); + } + + @Override + public int hashCode() { + return Objects.hash(url, pathVariables, queryParameters, headers); + } +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/entity/RestCall.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/entity/RestCall.java new file mode 100644 index 000000000..dc359deba --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/entity/RestCall.java @@ -0,0 +1,95 @@ +package uk.ac.ebi.spot.ols.reststatistics.entity; + +import org.springframework.data.annotation.Id; +import org.springframework.data.mongodb.core.mapping.Document; + +import java.time.LocalDateTime; +import java.util.HashSet; +import java.util.Objects; +import java.util.Set; + +@Document(collection = "rest_call") +public class RestCall { + @Id + private String id; + + private String url; + + private Set parameters = new HashSet<>(); + + private LocalDateTime createdAt; + + public RestCall() { + } + + public RestCall(String url) { + this.url = url; + this.createdAt = LocalDateTime.now(); + } + + public RestCall(String url, + Set parameters) { + this.url = url; + this.parameters = parameters; + this.createdAt = LocalDateTime.now(); + } + + public void addParameters(Set set) { + parameters.addAll(set); + } + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getUrl() { + return url; + } + + public void setUrl(String url) { + this.url = url; + } + + public LocalDateTime getCreatedAt() { + return createdAt; + } + + public void setCreatedAt(LocalDateTime createdAt) { + this.createdAt = createdAt; + } + + 
public Set getParameters() { + return parameters; + } + + public void setParameters(Set parameters) { + this.parameters = parameters; + } + + @Override + public String toString() { + return "RestCall{" + + "id=" + id + + ", url='" + url + '\'' + + ", parameters=" + parameters + + ", createdAt=" + createdAt + + '}'; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RestCall restCall = (RestCall) o; + return id.equals(restCall.id) && url.equals(restCall.url); + } + + @Override + public int hashCode() { + return Objects.hash(id, url); + } +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/entity/RestCallParameter.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/entity/RestCallParameter.java new file mode 100644 index 000000000..b703e20dd --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/entity/RestCallParameter.java @@ -0,0 +1,92 @@ +package uk.ac.ebi.spot.ols.reststatistics.entity; + +import java.util.Objects; + +import javax.persistence.Transient; + +import com.fasterxml.jackson.annotation.JsonIgnore; + +public class RestCallParameter { + private String name; + private String value; + private RestCallParameterType parameterType; + + public RestCallParameter() { + } + + public RestCallParameter(String name, String value, RestCallParameterType parameterType) { + this.name = name; + this.value = value; + this.parameterType = parameterType; + } + + public RestCallParameter(String name, String value, RestCallParameterType parameterType, RestCall restCall) { + this.name = name; + this.value = value; + this.parameterType = parameterType; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public RestCallParameterType getParameterType() { + return parameterType; + } + + public void setParameterType(RestCallParameterType parameterType) { + this.parameterType = parameterType; + } + + @Override + public String toString() { + return "RestCallParameter{" + + "parameterType='" + parameterType + '\'' + + ", name='" + name + '\'' + + ", value='" + value + '\'' + + '}'; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RestCallParameter that = (RestCallParameter) o; + return name.equals(that.name) && value.equals(that.value) && parameterType == that.parameterType; + } + + @Override + public int hashCode() { + return Objects.hash(name, value, parameterType); + } + + @Transient + @JsonIgnore + public boolean isPathType() { + return RestCallParameterType.PATH.equals(this.parameterType); + } + + @Transient + @JsonIgnore + public boolean isQueryType() { + return RestCallParameterType.QUERY.equals(this.parameterType); + } + + @Transient + @JsonIgnore + public boolean isHeaderType() { + return RestCallParameterType.HEADER.equals(this.parameterType); + } +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/entity/RestCallParameterType.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/entity/RestCallParameterType.java new file mode 100644 index 000000000..e2f9595c5 --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/entity/RestCallParameterType.java @@ -0,0 +1,26 @@ +package uk.ac.ebi.spot.ols.reststatistics.entity; + +import 
java.util.function.Predicate; + +public enum RestCallParameterType { + PATH { + @Override + public Predicate getRestCallParameterPredicate() { + return RestCallParameter::isPathType; + } + }, + QUERY { + @Override + public Predicate getRestCallParameterPredicate() { + return RestCallParameter::isQueryType; + } + }, + HEADER { + @Override + public Predicate getRestCallParameterPredicate() { + return RestCallParameter::isHeaderType; + } + }; + + public abstract Predicate getRestCallParameterPredicate(); +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/repository/RestCallRepository.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/repository/RestCallRepository.java new file mode 100644 index 000000000..f4990cb78 --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/repository/RestCallRepository.java @@ -0,0 +1,9 @@ +package uk.ac.ebi.spot.ols.reststatistics.repository; + +import org.springframework.data.mongodb.repository.MongoRepository; + +import uk.ac.ebi.spot.ols.reststatistics.entity.RestCall; + +public interface RestCallRepository extends MongoRepository, RestCallRepositoryCustom { + +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/repository/RestCallRepositoryCustom.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/repository/RestCallRepositoryCustom.java new file mode 100644 index 000000000..c607ca684 --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/repository/RestCallRepositoryCustom.java @@ -0,0 +1,16 @@ +package uk.ac.ebi.spot.ols.reststatistics.repository; + +import org.springframework.data.domain.Pageable; + +import uk.ac.ebi.spot.ols.reststatistics.dto.RestCallRequest; +import uk.ac.ebi.spot.ols.reststatistics.entity.RestCall; +import uk.ac.ebi.spot.ols.reststatistics.entity.RestCallParameter; + +import java.util.List; + +public interface RestCallRepositoryCustom { + + List query(RestCallRequest request, List parameters, boolean intersection, Pageable pageable); + + Long count(RestCallRequest request, List parameters, boolean intersection); +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/repository/RestCallRepositoryImpl.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/repository/RestCallRepositoryImpl.java new file mode 100644 index 000000000..32e98dcde --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/repository/RestCallRepositoryImpl.java @@ -0,0 +1,116 @@ +package uk.ac.ebi.spot.ols.reststatistics.repository; + +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.domain.Pageable; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.query.Criteria; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.stereotype.Repository; +import org.springframework.web.util.UriUtils; + +import uk.ac.ebi.spot.ols.reststatistics.dto.RestCallRequest; +import uk.ac.ebi.spot.ols.reststatistics.entity.RestCall; +import uk.ac.ebi.spot.ols.reststatistics.entity.RestCallParameter; + +@Repository +public class RestCallRepositoryImpl implements RestCallRepositoryCustom { + private final Logger logger = LoggerFactory.getLogger(getClass()); + + private final MongoTemplate mongoTemplate; + + @Autowired + public 
RestCallRepositoryImpl(MongoTemplate mongoTemplate) { + this.mongoTemplate = mongoTemplate; + } + + @Override + public List query(RestCallRequest request, List parameters, boolean intersection, Pageable pageable) { + Query query = new Query(); + List criteria = new ArrayList<>(); + + addCriteriaByDates(request, criteria); + addCriteriaByUrl(request, criteria); + if (parameters !=null) + if (parameters.size()>0) + addCriteriaByParameter(request, criteria, parameters, intersection); + + + if (!criteria.isEmpty()) { + query.addCriteria(new Criteria().andOperator(criteria.toArray(new Criteria[0]))); + } + + if (Objects.nonNull(pageable)) { + query.with(pageable); + } + + return mongoTemplate.find(query, RestCall.class); + } + + @Override + public Long count(RestCallRequest request, List parameters, boolean intersection) { + Query query = new Query(); + + List criteria = new ArrayList<>(); + + addCriteriaByDates(request, criteria); + addCriteriaByUrl(request, criteria); + if (parameters !=null) + if (parameters.size()>0) + addCriteriaByParameter(request, criteria, parameters, intersection); + + if (!criteria.isEmpty()) { + query.addCriteria(new Criteria().andOperator(criteria.toArray(new Criteria[0]))); + } + + return mongoTemplate.count(query, RestCall.class); + } + + private void addCriteriaByUrl(RestCallRequest request, List criteria) { + if (request.getUrl() != null) { + String url = getDecodedUrl(request); + criteria.add(Criteria.where("url").is(url)); + } + } + + private void addCriteriaByDates(RestCallRequest request, List criteria) { + if (request.getDateTimeFrom() != null) { + criteria.add(Criteria.where("createdAt").gte(request.getDateTimeFrom())); + } + + if (request.getDateTimeTo() != null) { + criteria.add(Criteria.where("createdAt").lte(request.getDateTimeTo())); + } + } + + private void addCriteriaByParameter(RestCallRequest request, List criteria, List parameters, boolean intersection) { + if (parameters != null) { + if (intersection) + criteria.add(Criteria.where("parameters").all(parameters)); + else + criteria.add(Criteria.where("parameters").in(parameters)); + } + + } + + private String getDecodedUrl(RestCallRequest request) { + if (request.getUrl() == null) { + return null; + } + + String decodedUrl = null; + try { + decodedUrl = UriUtils.decode(request.getUrl(), StandardCharsets.UTF_8.toString()); + } catch (Exception e) { + logger.error("Could not get query parameters: {}", e.getLocalizedMessage()); + } + + return decodedUrl; + } +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/RestCallHandlerService.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/RestCallHandlerService.java new file mode 100644 index 000000000..60b51171b --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/RestCallHandlerService.java @@ -0,0 +1,7 @@ +package uk.ac.ebi.spot.ols.reststatistics.service; + +import javax.servlet.http.HttpServletRequest; + +public interface RestCallHandlerService { + void handle(HttpServletRequest request); +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/RestCallParserService.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/RestCallParserService.java new file mode 100644 index 000000000..917b6837e --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/RestCallParserService.java @@ -0,0 +1,9 @@ +package uk.ac.ebi.spot.ols.reststatistics.service; + +import javax.servlet.http.HttpServletRequest; + +import 
uk.ac.ebi.spot.ols.reststatistics.entity.HttpServletRequestInfo; + +public interface RestCallParserService { + HttpServletRequestInfo parse(HttpServletRequest request); +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/RestCallService.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/RestCallService.java new file mode 100644 index 000000000..5be46afaa --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/RestCallService.java @@ -0,0 +1,22 @@ +package uk.ac.ebi.spot.ols.reststatistics.service; + +import java.util.List; + +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; + +import uk.ac.ebi.spot.ols.reststatistics.dto.RestCallDto; +import uk.ac.ebi.spot.ols.reststatistics.dto.RestCallRequest; +import uk.ac.ebi.spot.ols.reststatistics.entity.RestCall; +import uk.ac.ebi.spot.ols.reststatistics.entity.RestCallParameter; + +public interface RestCallService { + + RestCall save(RestCall entity); + + Page getList(RestCallRequest request, List parameters, boolean intersection, Pageable pageable); + + List findAll(); + + Long count(RestCallRequest request, List parameters, boolean intersection); +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/RestCallStatisticsService.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/RestCallStatisticsService.java new file mode 100644 index 000000000..169e59ae9 --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/RestCallStatisticsService.java @@ -0,0 +1,21 @@ +package uk.ac.ebi.spot.ols.reststatistics.service; + + +import java.util.List; + +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; + +import uk.ac.ebi.spot.ols.reststatistics.dto.KeyValueResultDto; +import uk.ac.ebi.spot.ols.reststatistics.dto.RestCallRequest; +import uk.ac.ebi.spot.ols.reststatistics.entity.RestCallParameter; + +public interface RestCallStatisticsService { + Page getRestCallsCountsByAddress(RestCallRequest request, List parameters, boolean intersection, Pageable pageable); + + KeyValueResultDto getRestCallsTotalCount(RestCallRequest request, List parameters, boolean intersection); + + Page getStatisticsByParameter(RestCallRequest request, List parameters, boolean intersection, Pageable pageable); + + Page getStatisticsByDate(RestCallRequest request, List parameters, boolean intersection, Pageable pageable); +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/impl/RestCallHandlerServiceImpl.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/impl/RestCallHandlerServiceImpl.java new file mode 100644 index 000000000..2f33f4258 --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/impl/RestCallHandlerServiceImpl.java @@ -0,0 +1,43 @@ +package uk.ac.ebi.spot.ols.reststatistics.service.impl; + +import javax.servlet.http.HttpServletRequest; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import uk.ac.ebi.spot.ols.reststatistics.entity.HttpServletRequestInfo; +import uk.ac.ebi.spot.ols.reststatistics.entity.RestCall; +import uk.ac.ebi.spot.ols.reststatistics.service.RestCallHandlerService; +import uk.ac.ebi.spot.ols.reststatistics.service.RestCallParserService; +import uk.ac.ebi.spot.ols.reststatistics.service.RestCallService; + +@Service +public 
class RestCallHandlerServiceImpl implements RestCallHandlerService { + private final Logger log = LoggerFactory.getLogger(getClass()); + + private final RestCallParserService restCallParserService; + private final RestCallService restCallService; + + @Autowired + public RestCallHandlerServiceImpl(RestCallParserService restCallParserService, + RestCallService restCallService) { + this.restCallParserService = restCallParserService; + this.restCallService = restCallService; + } + + @Override + public void handle(HttpServletRequest request) { + HttpServletRequestInfo requestInfo = restCallParserService.parse(request); + + RestCall restCall = new RestCall(requestInfo.getUrl()); + restCall.addParameters(requestInfo.getPathVariables()); + restCall.addParameters(requestInfo.getQueryParameters()); + restCall.addParameters(requestInfo.getHeaders()); + + RestCall saved = restCallService.save(restCall); + + log.debug("REST Call: {}", saved); + } +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/impl/RestCallParserServiceImpl.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/impl/RestCallParserServiceImpl.java new file mode 100644 index 000000000..6d10f7a56 --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/impl/RestCallParserServiceImpl.java @@ -0,0 +1,112 @@ +package uk.ac.ebi.spot.ols.reststatistics.service.impl; + +import java.io.UnsupportedEncodingException; +import java.net.URLDecoder; +import java.nio.charset.StandardCharsets; +import java.util.Enumeration; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +import javax.servlet.http.HttpServletRequest; + +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.stereotype.Service; +import org.springframework.web.servlet.HandlerMapping; + +import uk.ac.ebi.spot.ols.reststatistics.entity.HttpServletRequestInfo; +import uk.ac.ebi.spot.ols.reststatistics.entity.RestCallParameter; +import uk.ac.ebi.spot.ols.reststatistics.entity.RestCallParameterType; +import uk.ac.ebi.spot.ols.reststatistics.service.RestCallParserService; + +@Service +public class RestCallParserServiceImpl implements RestCallParserService { + private final Logger logger = LoggerFactory.getLogger(getClass()); + private final UrlCyclicDecoder decoder = new UrlCyclicDecoder(); + + @Value("#{'${frontends}'.split(',')}") + private Set frontends = new HashSet<>(); + + @Override + public HttpServletRequestInfo parse(HttpServletRequest request) { + String requestURI = request.getRequestURI(); + requestURI = decoder.decode(requestURI); + + Map pathVariablesMap = (Map) request + .getAttribute(HandlerMapping.URI_TEMPLATE_VARIABLES_ATTRIBUTE); + + Set pathVariables = new HashSet<>(); + + if (pathVariablesMap != null) + for (Map.Entry entry : pathVariablesMap.entrySet()) { + String parameterName = entry.getKey(); + String parameterValue = decoder.decode(entry.getValue()); + + int startIndex = requestURI.indexOf(parameterValue) - 1; + int endIndex = startIndex + parameterValue.length() + 1; + + if (startIndex >= 0 && requestURI.charAt(startIndex) == '/') { + requestURI = doReplacement(requestURI, parameterName, startIndex, endIndex); + pathVariables.add(new RestCallParameter(parameterName, parameterValue, RestCallParameterType.PATH)); + } + } + + Set queryParameters = new HashSet<>(); + try { + queryParameters = getQueryParameters(request); + } catch 
(UnsupportedEncodingException e) { + logger.error("Could not get query parameters: {}", e.getLocalizedMessage()); + } + + Set headers = new HashSet(); + for (Enumeration names = request.getHeaderNames(); names.hasMoreElements();) { + String headerName = (String) names.nextElement(); + if (!headerName.equals("user-agent")) + continue; + + for(Enumeration values = request.getHeaders(headerName); values.hasMoreElements();){ + String headerValue = (String) values.nextElement(); + if(frontends.contains(headerValue)) + headers.add(new RestCallParameter(headerName,headerValue, RestCallParameterType.HEADER)); + } + + } + + return new HttpServletRequestInfo(requestURI, pathVariables, queryParameters, headers); + } + + private String doReplacement(String str, String parameterName, int startIndex, int endIndex) { + return str.substring(0, startIndex + 1) + + String.format("{%s}", parameterName) + + str.substring(endIndex); + } + + private Set getQueryParameters(HttpServletRequest request) throws UnsupportedEncodingException { + Set queryParameters = new HashSet<>(); + + String queryString = request.getQueryString(); + if (StringUtils.isEmpty(queryString)) { + return queryParameters; + } + + queryString = URLDecoder.decode(queryString, StandardCharsets.UTF_8.toString()); + String[] parameters = queryString.split("&"); + for (String parameter : parameters) { + String[] keyValuePair = parameter.split("="); + String[] values = null; + if(keyValuePair.length >1) + if(keyValuePair[1] != null) + if (!keyValuePair[1].isEmpty()) + values = keyValuePair[1].split(","); + if (values != null) + for (String value : values) { + queryParameters.add(new RestCallParameter(keyValuePair[0], value, RestCallParameterType.QUERY)); + } + } + + return queryParameters; + } +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/impl/RestCallServiceImpl.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/impl/RestCallServiceImpl.java new file mode 100644 index 000000000..b2852b754 --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/impl/RestCallServiceImpl.java @@ -0,0 +1,55 @@ +package uk.ac.ebi.spot.ols.reststatistics.service.impl; + +import java.util.List; +import java.util.stream.Collectors; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.PageImpl; +import org.springframework.data.domain.Pageable; +import org.springframework.stereotype.Service; + +import uk.ac.ebi.spot.ols.reststatistics.repository.RestCallRepository; +import uk.ac.ebi.spot.ols.reststatistics.dto.RestCallDto; +import uk.ac.ebi.spot.ols.reststatistics.dto.RestCallRequest; +import uk.ac.ebi.spot.ols.reststatistics.entity.RestCall; +import uk.ac.ebi.spot.ols.reststatistics.entity.RestCallParameter; +import uk.ac.ebi.spot.ols.reststatistics.service.RestCallService; + +@Service +public class RestCallServiceImpl implements RestCallService { + private final RestCallRepository restCallRepository; + + @Autowired + public RestCallServiceImpl(RestCallRepository restCallRepository) { + this.restCallRepository = restCallRepository; + } + + @Override + public RestCall save(RestCall entity) { + + return restCallRepository.save(entity); + } + + @Override + public Page getList(RestCallRequest request, List parameters, boolean intersection, Pageable pageable) { + List list = restCallRepository.query(request, parameters, intersection, pageable); + List dtos = list.stream() + .map(RestCallDto::of) + 
.collect(Collectors.toList()); + + Long count = restCallRepository.count(request, parameters, intersection); + + return new PageImpl<>(dtos, pageable, count); + } + @Override + public List findAll() { + return restCallRepository.findAll(); + } + + @Override + public Long count(RestCallRequest request, List parameters, boolean intersection) { + + return restCallRepository.count(request,parameters,intersection); + } +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/impl/RestCallStatisticsServiceImpl.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/impl/RestCallStatisticsServiceImpl.java new file mode 100644 index 000000000..dae218e03 --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/impl/RestCallStatisticsServiceImpl.java @@ -0,0 +1,98 @@ +package uk.ac.ebi.spot.ols.reststatistics.service.impl; + +import java.util.Comparator; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Collectors; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.PageImpl; +import org.springframework.data.domain.Pageable; +import org.springframework.stereotype.Service; + +import uk.ac.ebi.spot.ols.reststatistics.dto.KeyValueResultDto; +import uk.ac.ebi.spot.ols.reststatistics.dto.RestCallDto; +import uk.ac.ebi.spot.ols.reststatistics.dto.RestCallRequest; +import uk.ac.ebi.spot.ols.reststatistics.entity.RestCallParameter; +import uk.ac.ebi.spot.ols.reststatistics.service.RestCallService; +import uk.ac.ebi.spot.ols.reststatistics.service.RestCallStatisticsService; + +@Service +public class RestCallStatisticsServiceImpl implements RestCallStatisticsService { + private final RestCallService restCallService; + + @Autowired + public RestCallStatisticsServiceImpl(RestCallService restCallService) { + this.restCallService = restCallService; + } + + @Override + public Page getRestCallsCountsByAddress(RestCallRequest request, + List parameters, boolean intersection, Pageable pageable) { + Page page = restCallService.getList(request, parameters, intersection, pageable); + + Map countsMap = getCountsMap(page); + + List list = countsMap.entrySet().stream() + .map(entry -> new KeyValueResultDto(entry.getKey(), entry.getValue())).collect(Collectors.toList()); + + return new PageImpl<>(list, pageable, list.size()); + } + + @Override + public KeyValueResultDto getRestCallsTotalCount(RestCallRequest request, List parameters, + boolean intersection) { + Long count = restCallService.count(request, parameters, intersection); + Long value = Optional.ofNullable(count).orElse(0L); + + return new KeyValueResultDto("total", value); + } + + @Override + public Page getStatisticsByParameter(RestCallRequest request, List parameters, + boolean intersection, Pageable pageable) { + Page page = restCallService.getList(request, parameters, intersection, pageable); + + Map parametersWithCountsMap = page.getContent().stream() + .flatMap(restCallDto -> restCallDto.getParameters().stream()) + .filter(request.getParameterNamePredicate()).filter(request.getParameterTypePredicate()) + .collect(Collectors.groupingBy(RestCallParameter::getValue, Collectors.counting())).entrySet().stream() + .sorted(Map.Entry.comparingByValue(Comparator.reverseOrder())).collect(Collectors.toMap( + Map.Entry::getKey, Map.Entry::getValue, (oldValue, newValue) -> oldValue, LinkedHashMap::new)); + + List list = 
parametersWithCountsMap.entrySet().stream() + .map(entry -> new KeyValueResultDto(entry.getKey(), entry.getValue())).collect(Collectors.toList()); + + return new PageImpl<>(list, pageable, list.size()); + } + + @Override + public Page getStatisticsByDate(RestCallRequest request, List parameters, + boolean intersection, Pageable pageable) { + Page page = restCallService.getList(request, parameters, intersection, pageable); + + LinkedHashMap map = page.getContent().stream() + .collect(Collectors.groupingBy(restCallDto -> restCallDto.getCreatedAt().toLocalDate().toString(), + Collectors.counting())) + .entrySet().stream().sorted(Map.Entry.comparingByKey()).collect(Collectors.toMap(Map.Entry::getKey, + Map.Entry::getValue, (oldValue, newValue) -> oldValue, LinkedHashMap::new)); + + List list = map.entrySet().stream() + .map(entry -> new KeyValueResultDto(entry.getKey(), entry.getValue())).collect(Collectors.toList()); + + return new PageImpl<>(list, pageable, list.size()); + } + + private Map getCountsMap(Page page) { + Map addressesWithCountsMap = page.getContent().stream() + .collect(Collectors.groupingBy(RestCallDto::getUrl, Collectors.counting())); + + return addressesWithCountsMap.entrySet().stream().sorted(Map.Entry.comparingByValue(Comparator.reverseOrder())) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, (oldValue, newValue) -> oldValue, + LinkedHashMap::new)); + } + +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/impl/UrlCyclicDecoder.java b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/impl/UrlCyclicDecoder.java new file mode 100644 index 000000000..32299cd02 --- /dev/null +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/reststatistics/service/impl/UrlCyclicDecoder.java @@ -0,0 +1,35 @@ +package uk.ac.ebi.spot.ols.reststatistics.service.impl; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.UnsupportedEncodingException; +import java.net.URLDecoder; +import java.nio.charset.StandardCharsets; + +public class UrlCyclicDecoder { + public static final int URL_DECODE_TIMES = 3; + + private final Logger logger = LoggerFactory.getLogger(getClass()); + + public String decode(String url) { + if (!url.contains("%")) { + return url; + } + + int count = 0; + String decoded = url; + while (decoded.contains("%") && count < URL_DECODE_TIMES) { + try { + decoded = URLDecoder.decode(decoded, StandardCharsets.UTF_8.toString()); + } catch (UnsupportedEncodingException e) { + logger.error("Could not get query parameters: {}", e.getLocalizedMessage()); + + return url; + } + count++; + } + + return decoded; + } +} diff --git a/backend/src/main/java/uk/ac/ebi/spot/ols/service/Neo4jClient.java b/backend/src/main/java/uk/ac/ebi/spot/ols/service/Neo4jClient.java index ebc4107d8..5afa60ad5 100644 --- a/backend/src/main/java/uk/ac/ebi/spot/ols/service/Neo4jClient.java +++ b/backend/src/main/java/uk/ac/ebi/spot/ols/service/Neo4jClient.java @@ -6,9 +6,7 @@ import java.util.stream.Collectors; import com.google.common.base.Stopwatch; -import com.google.gson.Gson; -import com.google.gson.JsonElement; -import com.google.gson.JsonParser; +import com.google.gson.*; import org.neo4j.driver.*; import org.neo4j.driver.Record; import org.neo4j.driver.exceptions.NoSuchRecordException; @@ -139,7 +137,7 @@ public Page queryPaginated(String query, String resVar, String coun Page page = new PageImpl<>( result.list().stream() - .map(r -> JsonParser.parseString(r.get(resVar).get("_json").asString())) + .map(r -> 
parseElementByRecord(r, resVar))
+                        .collect(Collectors.toList()),
 	pageable, count);
@@ -147,6 +145,30 @@
 	return page;
     }
 
+    public JsonElement parseElementByRecord(Record r, String resVar) {
+        JsonElement parsed = new JsonObject();
+
+        try {
+            parsed = JsonParser.parseString(r.get(resVar).get("_json").asString());
+        } catch (JsonSyntaxException jse) {
+            System.out.println("invalid json: " + r.get(resVar).get("_json").asString());
+            System.out.println(jse.getMessage() + " - some suspicious escape fragments will be removed from the JSON.");
+            try {
+                parsed = JsonParser.parseString(r.get(resVar).get("_json").asString().replaceAll("\"\\\\\"", "\"").replaceAll("\\\\\"", "\""));
+            } catch (JsonSyntaxException jse2) {
+                System.out.println("invalid trimmed json: " + r.get(resVar).get("_json").asString().replaceAll("\"\\\\\"", "\""));
+                System.out.println(jse2.getMessage() + " - an empty JSON object will be returned instead.");
+            }
+        } catch (org.neo4j.driver.exceptions.value.Uncoercible u) {
+            System.out.println(u.getMessage() + " - the value is not a String; retrying it as an object with the enclosing array characters removed.");
+            String s = r.get(resVar).get("_json").asObject().toString();
+            System.out.println("object json: " + s.substring(1, s.length() - 1));
+            parsed = JsonParser.parseString(s.substring(1, s.length() - 1));
+        }
+
+        return parsed;
+    }
+
 public JsonElement queryOne(String query, String resVar, Value parameters) {
 
     Session session = getSession();
diff --git a/backend/src/main/resources/application.properties b/backend/src/main/resources/application.properties
index 6e85d0e17..bede02e10 100644
--- a/backend/src/main/resources/application.properties
+++ b/backend/src/main/resources/application.properties
@@ -7,4 +7,10 @@ springdoc.swagger-ui.disable-swagger-default-url=true
 spring.mvc.throw-exception-if-no-handler-found=true
 spring.web.resources.add-mappings=false
-server.error.whitelabel.enabled=false
\ No newline at end of file
+server.error.whitelabel.enabled=false
+
+spring.data.mongodb.host=127.0.0.1
+spring.data.mongodb.port=27017
+spring.data.mongodb.database=ols
+spring.data.mongodb.repositories.enabled=true
+frontends=TIBCENTRAL,NFDI4CHEM,NFDI4ING
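With the `spring.data.mongodb.*` settings just added, every intercepted API call is persisted as a document in the `rest_call` collection (see the `RestCall` entity earlier in this patch). A minimal sketch of querying that collection directly, assuming the same `MongoTemplate` that the repository implementation uses; the one-day window is illustrative:

    // Sketch: counts the rest_call documents recorded in the last 24 hours.
    import java.time.LocalDateTime;

    import org.springframework.data.mongodb.core.MongoTemplate;
    import org.springframework.data.mongodb.core.query.Criteria;
    import org.springframework.data.mongodb.core.query.Query;

    import uk.ac.ebi.spot.ols.reststatistics.entity.RestCall;

    public class RestCallSmokeTest {

        public static long countLastDay(MongoTemplate mongoTemplate) {
            Query query = new Query(
                    Criteria.where("createdAt").gte(LocalDateTime.now().minusDays(1)));
            // RestCall is mapped to the "rest_call" collection via @Document
            return mongoTemplate.count(query, RestCall.class);
        }
    }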
The "real" Neo4j -# and Solr servers are the standard images specified in docker-compose.yml -# -RUN mkdir /opt/neo4j && \ - curl https://dist.neo4j.org/neo4j-community-4.4.9-unix.tar.gz | tar -xz --strip-components=1 -C /opt/neo4j - -RUN mkdir /opt/solr && \ - curl https://archive.apache.org/dist/solr/solr/9.0.0/solr-9.0.0.tgz | tar -xz --strip-components=1 -C /opt/solr - -RUN echo "dbms.security.auth_enabled=false" >> /opt/neo4j/conf/neo4j.conf -RUN echo "dbms.jvm.additional=-Dorg.neo4j.kernel.impl.index.schema.GenericNativeIndexPopulator.blockBasedPopulation=true" >> /opt/neo4j/conf/neo4j.conf - # Copy all the code for dataload into /opt/dataload and build the JARs # RUN mkdir /opt/ols && mkdir /opt/ols/dataload && mkdir /opt/ols/ols-shared - COPY ./dataload /opt/ols/dataload COPY ./ols-shared /opt/ols/ols-shared COPY ./pom.xml /opt/ols @@ -36,6 +21,23 @@ RUN cd /opt/ols/ols-shared && mvn package RUN mvn install:install-file -DcreateChecksum=true -Dpackaging=jar -Dfile=/opt/ols/ols-shared/target/ols4-shared-1.0.0-SNAPSHOT.jar -DgroupId=uk.ac.ebi.spot.ols -DartifactId=ols4-shared -Dversion=1.0.0-SNAPSHOT RUN cd /opt/ols/dataload && mvn package +# Extract Neo4j and Solr vanilla installs to /opt/neo4j and /opt/solr +# +# We use these only as temporary servers for the dataload. The "real" Neo4j +# and Solr servers are the standard images specified in docker-compose.yml +# + +RUN if [ ! -f "/opt/ols/dataload/neo4j-community-5.26.0-unix.tar.gz" ]; \ + then mkdir /opt/neo4j && curl https://dist.neo4j.org/neo4j-community-5.26.0-unix.tar.gz | tar -xz --strip-components=1 -C /opt/neo4j; \ + else mkdir /opt/neo4j && tar -xz -f /opt/ols/dataload/neo4j-community-5.26.0-unix.tar.gz --strip-components=1 -C /opt/neo4j; fi + +RUN if [ ! -f "/opt/ols/dataload/solr-9.7.0.tgz" ]; \ + then mkdir /opt/solr && curl https://archive.apache.org/dist/solr/solr/9.7.0/solr-9.7.0.tgz | tar -xz --strip-components=1 -C /opt/solr; \ + else mkdir /opt/solr && tar -xz -f /opt/ols/dataload/solr-9.7.0.tgz --strip-components=1 -C /opt/solr; fi + +RUN echo "dbms.security.auth_enabled=false" >> /opt/neo4j/conf/neo4j.conf +RUN echo "dbms.jvm.additional=-Dorg.neo4j.kernel.impl.index.schema.GenericNativeIndexPopulator.blockBasedPopulation=true" >> /opt/neo4j/conf/neo4j.conf + # Copy the OLS4 Solr config into our temporary Solr server # RUN rm -rf /opt/solr/server/solr diff --git a/dataload/README.md b/dataload/README.md index 109a7858e..f55b0ba20 100644 --- a/dataload/README.md +++ b/dataload/README.md @@ -16,16 +16,26 @@ Use rdf2json to download all the OWL files, resolve imports, and export JSON fil Now (after about 15 min) you should have a huge file called `foundry_out.json` that contains not only the original config for each ontology loaded from `foundry.json`, but also the ontologies themselves represented in an intermediate JSON format! (Note: the intermediate JSON format is a non-standardised application format totally specific to this tool and is subject to change.) -## Step 2: JSON to CSV *for Neo4j* +## Step 2: Link JSON +Use linker to link the json into a jsonl file. 
+
+    java -jar linker/target/linker-1.0-SNAPSHOT.jar --input foundry_out.json --output foundry_out.jsonl
+
+## Step 3: JSON to CSV *for Neo4j*
 
 You can now convert this huge JSON file to a CSV file ready for Neo4j, using json2neo:
 
     rm -rf output_csv && mkdir output_csv
-    java -jar json2neo/target/json2neo-1.0-SNAPSHOT.jar --input foundry_out_flat.json --outDir output_csv
+    java -jar json2neo/target/json2neo-1.0-SNAPSHOT.jar --input foundry_out.jsonl --outDir output_csv
+
+## Step 4: CSV to Neo4j
+
+Now (after 5-10 mins) you should have a directory full of CSV files. These files are formatted especially for Neo4j. You can load them using the `neo4j-admin import` command or the `csv2neo` module.
 
-## Step 3: CSV to Neo4j
+### Alternative 1: Neo4j Import Command
 
-Now (after 5-10 mins) you should have a directory full of CSV files. These files are formatted especially for Neo4j. You can load them using `neo4j-admin import`, but you'll need to provide the filename of every single CSV file on the command line, which is boring, so included in this repo is a script called `make_csv_import_cmd.sh` that generates the command line for you.
+The Neo4j import command can only be used when initializing a database in the Community Edition of Neo4j. By contrast, the Enterprise Edition of Neo4j allows multiple imports, which makes ontology ingestion more flexible.
+When you are using the `neo4j-admin import` command, you'll need to provide the filename of every single CSV file on the command line, which is boring, so included in this repo is a script called `make_csv_import_cmd.sh` that generates the command line for you.
 
     neo4j-admin import \
 	--ignore-empty-strings=true \
 	--legacy-style-quoting=false \
 	--multiline-fields=true \
 	--read-buffer-size=16777216 \
 	--array-delimiter="|" \
 	--database=neo4j \
 	--processors=16 \
 	$(./make_csv_import_cmd.sh $2)
 
 Now you should have a Neo4j database ready to start!
 
-## Step 4: JSON to JSON *for Solr*
+### Alternative 2: CSV to Neo4j Module
+
+The module is flexible and enables you to perform multiple ingestions on a live database. It can be triggered with the following command:
+
+    java -jar csv2neo/target/csv2neo-1.0-SNAPSHOT.jar -m i -d output_csv
+
+## Step 5: JSON to JSON *for Solr*
 
-Similar to how the Neo4j CSV was generated, you can also generate JSON files ready for uploading to SOLR using neo2solr.
+Similar to how the Neo4j CSV was generated, you can also generate JSON files ready for uploading to Solr using `json2solr`, which can also be run against a live Solr instance.
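+`json2solr` writes one `.jsonl` file per ontology and entity type into the output directory (for example `duo_classes.jsonl` and `duo_autocomplete.jsonl` for the DUO ontology), and `load_into_solr.sh` uploads every matching file it finds there: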
- java -jar json2solr/target/json2solr-1.0-SNAPSHOT.jar --input foundry_out_flat.json --outDir output_csv + java -jar json2solr/target/json2solr-1.0-SNAPSHOT.jar --input foundry_out.jsonl --outDir output_csv diff --git a/dataload/configs/3labeledonts.json b/dataload/configs/3labeledonts.json new file mode 100644 index 000000000..119a3fb74 --- /dev/null +++ b/dataload/configs/3labeledonts.json @@ -0,0 +1,137 @@ +{ + "name": "OBO Foundry", + "title": "The OBO Foundry", + "markdown": "kramdown", + "highlighter": "rouge", + "baseurl": "/", + "imgurl": "/images", + "repo": "https://github.com/OBOFoundry/OBOFoundry.github.io/", + "repo_src": "https://github.com/OBOFoundry/OBOFoundry.github.io/blob/master/", + "author": { + "name": "OBO Technical WG" + }, + "ontologies": [ + { + "ontology_purl": "http://purl.obolibrary.org/obo/duo.owl", + "description": "DUO is an ontology which represent data use conditions.", + "homepage": "https://github.com/EBISPOT/DUO", + "id": "duo", + "license": { + "label": "CC-BY", + "logo": "http://mirrors.creativecommons.org/presskit/buttons/80x15/png/by.png", + "url": "http://creativecommons.org/licenses/by/3.0/" + }, + "title": "The Data Use Ontology", + "tracker": "https://github.com/EBISPOT/DUO/issues", + "preferred_root_term": [ + "http://purl.obolibrary.org/obo/DUO_0000001", + "http://purl.obolibrary.org/obo/DUO_0000017", + "http://purl.obolibrary.org/obo/OBI_0000066" + ], + "label_property": "http://www.w3.org/2000/01/rdf-schema#label", + "synonym_property": [ + "http://purl.obolibrary.org/obo/IAO_0000118", + "http://www.geneontology.org/formats/oboInOwl#hasExactSynonym", + "http://www.geneontology.org/formats/oboInOwl#shorthand" + ], + "definition_property": [ + "http://purl.obolibrary.org/obo/IAO_0000115" + ], + "classifications": [ + { + "collection": [ + "NFDI4ING", + "NFDI4CHEM", + "ESS" + ] + }, + { + "subject": [ + "Computer Science" + ] + } + ], + "repo_url": "https://github.com/EBISPOT/DUO" + }, + { + "ontology_purl": "http://purl.obolibrary.org/obo/ms.owl", + "description": "A structured controlled vocabulary for the annotation of experiments concerned with proteomics mass spectrometry.", + "homepage": "http://www.psidev.info/groups/controlled-vocabularies", + "id": "ms", + "license": { + "label": "CC-BY", + "logo": "http://mirrors.creativecommons.org/presskit/buttons/80x15/png/by.png", + "url": "https://creativecommons.org/licenses/by/4.0/" + }, + "title": "Mass Spectrometry", + "tracker": "https://github.com/HUPO-PSI/psi-ms-CV/issues", + "definition_property": [ + "http://purl.obolibrary.org/obo/IAO_0000115" + ], + "creator": [ + "Yasset Perez-Riverol", + "Matt Chambers", + "Andreas Bertsch", + "Marius Kallhardt", + "Eric Deutsch", + "Fredrik Levander", + "Pierre-Alain Binz", + "Gerhard Mayer", + "Joshua Klein" + ], + "is_foundary": 1, + "preferredPrefix": "MS", + "classifications": [ + { + "collection": [ + "NFDI4CHEM", + "DataPLANT" + ] + }, + { + "subject": [ + "Chemistry" + ] + } + ], + "repo_url": "https://www.psidev.info/groups/controlled-vocabularies" + }, + { + "ontology_purl": "https://raw.githubusercontent.com/tibonto/aeon/main/aeon.owl", + "description": "WIP - NOT READY FOR PRODUCTION - The Academic Event Ontology (AEON) is used to represent information regarding academic events.", + "homepage": "https://github.com/tibonto/aeon", + "id": "aeon", + "license": { + "label": "CC-BY 4.0", + "url": "https://creativecommons.org/licenses/by/4.0/" + }, + "title": "Academic Event Ontology", + "tracker": "https://github.com/tibonto/aeon/issues", + 
"preferredPrefix": "AEON", + "base_uri": [ + "https://github.com/tibonto/aeon" + ], + "classifications": [ + { + "collection": null + }, + { + "subject": [ + "History", + "Social sciences", + "Educational science" + ] + } + ], + "definition_property": [ + "http://purl.obolibrary.org/obo/IAO_0000115" + ], + "synonym_property": [ + "http://purl.obolibrary.org/obo/IAO_0000118", + "http://www.geneontology.org/formats/oboInOwl#hasExactSynonym", + "http://www.geneontology.org/formats/oboInOwl#hasSynonym" + ], + "repo_url": "https://github.com/tibonto/aeon" + } + ] +} diff --git a/dataload/configs/skos_ontologies.json b/dataload/configs/skos_ontologies.json new file mode 100644 index 000000000..15f626247 --- /dev/null +++ b/dataload/configs/skos_ontologies.json @@ -0,0 +1,148 @@ +{ + "ontologies": [ + { + "ontology_purl": "https://raw.githubusercontent.com/physh-org/PhySH/master/physh.ttl", + "description": "PhySH (Physics Subject Headings) is a physics classification scheme developed by APS to organize journal, meeting, and other content by topic.", + "homepage": "https://physh.org/", + "id": "PhySH", + "license": { + "label": "CC-0 1.0", + "url": "https://creativecommons.org/publicdomain/zero/1.0/" + }, + "title": "PhySH - Physics Subject Headings", + "tracker": "https://github.com/physh-org/PhySH/issues", + "definition_property": [ + "http://www.w3.org/2004/02/skos/core#definition", + "http://purl.org/dc/terms/description" + ], + "creator": [ + "American Physical Society (https://www.aps.org/)" + ], + "preferredPrefix": "physh", + "hierarchical_property": [ + "http://www.w3.org/2004/02/skos/core#broader", + "https://physh.org/rdf/2018/01/01/core#inDiscipline", + "https://physh.org/rdf/2018/01/01/core#inFacet" + ], + "label_property": "https://physh.org/rdf/2018/01/01/core#prefLabel", + "synonym_property": [ + "http://www.w3.org/2004/02/skos/core#altLabel" + ], + "base_uri": [ + "https://doi.org/10.29172" + ], + "repo_url": "https://github.com/physh-org/PhySH", + "isSkos": true, + "skosNarrower": false, + "skosRoot": "RELATIONSHIPS" + }, + { + "ontology_purl": "https://raw.githubusercontent.com/astrothesaurus/UAT/master/UAT.rdf", + "title": "Unified Astronomy Thesaurus (UAT)", + "id": "uat", + "preferredPrefix": "uat", + "license": { + "label": "Creative Commons Attribution-ShareAlike 3.0 Unported License", + "url": "https://github.com/astrothesaurus/UAT/blob/master/LICENSE.md" + }, + "mailing_list": "sio-ontology@googlegroups.com", + "description": "The Unified Astronomy Thesaurus (UAT) is an open, interoperable and community-supported thesaurus which unifies existing, divergent, and isolated controlled vocabularies in astronomy and astrophysics into a single high-quality, freely-available open thesaurus formalizing astronomical concepts and their inter-relationships. The UAT builds upon the IAU Thesaurus with major contributions from the Astronomy portions of the thesauri developed by the Institute of Physics Publishing and the American Institute of Physics. 
The Unified Astronomy Thesaurus will be further enhanced and updated through a collaborative effort involving broad community participation.", + "homepage": "http://astrothesaurus.org", + "creator": [ + "Frey Katie" + ], + "is_foundary": false, + "tracker": "https://github.com/astrothesaurus/UAT/issues", + "label_property": "http://www.w3.org/2004/02/skos/core#prefLabel", + "base_uri": [ + "http://astrothesaurus.org/uat" + ], + "synonym_property": [ + "http://www.w3.org/2004/02/skos/core#altLabel" + ], + "definition_property": [ + "http://www.w3.org/2004/02/skos/core#definition" + ], + "repo_url": "https://github.com/astrothesaurus/UAT", + "isSkos": true, + "skosNarrower": false, + "skosRoot": "TOPCONCEPTOF_PROPERTY" + }, +{ + "ontology_purl": "https://raw.githubusercontent.com/dini-ag-kim/hochschulfaechersystematik/master/hochschulfaechersystematik.ttl", + "id": "hsfs", + "title": "Hochschulfächersystematik", + "description": "Diese Hochschulfächersystematik basiert auf der Destatis-Systematik der Fächergruppen, Studienbereiche und Studienfächer (http://bartoc.org/node/18919) und wird gepflegt von der OER-Metadatengruppe der DINI-AG KIM. Die Systematik ist Bestandteil der Spezifikationen LOM for Higher Education OER Repositories und LRMI-Profil (Entwurf).", + "repo_url": "https://github.com/dini-ag-kim/hochschulfaechersystematik", + "preferredPrefix": "hsfs", + "allow_download": true, + "homepage": "https://bartoc.org/en/node/18919", + "base_uri": [ + "https://w3id.org/kim/hochschulfaechersystematik/" + ], + "label_property": "http://www.w3.org/2004/02/skos/core#prefLabel", + "isSkos": true, + "skosNarrower": false, + "skosRoot": "TOPCONCEPTOF_PROPERTY", + "creator": [ + "Michael Menzel", + "Adrian Pohl" + ], + "license": { + "label": "freely available", + "url": "http://bartoc.org/en/Access/Free" + } +}, +{ + "ontology_purl": "https://purl.org/fidbaudigital/subjects", + "title": "FID BAUdigital Subject Headings", + "id": "bdsubj", + "preferredPrefix": "bdsubj", + "license": { + "label": "CC-BY 4.0", + "url": "https://creativecommons.org/licenses/by/4.0/" + }, + "description": "This subject heading system has beeen developed for use in FID BAUdigital and its future web services. It covers scientific fields of Civil Engineering, Architecture and Urban Studies with a special section on digitization. This subject classification has been mapped to several other classification systems. 
The latest version of the subject classification including these mappings is available at https://gitlab.com/fid-bau/terminologie/fid-baudigital-faecherklassifikation/-/raw/main/Subject_Headings_all_mappings.owl.", + "homepage": "https://gitlab.com/fid-bau/terminologie/fid-baudigital-faecherklassifikation", + "tracker": "https://gitlab.com/fid-bau/terminologie/fid-baudigital-faecherklassifikation/-/issues", + "definition_property": [ + "http://www.w3.org/2004/02/skos/core#definition" + ], + "label_property": "http://www.w3.org/2004/02/skos/core#prefLabel", + "creator": [ + "Fraunhofer-Informationszentrum Raum und Bau IRB" + ], + "base_uri": [ + "https://purl.org/fidbaudigital/subjects" + ], + "isSkos": true, + "skosNarrower": false, + "skosRoot": "TOPCONCEPTOF_PROPERTY", + "repo_url": "https://gitlab.com/fid-bau/terminologie/fid-baudigital-faecherklassifikation" +}, +{ + "ontology_purl": "https://vocabs-downloads.acdh.oeaw.ac.at/vocabs-main/GeneralConcepts/OeFOS/oefos_disciplines.ttl", + "id": "oefos", + "license": { + "label": "Creative Commons Attribution 4.0 International License.", + "url": "https://creativecommons.org/licenses/by/4.0/" + }, + "title": "The Austrian Fields of Science and Technology Classification (ÖFOS 2012)", + "description": "The Austrian Fields of Science and Technology Classification (ÖFOS 2012) is the Austrian version of the revised international Fields of Science and Technology Classification of the OECD (FOS) published in the Frascati Manual 2015 as Fields of Research and Development (FORD). These fields are adjusted to national needs, whose application for international comparisons is binding, particularly within the European Statistical System. The six major Fields of Science: Natural Sciences; Technical Sciences; Human Medicine, Health Sciences; Agricultural Sciences, Veterinary Medicine; Social Sciences and Humanities remained unchanged in comparison to ÖFOS 2002. In order to ensure international comparability, the previous 2-digit levels from 2002, which are no longer applicable, were replaced by new 3-digit levels (groups) according to the international FOS respectively FORD. These 3-digit levels were provided with further sub-groups (4-digits) taking into account the comments of the international classification. It is therefore feasible that the new Austrian Fields of Science adapt to national peculiarities of the Austrian research activities. The research area with the corresponding 6-digits in alphabetical order serves as a description of the fields of activities and research projects and/or for the coverage of the main scientific activities of a statistical unit in the research and development surveys. 
(Current revision status: August 2017)", + "homepage": "https://vocabs.dariah.eu/oefos/en/", + "base_uri": [ + "https://vocabs.acdh.oeaw.ac.at/oefosdisciplines/" + ], + "allow_download": true, + "preferredPrefix": "oefos", + "isSkos": true, + "skosNarrower": false, + "skosRoot": "SCHEMA", + "label_property": "http://www.w3.org/2004/02/skos/core#prefLabel", + "creator": [ + "Christoph Hoffmann" + ] +} + ] +} + diff --git a/dataload/configs/testfilter.json b/dataload/configs/testfilter.json new file mode 100644 index 000000000..674e5598d --- /dev/null +++ b/dataload/configs/testfilter.json @@ -0,0 +1 @@ +{"name": "OBO Foundry", "title": "The OBO Foundry", "markdown": "kramdown", "highlighter": "rouge", "baseurl": "/", "imgurl": "/images", "repo": "https://github.com/OBOFoundry/OBOFoundry.github.io/", "repo_src": "https://github.com/OBOFoundry/OBOFoundry.github.io/blob/master/", "author": {"name": "OBO Technical WG"}, "ontologies": [{"ontology_purl": "http://purl.obolibrary.org/obo/iao.owl", "id": "iao", "description": "An ontology of information entities.", "homepage": "https://github.com/information-artifact-ontology/IAO/", "license": {"url": "http://creativecommons.org/licenses/by/4.0/", "label": "CC-BY"}, "title": "Information Artifact Ontology", "tracker": "https://github.com/information-artifact-ontology/IAO/issues", "definition_property": ["http://purl.obolibrary.org/obo/IAO_0000115"], "creator": ["Adam Goldstein", "Alan Ruttenberg", "Albert Goldfain", "Barry Smith", "Bjoern Peters", "Carlo Torniai", "Chris Mungall", "Chris Stoeckert", "Christian A. Boelling", "Darren Natale", "David Osumi-Sutherland", "Gwen Frishkoff", "Holger Stenzhorn", "James A. Overton", "James Malone", "Jennifer Fostel", "Jie Zheng", "Jonathan Rees", "Larisa Soldatova", "Lawrence Hunter", "Mathias Brochhausen", "Matt Brush", "Melanie Courtot", "Michel Dumontier", "Paolo Ciccarese", "Pat Hayes", "Philippe Rocca-Serra", "Randy Dipert", "Ron Rudnicki", "Satya Sahoo", "Sivaram Arabandi", "Werner Ceusters", "William Duncan", "William Hogan", "Yongqun (Oliver) He"], "classifications": [{"collection": ["NFDI4ING", "NFDI4CHEM", "FAIR Data Spaces"]}, {"subject": ["General"]}]}, {"ontology_purl": "https://raw.githubusercontent.com/micheldumontier/semanticscience/master/ontology/sio/release/sio-release.owl", "title": "Semanticscience Integrated Ontology (SIO)", "id": "sio", "preferredPrefix": "sio", "license": {"label": "CC BY 4.0", "url": "http://creativecommons.org/licenses/by/4.0/"}, "description": "The Semanticscience Integrated Ontology (SIO) provides a simple, integrated ontology of types and relations for rich description of objects, processes and their attributes.", "homepage": "https://github.com/MaastrichtU-IDS/semanticscience", "creator": ["Michel Dumontier"], "is_foundary": false, "base_uri": ["http://semanticscience.org/resource/"], "classifications": [{"collection": ["NFDI4ING", "NFDI4CHEM", "ESS"]}, {"subject": ["General"]}], "repo_url": "https://github.com/MaastrichtU-IDS/semanticscience"}, {"ontology_purl": "http://purl.obolibrary.org/obo/bfo.owl", "description": "The upper level ontology upon which OBO Foundry ontologies are built.", "id": "bfo", "license": {"label": "CC-BY", "logo": "http://mirrors.creativecommons.org/presskit/buttons/80x15/png/by.png", "url": "http://creativecommons.org/licenses/by/4.0/"}, "mailing_list": "https://groups.google.com/forum/#!forum/bfo-discuss", "title": "Basic Formal Ontology", "homepage": "http://ifomis.org/bfo/", "tracker": "https://github.com/BFO-ontology/BFO/issues", 
"definition_property": ["http://purl.obolibrary.org/obo/IAO_0000115"], "creator": ["Alan Ruttenberg", "Albert Goldfain", "Barry Smith", "Bill Duncan", "Bjoern Peters", "Chris Mungall", "David Osumi-Sutherland", "Fabian Neuhaus", "Holger Stenzhorn", "James A. Overton", "Janna Hastings", "Jie Zheng", "Jonathan Bona", "Larry Hunter", "Leonard Jacuzzo", "Ludger Jansen", "Mark Ressler", "Mathias Brochhausen", "Mauricio Almeida", "Melanie Courtot", "Pierre Grenon", "Randall Dipert", "Ron Rudnicki", "Selja Sepp\u00e4l\u00e4", "Stefan Schulz", "Thomas Bittner", "Werner Ceusters", "Yongqun He"], "is_foundary": true, "preferredPrefix": "BFO", "classifications": [{"collection": ["NFDI4CHEM", "NFDI4ING", "NFDI4CAT", "Foundational Ontologies"]}, {"subject": ["General"]}], "repo_url": "https://github.com/BFO-ontology/BFO"}]} diff --git a/dataload/create_datafiles.sh b/dataload/create_datafiles.sh index ab70b4374..936cd37d6 100755 --- a/dataload/create_datafiles.sh +++ b/dataload/create_datafiles.sh @@ -18,7 +18,7 @@ rm -f $OUTDIR/* echo JAVA_OPTS=$JAVA_OPTS echo rdf2json -java $JAVA_OPTS -DentityExpansionLimit=0 -DtotalEntitySizeLimit=0 -Djdk.xml.totalEntitySizeLimit=0 -Djdk.xml.entityExpansionLimit=0 -jar $SCRIPT_PATH/rdf2json/target/rdf2json-1.0-SNAPSHOT.jar --config "$CONFIG_URL" --output "$JSON_PATH" "${@:3}" +java $JAVA_OPTS -DentityExpansionLimit=0 -DtotalEntitySizeLimit=0 -Djdk.xml.totalEntitySizeLimit=0 -Djdk.xml.entityExpansionLimit=0 -jar $SCRIPT_PATH/rdf2json/target/rdf2json-1.0-SNAPSHOT.jar --config "$CONFIG_URL" --output "$JSON_PATH" --convertToRDF "${@:3}" echo linker java -jar $SCRIPT_PATH/linker/target/linker-1.0-SNAPSHOT.jar --input "$JSON_PATH" --output "$JSON_PATH_LINKED" diff --git a/dataload/csv2neo/dependency-reduced-pom.xml b/dataload/csv2neo/dependency-reduced-pom.xml new file mode 100644 index 000000000..d8516fc87 --- /dev/null +++ b/dataload/csv2neo/dependency-reduced-pom.xml @@ -0,0 +1,53 @@ + + + 4.0.0 + uk.ac.ebi.spot + csv2neo + csv2neo + 1.0-SNAPSHOT + + + + maven-shade-plugin + + + package + + shade + + + + + false + + + uk.ac.ebi.spot.csv2neo.ImportCSV + + true + + + + + + false + + + + + *:* + + META-INF/*.SF + META-INF/*.DSA + META-INF/*.RSA + + + + + + + + + 17 + 17 + + diff --git a/dataload/csv2neo/pom.xml b/dataload/csv2neo/pom.xml new file mode 100644 index 000000000..027771dc5 --- /dev/null +++ b/dataload/csv2neo/pom.xml @@ -0,0 +1,94 @@ + + + 4.0.0 + + uk.ac.ebi.spot + csv2neo + 1.0-SNAPSHOT + csv2neo + jar + + + 17 + 17 + + + + + org.neo4j.driver + neo4j-java-driver + 5.19.0 + + + commons-cli + commons-cli + 1.5.0 + compile + + + + org.apache.commons + commons-csv + 1.11.0 + + + + + + + org.apache.maven.plugins + maven-shade-plugin + + false + + + + + + uk.ac.ebi.spot.csv2neo.ImportCSV + + + true + + + + + + false + + + + + + *:* + + + META-INF/*.SF + META-INF/*.DSA + META-INF/*.RSA + + + + + + + package + + + shade + + + + + + + diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java new file mode 100644 index 000000000..70ba95db6 --- /dev/null +++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/ImportCSV.java @@ -0,0 +1,291 @@ +package uk.ac.ebi.spot.csv2neo; + +import org.apache.commons.csv.CSVFormat; +import org.apache.commons.csv.CSVRecord; +import org.neo4j.driver.*; +import java.io.*; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.*; +import java.util.concurrent.CountDownLatch; +import 
java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.stream.Collectors;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.DefaultParser;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+import static uk.ac.ebi.spot.csv2neo.QueryGeneration.*;
+
+/**
+ * @author Erhun Giray TUNCAY
+ * @email giray.tuncay@tib.eu
+ * TIB-Leibniz Information Center for Science and Technology
+ */
+public class ImportCSV {
+
+    // Recursively collect all files below the given directory entries.
+    public static List<File> listFiles(File[] files) throws IOException {
+        List<File> fileList = new ArrayList<>();
+        for (File file : files) {
+            if (file.isDirectory()) {
+                fileList.addAll(listFiles(file.listFiles()));
+            } else {
+                fileList.add(file);
+            }
+        }
+
+        return fileList;
+    }
+
+    public static void executeBatchedNodeQueries(List<File> files, Driver driver, int batchSize, int poolSize, int attempts) throws IOException, InterruptedException {
+        for (File file : files) {
+            if (!(file.getName().contains("_ontologies") || file.getName().contains("_properties")
+                    || file.getName().contains("_individuals") || file.getName().contains("_classes")) || !file.getName().endsWith(".csv"))
+                continue;
+            Reader reader = Files.newBufferedReader(Paths.get(file.getAbsolutePath()));
+            org.apache.commons.csv.CSVParser csvParser = new org.apache.commons.csv.CSVParser(reader, CSVFormat.POSTGRESQL_CSV.withFirstRecordAsHeader().withTrim());
+            String[] headers = csvParser.getHeaderNames().toArray(String[]::new);
+            List<List<CSVRecord>> splitRecords = splitList(csvParser.getRecords(),batchSize);
+            CountDownLatch latch = new CountDownLatch(splitRecords.size());
+            ExecutorService executorService = Executors.newFixedThreadPool(poolSize);
+            for (List<CSVRecord> records : splitRecords){
+                NodeCreationQueryTask task = new NodeCreationQueryTask(driver,latch, records,headers,file,attempts);
+                executorService.submit(task);
+            }
+            latch.await();
+            executorService.shutdown();
+        }
+    }
+
+    public static void executeBatchedRelationshipQueries(List<File> files, Driver driver, int batchSize, int poolSize, int attempts) throws IOException, InterruptedException {
+        for (File file : files) {
+            if ((!file.getName().contains("_edges")) || !file.getName().endsWith(".csv"))
+                continue;
+
+            Reader reader = Files.newBufferedReader(Paths.get(file.getAbsolutePath()));
+            org.apache.commons.csv.CSVParser csvParser = new org.apache.commons.csv.CSVParser(reader, CSVFormat.POSTGRESQL_CSV.withFirstRecordAsHeader().withTrim());
+            String[] headers = csvParser.getHeaderNames().toArray(String[]::new);
+            List<List<CSVRecord>> splitRecords = splitList(csvParser.getRecords(), batchSize);
+            CountDownLatch latch = new CountDownLatch(splitRecords.size());
+            ExecutorService executorService = Executors.newFixedThreadPool(poolSize);
+            for (List<CSVRecord> records : splitRecords){
+                RelationShipCreationQueryTask task = new RelationShipCreationQueryTask(driver,latch,records,headers,file, attempts);
+                executorService.submit(task);
+            }
+            latch.await();
+            executorService.shutdown();
+        }
+    }
+
+    /*
+     * Files should be the _ontologies.csv files
+     * */
+    public static Map<String, Integer> displayIngested(List<File> files, Driver driver) throws IOException {
+        System.out.println("---Ingestion Summary---");
+        Map<String, Integer> countRecords = new HashMap<>();
+        for (File file : files){
+            Reader reader = Files.newBufferedReader(Paths.get(file.getAbsolutePath()));
+            org.apache.commons.csv.CSVParser csvParser = new org.apache.commons.csv.CSVParser(reader, CSVFormat.POSTGRESQL_CSV.withFirstRecordAsHeader().withTrim());
+            List<CSVRecord> records = csvParser.getRecords();
+            for (CSVRecord record : records){
+                try (Session session = driver.session()){
+                    String ontology = record.get(0).split("\\+")[0];
+                    var resultN = session.run(countNodesOfOntology(ontology,"ontology"));
+                    int nodes = resultN.next().get("nodes").asInt();
+                    countRecords.put(ontology+"_ontologies.csv",nodes);
+                    System.out.println(nodes+" ontologies were ingested from "+ontology);
+                    resultN = session.run(countNodesOfOntology(ontology,"property"));
+                    nodes = resultN.next().get("nodes").asInt();
+                    countRecords.put(ontology+"_properties.csv",nodes);
+                    System.out.println(nodes+" properties were ingested from "+ontology);
+                    resultN = session.run(countNodesOfOntology(ontology,"individual"));
+                    nodes = resultN.next().get("nodes").asInt();
+                    countRecords.put(ontology+"_individuals.csv",nodes);
+                    System.out.println(nodes+" individuals were ingested from "+ontology);
+                    resultN = session.run(countNodesOfOntology(ontology,"class"));
+                    nodes = resultN.next().get("nodes").asInt();
+                    countRecords.put(ontology+"_classes.csv",nodes);
+                    System.out.println(nodes+" classes were ingested from "+ontology);
+                    var resultR = session.run(countAllRelationshipsOfOntology(ontology));
+                    int relationships = resultR.next().get("relationships").asInt();
+                    countRecords.put(ontology+"_edges.csv",relationships);
+                    System.out.println(relationships+" edges were ingested from "+ontology);
+                }
+            }
+
+        }
+        return countRecords;
+    }
+
+    public static Map<String, Integer> displayCSV(List<File> files) throws IOException {
+        Map<String, Integer> records = new HashMap<>();
+        System.out.println("---Ingestion Plan---");
+        long noofRelationships = 0;
+        long noofNodes = 0;
+        for (File file : files){
+            if (file.getName().endsWith("_edges.csv")){
+                try {
+                    Path path = Paths.get(file.getAbsolutePath());
+                    int noofRecords = (int) Files.lines(path).count() - 1;
+                    records.put(file.getName(),noofRecords);
+                    noofRelationships += noofRecords;
+                    System.out.println(noofRecords+" records have been identified in "+file.getName());
+                } catch (Exception e) {
+                    e.printStackTrace();
+                }
+            } else if (file.getName().endsWith("_ontologies.csv") || file.getName().endsWith("_properties.csv") || file.getName().endsWith("_classes.csv") || file.getName().endsWith("_individuals.csv")){
+                Path path = Paths.get(file.getAbsolutePath());
+                Reader reader = Files.newBufferedReader(path);
+                org.apache.commons.csv.CSVParser csvParser = new org.apache.commons.csv.CSVParser(reader, CSVFormat.POSTGRESQL_CSV.withFirstRecordAsHeader().withTrim());
+                int noofRecords = csvParser.getRecords().size();
+                int noofNewLines = (int) Files.lines(path).count() -1;
+                records.put(file.getName(),noofRecords);
+                noofNodes += noofRecords;
+                if (noofRecords != noofNewLines)
+                    System.out.println("Warning: "+noofRecords+" records have been identified in contrast to "+noofNewLines+" new lines in "+file.getName());
+                else
+                    System.out.println(noofRecords+" records have been identified in "+file.getName());
+            }
+        }
+        System.out.println("Total number of nodes that will be ingested from csv: " + noofNodes);
+        System.out.println("Total number of relationships that will be ingested from csv: " + noofRelationships);
+        return records;
+    }
+
+    public static List<List<CSVRecord>> splitList(List<CSVRecord> list, int batchSize) {
+        List<List<CSVRecord>> subLists = new ArrayList<>();
+        for (int i = 0; i < list.size(); i += batchSize) {
+            subLists.add(new ArrayList<>(list.subList(i, Math.min(i + batchSize, list.size()))));
+        }
+        return subLists;
+    }
+
+    public static int deleteFromSession(Session session, String deletionQuery){
+        int deletedCount = 0;
+        try {
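+            // Echo the Cypher being executed so that failed deletions can be reproduced manually.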
+            System.out.println(deletionQuery);
+            var resultN = session.run(deletionQuery);
+            deletedCount = resultN.next().get("deletedCount").asInt();
+        } catch (Exception e){
+            e.printStackTrace();
+        }
+        return deletedCount;
+    }
+
+    private static Options getOptions() {
+        Options options = new Options();
+        options.addOption("m", "mode",true, "ingest(i), remove(rm) or display(d) ontologies");
+        options.addOption("a", "authenticated",false, "use authentication");
+        options.addOption("u", "user",true, "neo4j user name");
+        options.addOption("pw", "password",true, "neo4j user password");
+        options.addOption("uri", "database_uri",true, "neo4j database uri");
+        options.addOption("db", "database",true, "neo4j database name");
+        options.addOption("o", "ontologies",true, "comma-separated list of ontologies to be removed or displayed");
+        options.addOption("d", "directory",true, "neo4j csv import directory");
+        options.addOption("bs", "batch_size",true, "batch size for splitting queries into multiple transactions");
+        options.addOption("ps", "pool_size",true, "number of threads in the pool");
+        options.addOption("t", "attempts",true, "number of attempts for a particular batch");
+        options.addOption("l", "limit",true, "number of nodes to be removed");
+        options.addOption("lb", "label",true, "node label filter for removal");
+        return options;
+    }
+
+    public static void main(String... args) throws IOException, ParseException {
+        Options options = getOptions();
+        CommandLineParser parser = new DefaultParser();
+        CommandLine cmd = parser.parse( options, args);
+        final String db = cmd.hasOption("db") ? cmd.getOptionValue("db") : "neo4j";
+        final String dbUri = cmd.hasOption("uri") ? cmd.getOptionValue("uri") : "neo4j://localhost";
+        final String dbUser = cmd.hasOption("u") ? cmd.getOptionValue("u") : "neo4j";
+        final String dbPassword = cmd.hasOption("pw") ? cmd.getOptionValue("pw") : "testtest";
+        final String directory = cmd.hasOption("d") ? cmd.getOptionValue("d") : "/tmp/out";
+        final String ontologyPrefixes = cmd.hasOption("o") ? cmd.getOptionValue("o") : "";
+        final int batchSize = cmd.hasOption("bs") && Integer.parseInt(cmd.getOptionValue("bs"))>0 ? Integer.parseInt(cmd.getOptionValue("bs")) : 1000;
+        final int poolSize = cmd.hasOption("ps") && Integer.parseInt(cmd.getOptionValue("ps"))>0 ? Integer.parseInt(cmd.getOptionValue("ps")) : 20;
+        final int attempts = cmd.hasOption("t") ? Integer.parseInt(cmd.getOptionValue("t")) : 5;
+        final int limit = cmd.hasOption("l") ? Integer.parseInt(cmd.getOptionValue("l")) : 1000;
+        final String label = cmd.hasOption("lb") ? cmd.getOptionValue("lb") : "OntologyEntity";
+
+        try (var driver = cmd.hasOption("a") ? GraphDatabase.driver(dbUri, AuthTokens.basic(dbUser, dbPassword)) : GraphDatabase.driver(dbUri)) {
+            driver.verifyConnectivity();
+            try (var session = driver.session(SessionConfig.builder().withDatabase(db).build())) {
+                List<String> indexCommands = new ArrayList<>();
+                indexCommands.add("CREATE CONSTRAINT IF NOT EXISTS FOR (n:Ontology) REQUIRE n.id IS UNIQUE");
+                indexCommands.add("CREATE CONSTRAINT IF NOT EXISTS FOR (n:OntologyEntity) REQUIRE n.id IS UNIQUE");
+                indexCommands.add("CREATE CONSTRAINT IF NOT EXISTS FOR (n:OntologyClass) REQUIRE n.id IS UNIQUE");
+                indexCommands.add("CREATE CONSTRAINT IF NOT EXISTS FOR (n:OntologyProperty) REQUIRE n.id IS UNIQUE");
+                indexCommands.add("CREATE CONSTRAINT IF NOT EXISTS FOR (n:OntologyIndividual) REQUIRE n.id IS UNIQUE");
+                indexCommands.add("CREATE TEXT INDEX ontology_id IF NOT EXISTS FOR (n:Ontology) ON (n.id)");
+                indexCommands.add("CREATE TEXT INDEX entity_id IF NOT EXISTS FOR (n:OntologyEntity) ON (n.id)");
+                indexCommands.add("CREATE TEXT INDEX class_id IF NOT EXISTS FOR (n:OntologyClass) ON (n.id)");
+                indexCommands.add("CREATE TEXT INDEX property_id IF NOT EXISTS FOR (n:OntologyProperty) ON (n.id)");
+                indexCommands.add("CREATE TEXT INDEX individual_id IF NOT EXISTS FOR (n:OntologyIndividual) ON (n.id)");
+                indexCommands.add("CREATE TEXT INDEX ontology_ont_id IF NOT EXISTS FOR (n:Ontology) ON (n.ontologyId)");
+                indexCommands.add("CREATE TEXT INDEX entity_ont_id IF NOT EXISTS FOR (n:OntologyEntity) ON (n.ontologyId)");
+                indexCommands.add("CREATE TEXT INDEX class_ont_id IF NOT EXISTS FOR (n:OntologyClass) ON (n.ontologyId)");
+                indexCommands.add("CREATE TEXT INDEX property_ont_id IF NOT EXISTS FOR (n:OntologyProperty) ON (n.ontologyId)");
+                indexCommands.add("CREATE TEXT INDEX individual_ont_id IF NOT EXISTS FOR (n:OntologyIndividual) ON (n.ontologyId)");
+                for (String command : indexCommands)
+                    try{
+                        session.run(command);
+                    } catch(Exception e){
+                        System.out.println("Could not create constraint or index: "+e.getMessage());
+                    }
+
+                System.out.println("Starting Neo4j modification...");
+                if(cmd.hasOption("m")){
+                    if (cmd.getOptionValue("m").equals("i")){
+                        File dir = new File(directory);
+                        List<File> files = listFiles(dir.listFiles());
+                        Map<String, Integer> planned = displayCSV(files);
+                        executeBatchedNodeQueries(files,driver,batchSize,poolSize,attempts);
+                        executeBatchedRelationshipQueries(files,driver,batchSize, poolSize,attempts);
+                        Map<String, Integer> ingested = displayIngested(files.stream().filter(f -> f.getName().endsWith("_ontologies.csv")).collect(Collectors.toUnmodifiableList()), driver);
+                        Set<String> keys = new HashSet<>();
+                        keys.addAll(planned.keySet());
+                        keys.addAll(ingested.keySet());
+                        for (String key : keys){
+                            System.out.println("For Key: "+key+" - Planned: "+planned.getOrDefault(key,Integer.valueOf(-1))+" and Ingested: "+ingested.getOrDefault(key,Integer.valueOf(-1)));
+                        }
+                    } else if (cmd.getOptionValue("m").equals("rm")){
+                        if (!cmd.hasOption("l") && !cmd.hasOption("lb")){
+                            for(String ontology : ontologyPrefixes.split(",")){
+                                int deletedCount = deleteFromSession(session,generateOntologyDeleteQuery(ontology));
+                                System.out.println(deletedCount+" nodes and their relationships were deleted.");
+                            }
+                        } else if (cmd.hasOption("l") && !cmd.hasOption("lb")){
+                            for(String ontology : ontologyPrefixes.split(",")){
+                                int deletedCount = deleteFromSession(session,generateOntologyDeleteQuery(ontology,limit));
+                                System.out.println(deletedCount+" nodes and their relationships were deleted.");
+                            }
+                        } else if (!cmd.hasOption("l") && cmd.hasOption("lb")){
+                            for(String ontology : ontologyPrefixes.split(",")){
+                                int deletedCount = deleteFromSession(session,generateOntologyDeleteQuery(ontology,label));
+                                System.out.println(deletedCount+" nodes and their relationships were deleted.");
+                            }
+                        } else {
+                            for(String ontology : ontologyPrefixes.split(",")){
+                                int deletedCount = deleteFromSession(session,generateOntologyDeleteQuery(ontology,label,limit));
+                                System.out.println(deletedCount+" nodes and their relationships were deleted.");
+                            }
+                        }
+                    } else if (cmd.getOptionValue("m").equals("d")){
+                        for(String ontology : ontologyPrefixes.split(",")){
+                            var resultN = session.run(countAllNodesOfOntology(ontology));
+                            System.out.println("Number of nodes in ontology "+ontology+" is "+resultN.next().get("nodes").asInt());
+                            var resultR = session.run(countAllRelationshipsOfOntology(ontology));
+                            System.out.println("Number of relationships in ontology "+ontology+" is "+resultR.next().get("relationships").asInt());
+                        }
+                    }
+                } else {
+                    System.out.println("Mode should be i, d, or rm");
+                }
+            } catch (InterruptedException e) {
+                throw new RuntimeException(e);
+            }
+        }
+    }
+}
diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/NodeCreationQueryTask.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/NodeCreationQueryTask.java
new file mode 100644
index 000000000..fad060887
--- /dev/null
+++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/NodeCreationQueryTask.java
@@ -0,0 +1,65 @@
+package uk.ac.ebi.spot.csv2neo;
+
+import org.apache.commons.csv.CSVRecord;
+import org.neo4j.driver.Driver;
+import org.neo4j.driver.Session;
+
+import java.io.File;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.CountDownLatch;
+
+import static uk.ac.ebi.spot.csv2neo.QueryGeneration.generateBlankNodeCreationQuery;
+import static uk.ac.ebi.spot.csv2neo.QueryGeneration.generateProps;
+
+/**
+ * @author Erhun Giray TUNCAY
+ * @email giray.tuncay@tib.eu
+ * TIB-Leibniz Information Center for Science and Technology
+ */
+public class NodeCreationQueryTask implements Runnable {
+
+    private final Driver driver;
+    private CountDownLatch latch;
+    private final List<CSVRecord> records;
+    private final String[] headers;
+    private final File file;
+    private final int attempts;
+
+    public NodeCreationQueryTask(Driver driver, CountDownLatch latch, List<CSVRecord> records, String[] headers, File file, int attempts) {
+        this.driver = driver;
+        this.latch = latch;
+        this.records = records;
+        this.headers = headers;
+        this.file = file;
+        this.attempts = attempts;
+    }
+
+    @Override
+    public void run() {
+        boolean success = false;
+        // Retry the batch up to the configured number of attempts, writing all
+        // records of the batch in a single transaction.
+        for (int i = 0; i < attempts && !success; i++) {
+            try {
+                try (Session session = driver.session()) {
+                    success = session.executeWrite(tx -> {
+                        for (CSVRecord csvRecord : records) {
+                            String[] row = csvRecord.toList().toArray(String[]::new);
+                            String query = generateBlankNodeCreationQuery(headers, row);
+                            Map<String, Object> params = generateProps(headers, row);
+                            tx.run(query, params);
+                        }
+                        return true;
+                    });
+                }
+            } catch(Exception e) {
+                System.out.println("Attempt "+i+" error: "+e.getMessage());
+            }
+        }
+        latch.countDown();
+        System.out.println("There are "+latch.getCount()+" remaining node batches.");
+        if (success)
+            System.out.println(records.size()+" nodes have been successfully added from "+file.getName());
+        else
+            System.out.println("Warning: "+records.size()+" nodes failed to be added from "+file.getName());
+    }
+}
diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java
new file mode 100644
index 000000000..c4021e6d3
--- /dev/null
+++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/QueryGeneration.java
@@ -0,0 +1,114 @@
+package uk.ac.ebi.spot.csv2neo;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * @author Erhun Giray TUNCAY
+ * @email giray.tuncay@tib.eu
+ * TIB-Leibniz Information Center for Science and Technology
+ */
+public class QueryGeneration {
+
+    public static String generateBlankNodeCreationQuery(String[] titles, String[] values){
+        StringBuilder sb = new StringBuilder();
+        if (titles.length == values.length) {
+            sb.append("CREATE (")
+                    .append(":")
+                    .append("`"+values[1].replace("|","`:`")+"`")
+                    .append(" $props")
+                    .append(")")
+                    .append(" ");
+        } else {
+            System.out.println("titles and values are not equal");
+            System.out.println("titles: "+titles.length + " - values: " +values.length);
+        }
+        return sb.toString();
+    }
+
+    public static Map<String, Object> generateProps(String[] titles, String[] values){
+        Map<String, Object> props = new HashMap<>();
+        props.put("id",values[0]);
+        props.put("_json",values[2]);
+        if (titles.length == values.length) {
+            for (int i = 3; i < values.length; i++){
+                String[] title = titles[i].split(":");
+                if (title.length > 1 && title[1].equals("string[]")) {
+                    props.put(title[0].replaceAll("\"\"","\""),values[i].split("\\|"));
+                } else
+                    props.put(title[0].replaceAll("\"\"","\""),values[i]);
+            }
+        } else {
+            System.out.println("titles and values are not equal");
+            System.out.println("titles: "+titles.length + " - values: " +values.length);
+        }
+        Map<String, Object> params = new HashMap<>();
+        params.put( "props", props );
+        return params;
+    }
+
+    public static String generateRelationCreationQuery(String[] titles, String[] values){
+        StringBuilder sb = new StringBuilder();
+
+        if (titles.length == values.length){
+            sb.append("MATCH (n"+idToLabel(values[0])+" {id: "+"\'"+values[0]+"\'"+"}),")
+                    .append("(m"+idToLabel(values[2])+" {id: "+"\'"+values[2]+"\'"+"}) ")
+                    .append("WHERE n.id STARTS WITH '"+values[0].split("\\+")[0]+"' AND m.id STARTS WITH '"+values[2].split("\\+")[0]+"' ")
+                    .append("AND '"+values[0].split("\\+")[0]+"' IN n.ontologyId AND '"+values[2].split("\\+")[0]+"' IN m.ontologyId ")
+                    .append("CREATE (n)-[:")
+                    .append("`"+values[1].replace("|","`:`")+"`")
+                    .append("]->(m)");
+        } else {
+            System.out.println("titles and values are not equal");
+            System.out.println("titles: "+titles.length + " - values: " +values.length);
+        }
+
+        return sb.toString();
+    }
+
+    public static String generateOntologyDeleteQuery(String ontologyPrefix){
+        // Computationally demanding version that shows deleted relationships as well as deleted nodes.
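+        // That version is kept for reference in the comment below; the method returns a cheaper query that reports only the number of deleted nodes.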
+        // MATCH (n) WHERE n.id STARTS WITH 'ontologyPrefix' OPTIONAL MATCH (n)-[r]-() WITH n, collect(r) AS relationships DETACH DELETE n RETURN COUNT(n) AS deletedCount, relationships
+        return "MATCH (n) WHERE n.id STARTS WITH '"+ontologyPrefix+"+' AND '"+ontologyPrefix+"' IN n.ontologyId WITH n DETACH DELETE n RETURN COUNT(n) AS deletedCount";
+    }
+
+    public static String generateOntologyDeleteQuery(String ontologyPrefix, String label){
+        return "MATCH (n:`"+label+"`) WHERE n.id STARTS WITH '"+ontologyPrefix+"+' AND '"+ontologyPrefix+"' IN n.ontologyId WITH n DETACH DELETE n RETURN COUNT(n) AS deletedCount";
+    }
+
+    public static String generateOntologyDeleteQuery(String ontologyPrefix, int limit){
+        return "MATCH (n) WHERE n.id STARTS WITH '"+ontologyPrefix+"+' AND '"+ontologyPrefix+"' IN n.ontologyId WITH n SKIP 0 LIMIT "+limit+" DETACH DELETE n RETURN COUNT(n) AS deletedCount";
+    }
+
+    public static String generateOntologyDeleteQuery(String ontologyPrefix, String label, int limit){
+        return "MATCH (n:`"+label+"`) WHERE n.id STARTS WITH '"+ontologyPrefix+"+' AND '"+ontologyPrefix+"' IN n.ontologyId WITH n SKIP 0 LIMIT "+limit+" DETACH DELETE n RETURN COUNT(n) AS deletedCount";
+    }
+
+    public static String countAllRelationshipsOfOntology(String ontologyPrefix) {
+        return "MATCH (n)-[r]-(m) WHERE '"+ontologyPrefix+"' IN n.ontologyId and '"+ontologyPrefix+"' IN m.ontologyId return count(distinct r) as relationships";
+    }
+
+    public static String countRelationshipsOfOntology(String ontologyPrefix, String label) {
+        return "MATCH (n)-[r:`"+label+"`]-(m) WHERE '"+ontologyPrefix+"' IN n.ontologyId and '"+ontologyPrefix+"' IN m.ontologyId return count(distinct r) as relationships";
+    }
+
+    public static String countAllNodesOfOntology(String ontologyPrefix){
+        return "MATCH (n) WHERE n.id STARTS WITH '"+ontologyPrefix+"' return count(n) as nodes";
+    }
+
+    public static String countNodesOfOntology(String ontologyPrefix, String type){
+        return "MATCH (n) WHERE n.id STARTS WITH '"+ontologyPrefix+"' AND '"+type+"' IN n.type return count(n) as nodes";
+    }
+
+    public static String idToLabel(String id){
+        String label = switch (id.split("\\+")[1]) {
+            case "class" -> ":OntologyClass";
+            case "entity" -> ":OntologyEntity";
+            case "ontology" -> ":Ontology";
+            case "property" -> ":OntologyProperty";
+            case "individual" -> ":OntologyIndividual";
+            default -> "";
+        };
+        return label;
+    }
+}
diff --git a/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/RelationShipCreationQueryTask.java b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/RelationShipCreationQueryTask.java
new file mode 100644
index 000000000..4b69cae1c
--- /dev/null
+++ b/dataload/csv2neo/src/main/java/uk/ac/ebi/spot/csv2neo/RelationShipCreationQueryTask.java
@@ -0,0 +1,62 @@
+package uk.ac.ebi.spot.csv2neo;
+
+import org.apache.commons.csv.CSVRecord;
+import org.neo4j.driver.Driver;
+import org.neo4j.driver.Session;
+
+import java.io.File;
+import java.util.List;
+import java.util.concurrent.CountDownLatch;
+
+import static uk.ac.ebi.spot.csv2neo.QueryGeneration.*;
+
+/**
+ * @author Erhun Giray TUNCAY
+ * @email giray.tuncay@tib.eu
+ * TIB-Leibniz Information Center for Science and Technology
+ */
+public class RelationShipCreationQueryTask implements Runnable {
+
+    private final Driver driver;
+    private final CountDownLatch latch;
+    private final List<CSVRecord> records;
+    private final String[] headers;
+    private final File file;
+    private final int attempts;
+
+    public RelationShipCreationQueryTask(Driver driver, CountDownLatch latch, List<CSVRecord> records, String[] headers, File file, int attempts) {
+        this.driver = driver;
+        this.latch = latch;
+        this.records = records;
+        this.headers = headers;
+        this.file = file;
+        this.attempts = attempts;
+    }
+
+    @Override
+    public void run() {
+        boolean success = false;
+        for (int i = 0; i < attempts && !success; i++) {
+            try {
+                try (Session session = driver.session()) {
+                    success = session.executeWrite(tx -> {
+                        for (CSVRecord csvRecord : records) {
+                            String[] row = csvRecord.toList().toArray(String[]::new);
+                            String query = generateRelationCreationQuery(headers,row);
+                            tx.run(query);
+                        }
+                        return true;
+                    });
+                }
+            } catch(Exception e){
+                System.out.println("Attempt "+i+" error: "+e.getMessage());
+            }
+        }
+        latch.countDown();
+        System.out.println("There are "+latch.getCount()+" remaining relationship batches.");
+        if (success)
+            System.out.println(records.size()+" relationships have been successfully added from "+file.getName());
+        else
+            System.out.println("Warning: "+records.size()+" relationships failed to be added from "+file.getName());
+    }
+}
diff --git a/dataload/dataload.dockersh b/dataload/dataload.dockersh
index 8dae8f417..3a2153707 100755
--- a/dataload/dataload.dockersh
+++ b/dataload/dataload.dockersh
@@ -6,7 +6,6 @@ echo Loading data...
 rm -rf /opt/solr/server/solr/*
 cp -r /opt/ols/dataload/solr_config/* /opt/solr/server/solr/
 ./load_into_solr.sh /opt/solr /tmp/out
-./create_neo4j_indexes.sh /opt/neo4j
 chmod -R 777 /opt/solr/server/solr/*
 chmod -R 777 /opt/neo4j/data/*
diff --git a/dataload/json2neo/src/main/java/JSON2CSV.java b/dataload/json2neo/src/main/java/JSON2CSV.java
index d37d64a42..5fe0cedeb 100644
--- a/dataload/json2neo/src/main/java/JSON2CSV.java
+++ b/dataload/json2neo/src/main/java/JSON2CSV.java
@@ -43,6 +43,14 @@ public static void main(String[] args) throws IOException {
         String inputFilePath = cmd.getOptionValue("input");
         String outputFilePath = cmd.getOptionValue("outDir");
 
+        File file = new File(outputFilePath);
+        try {
+            file.mkdirs();
+            file.createNewFile();
+        } catch (IOException ioe) {
+            ioe.printStackTrace();
+        }
+
         new NeoConverter(inputFilePath, outputFilePath).convert();
     }
diff --git a/dataload/json2solr/src/main/java/JSON2Solr.java b/dataload/json2solr/src/main/java/JSON2Solr.java
index 728f4768d..fb0baded4 100644
--- a/dataload/json2solr/src/main/java/JSON2Solr.java
+++ b/dataload/json2solr/src/main/java/JSON2Solr.java
@@ -42,24 +42,15 @@ public static void main(String[] args) throws IOException {
         String inputFilePath = cmd.getOptionValue("input");
         String outPath = cmd.getOptionValue("outDir");
 
-        PrintStream ontologiesWriter = null;
-        PrintStream classesWriter = null;
-        PrintStream propertiesWriter = null;
-        PrintStream individualsWriter = null;
-        PrintStream autocompleteWriter = null;
-
-
-        String ontologiesOutName = outPath + "/ontologies.jsonl";
-        String classesOutName = outPath + "/classes.jsonl";
-        String propertiesOutName = outPath + "/properties.jsonl";
-        String individualsOutName = outPath + "/individuals.jsonl";
-        String autocompleteOutName = outPath + "/autocomplete.jsonl";
+        File file = new File(outPath);
+        try {
+            file.mkdirs();
+            file.createNewFile();
+        } catch (IOException ioe) {
+            ioe.printStackTrace();
+        }
 
-        ontologiesWriter = new PrintStream(ontologiesOutName);
-        classesWriter = new PrintStream(classesOutName);
-        propertiesWriter = new PrintStream(propertiesOutName);
-        individualsWriter = new PrintStream(individualsOutName);
-        autocompleteWriter = new PrintStream(autocompleteOutName);
+        Map<String, PrintStream> writers = new HashMap<>();
 
         JsonReader reader = new JsonReader(new InputStreamReader(new FileInputStream(inputFilePath)));
@@ -101,10 +92,9 @@ public static void main(String[] args) throws IOException {
         flattenedClass.put("id", entityId);
 
         flattenProperties(_class, flattenedClass);
+        writeEntity("classes",ontologyId,flattenedClass,outPath,writers);
 
-        classesWriter.println(gson.toJson(flattenedClass));
-
-        writeAutocompleteEntries(ontologyId, entityId, flattenedClass, autocompleteWriter);
+        writeAutocompleteEntries(ontologyId, entityId, flattenedClass, outPath, writers);
     }
 
     reader.endArray();
@@ -126,9 +116,9 @@ public static void main(String[] args) throws IOException {
 
         flattenProperties(property, flattenedProperty);
 
-        propertiesWriter.println(gson.toJson(flattenedProperty));
+        writeEntity("properties",ontologyId,flattenedProperty,outPath,writers);
 
-        writeAutocompleteEntries(ontologyId, entityId, flattenedProperty, autocompleteWriter);
+        writeAutocompleteEntries(ontologyId, entityId, flattenedProperty,outPath,writers);
     }
 
     reader.endArray();
@@ -150,9 +140,9 @@ public static void main(String[] args) throws IOException {
 
         flattenProperties(individual, flattenedIndividual);
 
-        individualsWriter.println(gson.toJson(flattenedIndividual));
+        writeEntity("individuals",ontologyId,flattenedIndividual,outPath,writers);
 
-        writeAutocompleteEntries(ontologyId, entityId, flattenedIndividual, autocompleteWriter);
+        writeAutocompleteEntries(ontologyId, entityId, flattenedIndividual,outPath,writers);
     }
 
     reader.endArray();
@@ -179,7 +169,7 @@ public static void main(String[] args) throws IOException {
 
         flattenProperties(ontology, flattenedOntology);
 
-        ontologiesWriter.println(gson.toJson(flattenedOntology));
+        writeEntity("ontologies",ontologyId,flattenedOntology,outPath,writers);
 
         reader.endObject(); // ontology
     }
@@ -195,6 +185,26 @@ public static void main(String[] args) throws IOException {
 
         reader.endObject();
         reader.close();
+
+        for (PrintStream printStream : writers.values())
+            printStream.close();
+    }
+
+    static private void writeEntity(String type, String ontologyId, Map<String, Object> flattenedEntity, String outPath, Map<String, PrintStream> writers) throws FileNotFoundException {
+        if(writers.containsKey(ontologyId+"_"+type))
+            writers.get(ontologyId+"_"+type).println(gson.toJson(flattenedEntity));
+        else {
+            writers.put(ontologyId+"_"+type,new PrintStream(outPath+"/"+ontologyId+"_"+type+".jsonl"));
+            writers.get(ontologyId+"_"+type).println(gson.toJson(flattenedEntity));
+        }
+    }
+
+    static private void writeAutocomplete(String ontologyId, Map<String, Object> flattenedEntity, String outPath, Map<String, PrintStream> writers) throws FileNotFoundException {
+        if(writers.containsKey(ontologyId+"_autocomplete"))
+            writers.get(ontologyId+"_autocomplete").println(gson.toJson(flattenedEntity, Map.class));
+        else {
+            writers.put(ontologyId+"_autocomplete",new PrintStream(outPath+"/"+ontologyId+"_autocomplete.jsonl"));
+            writers.get(ontologyId+"_autocomplete").println(gson.toJson(flattenedEntity, Map.class));
+        }
     }
 
     static private void flattenProperties(Map properties, Map flattened) {
@@ -236,24 +246,24 @@ static private void flattenProperties(Map properties, Map flattened) {
 
-    static void writeAutocompleteEntries(String ontologyId, String entityId, Map flattenedEntity, PrintStream autocompleteWriter) {
+    static void writeAutocompleteEntries(String ontologyId, String entityId, Map<String, Object> flattenedEntity, String outPath, Map<String, PrintStream> writers) throws FileNotFoundException {
 
         Object labels = flattenedEntity.get(LABEL.getText());
 
-        if (labels instanceof String) {
-            labels = (new ArrayList<>()).add(labels);
-        }
-
-        for(Object label : (List) labels) {
-            autocompleteWriter.println( gson.toJson(makeAutocompleteEntry(ontologyId, entityId, (String)label), Map.class) );
-        }
+        if(labels instanceof List) {
+            for(Object label : (List) labels) {
+                writeAutocomplete(ontologyId,makeAutocompleteEntry(ontologyId, entityId, (String)label),outPath,writers);
+            }
+        } else if(labels instanceof String) {
+            writeAutocomplete(ontologyId,makeAutocompleteEntry(ontologyId, entityId, (String)labels),outPath,writers);
+        }
 
         Object synonyms = flattenedEntity.get(SYNONYM.getText());
 
         if(synonyms instanceof List) {
             for(Object label : (List) synonyms) {
-                autocompleteWriter.println( gson.toJson(makeAutocompleteEntry(ontologyId, entityId, (String)label), Map.class) );
+                writeAutocomplete(ontologyId,makeAutocompleteEntry(ontologyId, entityId, (String)label),outPath,writers);
             }
         } else if(synonyms instanceof String) {
-            autocompleteWriter.println( gson.toJson(makeAutocompleteEntry(ontologyId, entityId, (String)synonyms), Map.class) );
+            writeAutocomplete(ontologyId,makeAutocompleteEntry(ontologyId, entityId, (String)synonyms),outPath,writers);
         }
     }
diff --git a/dataload/load_into_neo4j.sh b/dataload/load_into_neo4j.sh
index 21bc5b286..aa2142e7f 100755
--- a/dataload/load_into_neo4j.sh
+++ b/dataload/load_into_neo4j.sh
@@ -5,17 +5,18 @@ if [ $# == 0 ]; then
     exit 1
 fi
 
+SCRIPT_PATH=$(dirname $(readlink -f $0))
+[[ -z "${BATCH_SIZE}" ]] && bs=1000 || bs="${BATCH_SIZE}"
+[[ -z "${POOL_SIZE}" ]] && ps=20 || ps="${POOL_SIZE}"
+[[ -z "${NOOF_ATTEMPTS}" ]] && t=5 || t="${NOOF_ATTEMPTS}"
+
 rm -rf $1/data/databases/neo4j
 rm -rf $1/data/transactions/neo4j
 
-$1/bin/neo4j-admin import \
-    --ignore-empty-strings=true \
-    --legacy-style-quoting=false \
-    --multiline-fields=true \
-    --read-buffer-size=16777216 \
-    --array-delimiter="|" \
-    --database=neo4j \
-    --processors=16 \
-    $(./make_csv_import_cmd.sh $2)
+$1/bin/neo4j start
+sleep 20
+echo "csv2neo with batch size $bs, pool size $ps and number of attempts $t"
+java -jar $SCRIPT_PATH/csv2neo/target/csv2neo-1.0-SNAPSHOT.jar -m i -d $2 -bs $bs -ps $ps -t $t
+
diff --git a/dataload/load_into_neo4j_with_import_tool.sh b/dataload/load_into_neo4j_with_import_tool.sh
new file mode 100644
index 000000000..0c5305687
--- /dev/null
+++ b/dataload/load_into_neo4j_with_import_tool.sh
@@ -0,0 +1,23 @@
+#!/usr/bin/env bash
+
+if [ $# == 0 ]; then
+    echo "Usage: $0 <neo4j-home> <csv-dir>"
+    exit 1
+fi
+
+rm -rf $1/data/databases/neo4j
+rm -rf $1/data/transactions/neo4j
+
+$1/bin/neo4j-admin database import full \
+    --ignore-empty-strings=true \
+    --legacy-style-quoting=false \
+    --multiline-fields=true \
+    --read-buffer-size=16777216 \
+    --array-delimiter="|" \
+    --threads=16 \
+    $(./make_csv_import_cmd.sh $2)
+
+$1/bin/neo4j-admin database info neo4j
+
+
+
diff --git a/dataload/load_into_solr.sh b/dataload/load_into_solr.sh
index aae049a28..86e7d65fb 100755
--- a/dataload/load_into_solr.sh
+++ b/dataload/load_into_solr.sh
@@ -8,33 +8,24 @@ fi
 
 $1/bin/solr start -force -Djetty.host=127.0.0.1
 
 sleep 10
 
-wget --method POST --no-proxy -O - --server-response --content-on-error=on --header="Content-Type: application/json" --body-file $2/ontologies.jsonl \
-    http://127.0.0.1:8983/solr/ols4_entities/update/json/docs?commit=true
-
-wget --method POST --no-proxy -O - --server-response --content-on-error=on --header="Content-Type: application/json" --body-file $2/classes.jsonl \
-    http://127.0.0.1:8983/solr/ols4_entities/update/json/docs?commit=true
-
-wget --method POST --no-proxy -O - --server-response --content-on-error=on --header="Content-Type: application/json" --body-file $2/properties.jsonl \
-    http://127.0.0.1:8983/solr/ols4_entities/update/json/docs?commit=true
-
-wget --method POST --no-proxy -O - --server-response --content-on-error=on
--header="Content-Type: application/json" --body-file $2/individuals.jsonl \ - http://127.0.0.1:8983/solr/ols4_entities/update/json/docs?commit=true - -wget --method POST --no-proxy -O - --server-response --content-on-error=on --header="Content-Type: application/json" --body-file $2/autocomplete.jsonl \ - http://127.0.0.1:8983/solr/ols4_autocomplete/update/json/docs?commit=true - +FILES=$2/*_*.jsonl +for f in $FILES +do + echo "$f" + if [[ $f == *_ontologies.jsonl ]] || [[ $f == *_classes.jsonl ]] || [[ $f == *_properties.jsonl ]] || [[ $f == *_individuals.jsonl ]]; then + echo 'entity' + wget --method POST --no-proxy -O - --server-response --content-on-error=on --header="Content-Type: application/json" --body-file $f http://127.0.0.1:8983/solr/ols4_entities/update/json/docs?commit=true + elif [[ $f == *_autocomplete.jsonl ]]; then + echo 'autocomplete' + wget --method POST --no-proxy -O - --server-response --content-on-error=on --header="Content-Type: application/json" --body-file $f http://127.0.0.1:8983/solr/ols4_autocomplete/update/json/docs?commit=true + fi +done sleep 5 - +echo 'update entities' wget --no-proxy http://127.0.0.1:8983/solr/ols4_entities/update?commit=true - sleep 5 - +echo 'update autocomplete' wget --no-proxy http://127.0.0.1:8983/solr/ols4_autocomplete/update?commit=true - -sleep 5 +echo 'loading solr finished' $1/bin/solr stop - - - - diff --git a/dataload/pom.xml b/dataload/pom.xml index 8c7727682..e6e2378f4 100644 --- a/dataload/pom.xml +++ b/dataload/pom.xml @@ -13,6 +13,7 @@ linker json2solr json2neo + csv2neo extras diff --git a/dataload/rdf2json/pom.xml b/dataload/rdf2json/pom.xml index f26c84522..d16987aa2 100644 --- a/dataload/rdf2json/pom.xml +++ b/dataload/rdf2json/pom.xml @@ -44,6 +44,11 @@ logback-classic 1.4.11 + + org.obolibrary.robot + robot-core + 1.9.6 + diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyConversion.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyConversion.java new file mode 100644 index 000000000..98ce26444 --- /dev/null +++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyConversion.java @@ -0,0 +1,172 @@ +package uk.ac.ebi.rdf2json; + +import org.obolibrary.robot.IOHelper; +import org.semanticweb.owlapi.apibinding.OWLManager; +import org.semanticweb.owlapi.formats.*; +import org.semanticweb.owlapi.model.*; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.net.ssl.HttpsURLConnection; +import java.io.*; +import java.net.HttpURLConnection; +import java.net.URISyntaxException; +import java.net.URL; +import java.net.URLConnection; +import java.nio.file.Path; +import java.nio.file.Paths; + +/** + * @author Erhun Giray TUNCAY + * @email giray.tuncay@tib.eu + * TIB-Leibniz Information Center for Science and Technology + */ +public class OntologyConversion { + private static final Logger logger = LoggerFactory.getLogger(OntologyConversion.class); + + private OWLOntology ontology; + + private String extOriginal; + + private String extConverted; + + public OntologyConversion(String url, String id, OWLDocumentFormat convertedFormat) throws IOException { + convertOntologyToRDF(url,id,convertedFormat); + } + + public OWLOntology getOntology() { + return ontology; + } + + public String getExtOriginal() { + return extOriginal; + } + + public String getExtConverted() { + return extConverted; + } + + private void convertOntologyToRDF(String url, String outputFile, OWLDocumentFormat convertedFormat) throws IOException { + FileOutputStream fos = null; + OWLOntology ont = 
+        try {
+            OWLDocumentFormat format = ont.getOWLOntologyManager().getOntologyFormat(ont);
+            extOriginal = getExtension(format);
+            extConverted = getExtension(convertedFormat);
+            if (extOriginal.equals(extConverted)){
+                extOriginal = extOriginal+"1";
+                extConverted = extConverted+"2";
+            }
+            if (format instanceof OBODocumentFormat){
+                Path resourceDirectory = Paths.get(OntologyGraph.class.getProtectionDomain().getCodeSource().getLocation().toURI()).getParent();
+                logger.info("Saving the original "+format.getKey()+" format ontology to "+outputFile+extOriginal);
+                fos = getFileOutPutStreamForExecutionPath(outputFile+extOriginal);
+                ont.saveOntology(format, fos);
+                logger.info("Saving the converted RDF/XML Syntax format ontology to "+outputFile+extConverted);
+                String filePath = resourceDirectory.resolve(outputFile+extConverted).toString();
+                IOHelper iohelper = new IOHelper();
+                iohelper.saveOntology(ont,convertedFormat, IRI.create(new File(filePath)),true);
+                ont = loadOntology("file:"+filePath);
+            } else {
+                logger.info("Saving the original "+format.getKey()+" format ontology to "+outputFile+extOriginal);
+                fos = getFileOutPutStreamForExecutionPath(outputFile+extOriginal);
+                ont.saveOntology(format, fos);
+                logger.info("Saving the converted RDF/XML Syntax format ontology to "+outputFile+extConverted);
+                fos = getFileOutPutStreamForExecutionPath(outputFile+extConverted);
+                ont.saveOntology(new RDFXMLDocumentFormat(), fos);
+                Path resourceDirectory = Paths.get(OntologyGraph.class.getProtectionDomain().getCodeSource().getLocation().toURI()).getParent();
+                String filePath = resourceDirectory.resolve(outputFile+extConverted).toString();
+                ont = loadOntology("file:"+filePath);
+            }
+        } catch (IOException e) {
+            throw new RuntimeException(e);
+        } catch (OWLOntologyStorageException e) {
+            throw new RuntimeException(e);
+        } catch (Exception e) {
+            throw new RuntimeException(e);
+        } finally {
+            if (fos != null)
+                fos.close();
+        }
+        ontology = ont;
+    }
+
+    private OWLOntology loadOntology(String url) throws IOException {
+        OWLOntologyManager ontManager = OWLManager.createOWLOntologyManager();
+        OWLOntology ont;
+        InputStream is = null;
+        URLConnection con = null;
+        try {
+            try {
+                URL tempURL = new URL(url);
+                con = tempURL.openConnection();
+                is = tempURL.openStream();
+            } catch (IOException e) {
+                url = replaceURLByProtocol(con, url);
+                try {
+                    is = new URL(url).openStream();
+                } catch (IOException ioe) {
+                    ioe.printStackTrace();
+                }
+            }
+
+            try {
+                ont = ontManager.loadOntologyFromOntologyDocument(is);
+            } catch (Exception e) {
+                url = replaceURLByProtocol(con, url);
+                try {
+                    is = new URL(url).openStream();
+                    ont = ontManager.loadOntologyFromOntologyDocument(is);
+                } catch (Exception e2) {
+                    ont = ontManager.loadOntologyFromOntologyDocument(IRI.create(url));
+                }
+            }
+        } catch (OWLOntologyCreationException e) {
+            throw new RuntimeException(e);
+        } finally {
+            if (is != null)
+                is.close();
+        }
+        return ont;
+    }
+
+    private FileOutputStream getFileOutPutStreamForExecutionPath(String outputFile) {
+        FileOutputStream fos;
+        try {
+            Path resourceDirectory = Paths.get(OntologyGraph.class.getProtectionDomain().getCodeSource().getLocation().toURI()).getParent();
+            String filePath = resourceDirectory.resolve(outputFile).toString();
+            fos = new FileOutputStream(filePath);
+        } catch (FileNotFoundException e) {
+            throw new RuntimeException(e);
+        } catch (URISyntaxException e) {
+            throw new RuntimeException(e);
+        }
+        return fos;
+    }
+
+    private String replaceURLByProtocol(URLConnection con, String url) {
+        if (con instanceof HttpsURLConnection) {
+            url = url.replace("https:", "http:");
+        } else if (con instanceof HttpURLConnection) {
+            url = url.replace("http:", "https:");
+        }
+        return url;
+    }
+
+    private String getExtension(OWLDocumentFormat format) throws IllegalArgumentException {
+        String ext = ".txt";
+        if (format instanceof OBODocumentFormat)
+            ext = ".obo";
+        else if (format instanceof RDFXMLDocumentFormat)
+            ext = ".owl";
+        else if (format instanceof TurtleDocumentFormat)
+            ext = ".ttl";
+        else if (format instanceof OWLXMLDocumentFormat)
+            ext = ".owx";
+        else if (format instanceof ManchesterSyntaxDocumentFormat)
+            ext = ".omn";
+        else if (format instanceof FunctionalSyntaxDocumentFormat)
+            ext = ".ofn";
+        return ext;
+    }
+}
diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java
index d8b94d3b6..01d60fb13 100644
--- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java
+++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/OntologyGraph.java
@@ -3,6 +3,7 @@
 import com.google.gson.stream.JsonWriter;
 
 import org.apache.jena.riot.RDFLanguages;
+import org.semanticweb.owlapi.formats.*;
 import uk.ac.ebi.rdf2json.annotators.*;
 import uk.ac.ebi.rdf2json.helpers.RdfListEvaluator;
 import uk.ac.ebi.rdf2json.properties.*;
@@ -19,7 +20,9 @@
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileNotFoundException;
+import java.net.URISyntaxException;
 import java.nio.file.Files;
+import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.util.*;
 import java.util.stream.Collectors;
@@ -29,6 +32,9 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.semanticweb.owlapi.model.*;
+
+
 public class OntologyGraph implements StreamRDF {
 
     public Map<String, Object> config;
@@ -56,12 +62,12 @@ private RDFParserBuilder createParser(Lang lang) {
         }
     }
 
-    private void parseRDF(String url) {
+    private void parseRDF(String url, boolean convertToRDF, String id) {
 
         try {
             if (loadLocalFiles && !url.contains("://")) {
                 logger.debug("Using local file for {}", url);
-                sourceFileTimestamp = new File(url).lastModified();
+                sourceFileTimestamp = new File(url).lastModified();
                 createParser(RDFLanguages.filenameToLang(url, Lang.RDFXML))
                         .source(new FileInputStream(url)).parse(this);
             } else {
@@ -70,7 +76,7 @@ private void parseRDF(String url) {
                 try {
                     FileInputStream is = new FileInputStream(existingDownload);
                     logger.debug("Using predownloaded file for {}", url);
-                    sourceFileTimestamp = new File(existingDownload).lastModified();
+                    sourceFileTimestamp = new File(existingDownload).lastModified();
                     Lang lang = null;
 
                     try {
                         String existingDownloadMimeType = Files.readString(Paths.get(existingDownload + ".mimetype"));
@@ -83,17 +89,42 @@
                     createParser(lang).source(is).parse(this);
                 } catch (Exception e) {
                     logger.error("Downloading (not predownloaded) {}", url);
-                    sourceFileTimestamp = System.currentTimeMillis();
+                    sourceFileTimestamp = System.currentTimeMillis();
                     createParser(null).source(url).parse(this);
                 }
             } else {
                 logger.debug("Downloading (no predownload path provided) {}", url);
-                sourceFileTimestamp = System.currentTimeMillis();
-                createParser(null).source(url).parse(this);
+                try {
+                    logger.info("url: "+url);
+                    sourceFileTimestamp = System.currentTimeMillis();
+                    createParser(null).source(url).parse(this);
+                } catch (Exception e){
+                    logger.error("Parsing exception: {}",e.getMessage());
+                    if(convertToRDF){
+                        logger.info("converting the ontology to RDF/XML as a fallback");
ontology to RDF alternatively"); + OntologyConversion conversion = new OntologyConversion(url, id, new RDFXMLDocumentFormat()); + OWLOntology ont = conversion.getOntology(); + OWLDocumentFormat format = ont.getOWLOntologyManager().getOntologyFormat(ont); + logger.info("parsing "+id+" ontology in format: "+format.getKey()); + Path resourceDirectory = Paths.get(OntologyGraph.class.getProtectionDomain().getCodeSource().getLocation().toURI()).getParent(); + url = Paths.get(resourceDirectory.resolve(id+conversion.getExtConverted()).toUri()).toString(); + logger.info("url of the converted ontology: "+url); + sourceFileTimestamp = System.currentTimeMillis(); + createParser(Lang.RDFXML).source(url).parse(this); + } else { + logger.debug("You may alternatively try to use convertToRDF mode to parse your ontology"); + e.printStackTrace(); + } + + } } } } catch (FileNotFoundException e) { throw new RuntimeException(e); + } catch (IOException e) { + throw new RuntimeException(e); + } catch (URISyntaxException e) { + throw new RuntimeException(e); } } @@ -101,13 +132,24 @@ private String urlToFilename(String url) { return url.replaceAll("[^a-zA-Z0-9\\.\\-]", "_"); } + public static String removeExtension(String fileName) { + if (fileName == null || fileName.isEmpty()) { + return fileName; + } + int lastDotIndex = fileName.lastIndexOf('.'); + if (lastDotIndex == -1) { + return fileName; // No extension found + } + return fileName.substring(0, lastDotIndex); + } + private boolean loadLocalFiles; String downloadedPath; - OntologyGraph(Map config, boolean loadLocalFiles, boolean noDates, String downloadedPath) { + OntologyGraph(Map config, boolean loadLocalFiles, boolean noDates, String downloadedPath, boolean convertToRDF) { this.loadLocalFiles = loadLocalFiles; this.downloadedPath = downloadedPath; @@ -146,7 +188,7 @@ private String urlToFilename(String url) { } logger.debug("load ontology from: {}", url); - parseRDF(url); + parseRDF(url, convertToRDF, config.getOrDefault("id","result").toString()); // Before we evaluate imports, mark all the nodes so far as not imported for(String id : nodes.keySet()) { @@ -157,12 +199,16 @@ private String urlToFilename(String url) { } + List imported = new ArrayList<>(); while(importUrls.size() > 0) { String importUrl = importUrls.get(0); importUrls.remove(0); + if (imported.contains(importUrl)) + continue; logger.debug("import: {}", importUrl); - parseRDF(importUrl); + parseRDF(importUrl, convertToRDF,config.getOrDefault("id","result").toString()+"_"+removeExtension((importUrl.substring(importUrl.lastIndexOf('/') + 1)))); + imported.add(importUrl); } // Now the imports are done, mark everything else as imported @@ -300,7 +346,10 @@ public void write(JsonWriter writer) throws Throwable { writer.value(ontologyId); writer.name("iri"); - writer.value(ontologyNode.uri); + if(ontologyNode.uri != null) + writer.value(ontologyNode.uri); + else + writer.value(config.get("ontology_purl").toString()); for (String configKey : config.keySet()) { Object configVal = config.get(configKey); @@ -398,7 +447,7 @@ public void write(JsonWriter writer) throws Throwable { writer.endObject(); } catch (Throwable t) { - logger.error("Error in writing ontology with id = {}", ontologyId, t); + logger.error("Error in writing ontology with id = {}", ontologyId); throw t; } } @@ -577,8 +626,8 @@ public void writeValue(JsonWriter writer, PropertyValue value) throws Throwable if (uriNode != null && !isXMLBuiltinDatatype(uri) && uriNode.types.contains(OntologyNode.NodeType.DATATYPE)) { // special case 
                     writeNode(writer, uriNode, Set.of("datatype"));
-            } else {
-                writer.value(uri);
+                } else {
+                    writer.value(uri);
                 }
                 break;
             case RELATED:
@@ -613,7 +662,7 @@ public void writeValue(JsonWriter writer, PropertyValue value) throws Throwable
                 break;
         }
 
     } catch (Throwable t) {
-        logger.error("Error writing property value {}", value, t);
+        logger.error("Error writing property value {}", value);
         throw t;
     }
 }
diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/RDF2JSON.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/RDF2JSON.java
index 2417157fd..86765c5a3 100644
--- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/RDF2JSON.java
+++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/RDF2JSON.java
@@ -43,7 +43,7 @@ public static void main(String[] args) throws IOException {
         Option output = new Option(null, "output", true, "JSON output filename");
         output.setRequired(true);
         options.addOption(output);
-
+
         Option loadLocalFiles = new Option(null, "loadLocalFiles", false, "Whether or not to load local files (unsafe, for testing)");
         loadLocalFiles.setRequired(false);
         options.addOption(loadLocalFiles);
@@ -52,6 +52,10 @@ public static void main(String[] args) throws IOException {
         noDates.setRequired(false);
         options.addOption(noDates);
 
+        Option rdfConvert = new Option(null, "convertToRDF", false, "Whether or not to convert the ontology to RDF/XML format before parsing.");
+        rdfConvert.setRequired(false);
+        options.addOption(rdfConvert);
+
         CommandLineParser parser = new DefaultParser();
         HelpFormatter formatter = new HelpFormatter();
         CommandLine cmd;
@@ -73,6 +77,7 @@ public static void main(String[] args) throws IOException {
         boolean bLoadLocalFiles = cmd.hasOption("loadLocalFiles");
         boolean bNoDates = cmd.hasOption("noDates");
         String mergeOutputWith = cmd.getOptionValue("mergeOutputWith");
+        boolean convertToRDF = cmd.hasOption("convertToRDF");
 
         logger.debug("Configs: {}", configFilePaths);
 
@@ -144,7 +149,7 @@ public static void main(String[] args) throws IOException {
 
         try {
 
-            OntologyGraph graph = new OntologyGraph(ontoConfig, bLoadLocalFiles, bNoDates, downloadedPath);
+            OntologyGraph graph = new OntologyGraph(ontoConfig, bLoadLocalFiles, bNoDates, downloadedPath, convertToRDF);
 
             if(graph.ontologyNode == null) {
                 logger.error("No Ontology node found; nothing will be written");
@@ -179,10 +184,10 @@ public static void main(String[] args) throws IOException {
             actualReader.beginObject();
 
             while (scanReader.peek() != JsonToken.END_OBJECT) {
-
+
                 String name = scanReader.nextName();
                 actualReader.nextName();
-
+
                 if (name.equals("ontologies")) {
 
                     scanReader.beginArray();
diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/DisjointWithAnnotator.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/DisjointWithAnnotator.java
index cef53cb4a..3fe9a43aa 100644
--- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/DisjointWithAnnotator.java
+++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/DisjointWithAnnotator.java
@@ -30,7 +30,7 @@ public static void annotateDisjointWith(OntologyGraph graph) {
 
             for(OntologyNode classNodeA : classNodes) {
                 for(OntologyNode classNodeB : classNodes) {
-                    if(classNodeB.uri != classNodeA.uri) {
+                    if(classNodeA != null && classNodeB != null && classNodeB.uri != classNodeA.uri) {
                         classNodeA.properties.addProperty("http://www.w3.org/2002/07/owl#disjointWith",
                                 PropertyValueURI.fromUri(classNodeB.uri));
                     }
@@ -52,13 +52,13 @@ public static void annotateDisjointWith(OntologyGraph graph) {
                     }
                 }
             }
-
         } else if (c.types.contains(OntologyNode.NodeType.ALL_DIFFERENT)) {
 
             PropertyValue membersList = c.properties.getPropertyValue("http://www.w3.org/2002/07/owl#distinctMembers");
 
-            List<PropertyValue> members = RdfListEvaluator.evaluateRdfList(graph.getNodeForPropertyValue(membersList), graph);
-
-            List<OntologyNode> individualNodes = members.stream()
+            if (membersList != null) {
+                List<PropertyValue> members = RdfListEvaluator.evaluateRdfList(graph.getNodeForPropertyValue(membersList), graph);
+                List<OntologyNode> individualNodes = members.stream()
                     .map(val -> graph.getNodeForPropertyValue(val))
                     .filter(val -> val != null)
                     .collect(Collectors.toList());
@@ -71,6 +71,7 @@ public static void annotateDisjointWith(OntologyGraph graph) {
                     }
                 }
             }
+            }
         }
     }
 }
diff --git a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/RelatedAnnotator.java b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/RelatedAnnotator.java
index b0c10d34d..d83454f1c 100644
--- a/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/RelatedAnnotator.java
+++ b/dataload/rdf2json/src/main/java/uk/ac/ebi/rdf2json/annotators/RelatedAnnotator.java
@@ -189,10 +189,20 @@ private static RelatedInfo annotateRelated_Class_subClassOf_Restriction_someValu
         OntologyNode fillerNode = graph.nodes.get(fillerUri);
 
         if(fillerNode != null) {
             // sometimes filler not included in ontology, e.g. "subClassOf some xsd:float" in cdao
-
-            relatedInfo.addRelatedTo(classNode, new PropertyValueRelated(fillerRestriction, propertyUri, fillerNode));
-            relatedInfo.addRelatedFrom(fillerNode, new PropertyValueRelated(fillerRestriction, propertyUri, classNode));
-
+            PropertyValue someValuesFrom = null;
+            if(fillerRestriction != null)
+                someValuesFrom = fillerRestriction.properties.getPropertyValue("http://www.w3.org/2002/07/owl#someValuesFrom");
+
+            if(someValuesFrom != null) {
+                if(!((PropertyValueURI) someValuesFrom).getUri().equalsIgnoreCase(fillerUri)) {
+                    relatedInfo.addRelatedTo(classNode, new PropertyValueRelated(fillerRestriction, propertyUri, fillerNode));
+                    relatedInfo.addRelatedFrom(fillerNode, new PropertyValueRelated(fillerRestriction, propertyUri, classNode));
+                }
+            }
+            else {
+                relatedInfo.addRelatedTo(classNode, new PropertyValueRelated(fillerRestriction, propertyUri, fillerNode));
+                relatedInfo.addRelatedFrom(fillerNode, new PropertyValueRelated(fillerRestriction, propertyUri, classNode));
+            }
         }
     }
 
@@ -227,15 +237,27 @@ private static RelatedInfo annotateRelated_Class_subClassOf_Restriction_someValu
 
         OntologyNode fillerNode = graph.nodes.get( ((PropertyValueBNode) filler).getId() );
 
-        List<OntologyNode> fillerIndividuals =
-            RdfListEvaluator.evaluateRdfList(fillerNode, graph)
-                .stream()
-                .map(propertyValue -> graph.nodes.get( ((PropertyValueURI) propertyValue).getUri() ))
-                .collect(Collectors.toList());
-
-        for(OntologyNode individualNode : fillerIndividuals) {
-            relatedInfo.addRelatedTo(classNode, new PropertyValueRelated(fillerNode, propertyUri, individualNode));
-            relatedInfo.addRelatedFrom(individualNode, new PropertyValueRelated(fillerNode, propertyUri, classNode));
+        List<OntologyNode> fillerIndividuals = new ArrayList<>();
+        if(fillerNode != null){
+            for (PropertyValue propertyValue : RdfListEvaluator.evaluateRdfList(fillerNode, graph)){
+                if (propertyValue != null){
+                    OntologyNode ontologyNode = null;
+                    try {
+                        ontologyNode = graph.getNodeForPropertyValue(propertyValue);
+                        logger.info("resolved property value to a node");
+                    } catch (Exception e){
+                        logger.error("could not resolve property value to a node");
+                    }
+                    if (ontologyNode != null && ontologyNode.uri != null){
logger.info("ontology node uri: "+ontologyNode.uri); + fillerIndividuals.add(ontologyNode); + } + } + } + for(OntologyNode individualNode : fillerIndividuals) { + relatedInfo.addRelatedTo(classNode, new PropertyValueRelated(fillerNode, propertyUri, individualNode)); + relatedInfo.addRelatedFrom(individualNode, new PropertyValueRelated(fillerNode, propertyUri, classNode)); + } } return relatedInfo; } @@ -273,7 +295,7 @@ private static RelatedInfo annotateRelated_Class_subClassOf_Restriction_hasValue OntologyNode fillerNode = graph.nodes.get( ((PropertyValueURI) filler).getUri() ); - if(fillerNode.types.contains(OntologyNode.NodeType.INDIVIDUAL)) { + if(fillerNode != null && fillerNode.types.contains(OntologyNode.NodeType.INDIVIDUAL)) { // fillerNode is an individual relatedInfo.addRelatedTo(fillerNode, new PropertyValueRelated(fillerRestriction, propertyUri, classNode)); relatedInfo.addRelatedFrom(classNode, new PropertyValueRelated(fillerRestriction, propertyUri, fillerNode)); diff --git a/docker-compose.yml b/docker-compose.yml index b31bbf1a6..c54160272 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -3,6 +3,9 @@ services: ols4-dataload: environment: - JAVA_OPTS=${JAVA_OPTS} + - BATCH_SIZE=${BATCH_SIZE} + - POOL_SIZE=${POOL_SIZE} + - NOOF_ATTEMPTS=${NOOF_ATTEMPTS} build: context: . dockerfile: ./dataload/Dockerfile @@ -13,8 +16,16 @@ services: - ./testcases:/opt/ols/dataload/testcases:ro #- ./docker_out:/tmp/out:rw command: ./dataload.dockersh ${OLS4_DATALOAD_ARGS:-} + mongo: + image: mongo:8.0.4 + ports: + - 27017:27017 + volumes: + - ols-mongo-data:/data/db + command: + - mongod ols4-solr: - image: solr:9.0.0 + image: solr:9.7.0 environment: - SOLR_HOME=/mnt/ols4-solr-data ports: @@ -26,7 +37,7 @@ services: ols4-dataload: condition: service_completed_successfully ols4-neo4j: - image: neo4j:4.4.9-community + image: neo4j:5.26.0-community ports: - 7474:7474 - 7687:7687 @@ -46,12 +57,15 @@ services: environment: - OLS_SOLR_HOST=http://ols4-solr:8983 - OLS_NEO4J_HOST=bolt://ols4-neo4j:7687 + - spring.data.mongodb.host=mongo depends_on: - ols4-solr - - ols4-neo4j + - ols4-neo4j + - mongo links: - ols4-solr - - ols4-neo4j + - ols4-neo4j + - mongo ols4-frontend: build: context: ./frontend @@ -75,4 +89,7 @@ services: - ols4-backend volumes: ols4-neo4j-data: - ols4-solr-data: \ No newline at end of file + ols4-solr-data: + ols-mongo-data: + +