From 82f38b08ef08dd09edcee88e8230c31c7873520e Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Mon, 10 Jun 2024 16:15:09 +0200 Subject: [PATCH 01/97] changed backend directory as python module --- MANIFEST.in | 1 + backend.egg-info/PKG-INFO | 98 + backend.egg-info/SOURCES.txt | 85 + backend.egg-info/dependency_links.txt | 1 + backend.egg-info/entry_points.txt | 2 + backend.egg-info/requires.txt | 15 + backend.egg-info/top_level.txt | 1 + backend/__init__.py | 1 + backend/backend.py | 80 + backend/config/__init__.py | 0 backend/controller/__init__.py | 0 backend/controller/algoProviders.py | 10 +- backend/controller/data.py | 4 +- backend/controller/dataProviders.py | 10 +- backend/controller/exportMethods.py | 4 +- backend/controller/layouts.py | 2 +- backend/controller/models.py | 4 +- backend/controller/projects.py | 4 +- backend/controller/pythonModuleDp.py | 4 +- backend/controller/selection.py | 4 +- backend/controller/widgetConfigurations.py | 2 +- backend/init.py | 31 +- backend/modules/__init__.py | 0 backend/modules/algoProviders/AlgoProvider.py | 2 +- backend/modules/algoProviders/__init__.py | 0 .../algoProviders/algoProvidersManager.py | 8 +- .../integratedAlgoProvider/__init__.py | 0 .../algorithms/__init__.py | 0 .../integratedAlgoProvider.py | 6 +- backend/modules/dataProviders/__init__.py | 0 .../dataProviders/dataProviderManager.py | 15 +- .../pythonDataProvider/PythonDataProvider.py | 10 +- .../pythonDataProvider/__init__.py | 0 .../pythonDataProvider/dataUtils/__init__.py | 0 .../pythonDataProvider/dataUtils/hash.py | 2 +- .../pythonDataProvider/dataUtils/models.py | 4 +- .../pythonDataProvider/dataUtils/projects.py | 2 +- .../pythonDataProvider/dataUtils/samples.py | 2 +- .../dataUtils/selections.py | 2 +- .../pythonDataProvider/dataUtils/tags.py | 2 +- .../pythonDataProvider/dataUtils/tree.py | 2 +- .../webDataProvider/WebDataProvider.py | 14 +- .../dataProviders/webDataProvider/__init__.py | 0 .../webDataProvider/cache/__init__.py | 0 .../webDataProvider/cache/cache.py | 2 +- .../webDataProvider/http/__init__.py | 0 .../dataProviders/webDataProvider/http/api.py | 2 +- .../webDataProvider/useCases/__init__.py | 0 .../webDataProvider/useCases/data.py | 2 +- .../webDataProvider/useCases/models.py | 4 +- .../webDataProvider/useCases/projects.py | 6 +- .../webDataProvider/useCases/selections.py | 4 +- backend/modules/exportMethods/__init__.py | 0 backend/modules/exportMethods/exportUtils.py | 10 +- .../modules/exportMethods/methods/__init__.py | 0 .../exportMethods/methods/kafkaUtils.py | 2 +- .../exportMethods/methods/postUtils.py | 2 +- backend/server.py | 5 + backend/swagger.yaml | 102 +- backend/tests/__init__.py | 0 backend/utils/__init__.py | 0 backend/utils/layouts/__init__.py | 0 backend/utils/layouts/layouts.py | 2 +- .../utils/widgetConfigurations/__init__.py | 0 .../widgetConfigurations.py | 2 +- backend/websrv.py | 6 +- build/lib/backend/__init__.py | 1 + build/lib/backend/backend.py | 80 + build/lib/backend/config/__init__.py | 0 build/lib/backend/config/init_config.py | 240 +++ build/lib/backend/controller/__init__.py | 0 build/lib/backend/controller/algoProviders.py | 70 + build/lib/backend/controller/data.py | 38 + build/lib/backend/controller/dataProviders.py | 88 + build/lib/backend/controller/exportMethods.py | 51 + build/lib/backend/controller/layouts.py | 23 + build/lib/backend/controller/models.py | 56 + build/lib/backend/controller/projects.py | 98 + .../lib/backend/controller/pythonModuleDp.py | 66 + build/lib/backend/controller/selection.py | 49 + 
.../controller/statisticalOperations.py | 646 ++++++ .../controller/widgetConfigurations.py | 28 + build/lib/backend/init.py | 45 + build/lib/backend/modules/__init__.py | 0 .../modules/algoProviders/AlgoProvider.py | 109 + .../algoProviders/AlgoProviderException.py | 15 + .../backend/modules/algoProviders/__init__.py | 0 .../algoProviders/algoProvidersManager.py | 114 + .../integratedAlgoProvider/__init__.py | 0 .../algorithms/__init__.py | 0 .../algorithms/classificationErrorMetric.py | 99 + .../algorithms/regressionErrorMetric.py | 149 ++ .../integratedAlgoProvider.py | 103 + .../integratedAlgoProvider/utils.py | 59 + .../modules/dataProviders/DataProvider.py | 77 + .../dataProviders/DataProviderException.py | 15 + .../backend/modules/dataProviders/__init__.py | 0 .../dataProviders/dataProviderManager.py | 127 ++ .../pythonDataProvider/PythonDataProvider.py | 250 +++ .../pythonDataProvider/__init__.py | 0 .../pythonDataProvider/dataUtils/__init__.py | 0 .../pythonDataProvider/dataUtils/hash.py | 60 + .../pythonDataProvider/dataUtils/models.py | 281 +++ .../pythonDataProvider/dataUtils/projects.py | 256 +++ .../dataUtils/pythonModuleUtils.py | 121 ++ .../pythonDataProvider/dataUtils/samples.py | 130 ++ .../dataUtils/selections.py | 117 + .../pythonDataProvider/dataUtils/tags.py | 109 + .../pythonDataProvider/dataUtils/tree.py | 332 +++ .../webDataProvider/WebDataProvider.py | 106 + .../dataProviders/webDataProvider/__init__.py | 0 .../webDataProvider/cache/__init__.py | 0 .../webDataProvider/cache/cache.py | 90 + .../webDataProvider/http/__init__.py | 0 .../dataProviders/webDataProvider/http/api.py | 222 ++ .../webDataProvider/useCases/__init__.py | 0 .../webDataProvider/useCases/data.py | 23 + .../webDataProvider/useCases/models.py | 58 + .../webDataProvider/useCases/projects.py | 131 ++ .../webDataProvider/useCases/selections.py | 61 + .../backend/modules/exportMethods/__init__.py | 0 .../modules/exportMethods/exportClass.py | 43 + .../modules/exportMethods/exportUtils.py | 200 ++ .../modules/exportMethods/methods/__init__.py | 0 .../exportMethods/methods/kafkaUtils.py | 71 + .../exportMethods/methods/postUtils.py | 59 + build/lib/backend/server.py | 5 + build/lib/backend/swagger.yaml | 1914 +++++++++++++++++ build/lib/backend/tests/__init__.py | 0 .../lib/backend/tests/test_algo_providers.py | 95 + .../lib/backend/tests/test_data_providers.py | 96 + build/lib/backend/tests/test_layouts.py | 131 ++ .../tests/test_pythonModuleDataProvider.py | 137 ++ .../tests/test_widget_configurations.py | 85 + build/lib/backend/utils/__init__.py | 0 build/lib/backend/utils/layouts/__init__.py | 0 build/lib/backend/utils/layouts/layouts.py | 149 ++ build/lib/backend/utils/utils.py | 46 + .../utils/widgetConfigurations/__init__.py | 0 .../widgetConfigurations.py | 123 ++ build/lib/backend/websrv.py | 78 + build_and_run.sh | 17 + cspell.json | 3 +- data/layouts.json | 1 + data/widgetConfigurations.json | 1 + setup.py | 41 + 146 files changed, 8537 insertions(+), 136 deletions(-) create mode 100644 MANIFEST.in create mode 100644 backend.egg-info/PKG-INFO create mode 100644 backend.egg-info/SOURCES.txt create mode 100644 backend.egg-info/dependency_links.txt create mode 100644 backend.egg-info/entry_points.txt create mode 100644 backend.egg-info/requires.txt create mode 100644 backend.egg-info/top_level.txt create mode 100644 backend/__init__.py create mode 100644 backend/backend.py create mode 100644 backend/config/__init__.py create mode 100644 backend/controller/__init__.py create mode 100644 
backend/modules/__init__.py create mode 100644 backend/modules/algoProviders/__init__.py create mode 100644 backend/modules/algoProviders/integratedAlgoProvider/__init__.py create mode 100644 backend/modules/algoProviders/integratedAlgoProvider/algorithms/__init__.py create mode 100644 backend/modules/dataProviders/__init__.py create mode 100644 backend/modules/dataProviders/pythonDataProvider/__init__.py create mode 100644 backend/modules/dataProviders/pythonDataProvider/dataUtils/__init__.py create mode 100644 backend/modules/dataProviders/webDataProvider/__init__.py create mode 100644 backend/modules/dataProviders/webDataProvider/cache/__init__.py create mode 100644 backend/modules/dataProviders/webDataProvider/http/__init__.py create mode 100644 backend/modules/dataProviders/webDataProvider/useCases/__init__.py create mode 100644 backend/modules/exportMethods/__init__.py create mode 100644 backend/modules/exportMethods/methods/__init__.py create mode 100644 backend/server.py create mode 100644 backend/tests/__init__.py create mode 100644 backend/utils/__init__.py create mode 100644 backend/utils/layouts/__init__.py create mode 100644 backend/utils/widgetConfigurations/__init__.py create mode 100644 build/lib/backend/__init__.py create mode 100644 build/lib/backend/backend.py create mode 100644 build/lib/backend/config/__init__.py create mode 100644 build/lib/backend/config/init_config.py create mode 100644 build/lib/backend/controller/__init__.py create mode 100644 build/lib/backend/controller/algoProviders.py create mode 100644 build/lib/backend/controller/data.py create mode 100644 build/lib/backend/controller/dataProviders.py create mode 100644 build/lib/backend/controller/exportMethods.py create mode 100644 build/lib/backend/controller/layouts.py create mode 100644 build/lib/backend/controller/models.py create mode 100644 build/lib/backend/controller/projects.py create mode 100644 build/lib/backend/controller/pythonModuleDp.py create mode 100644 build/lib/backend/controller/selection.py create mode 100644 build/lib/backend/controller/statisticalOperations.py create mode 100644 build/lib/backend/controller/widgetConfigurations.py create mode 100644 build/lib/backend/init.py create mode 100644 build/lib/backend/modules/__init__.py create mode 100644 build/lib/backend/modules/algoProviders/AlgoProvider.py create mode 100644 build/lib/backend/modules/algoProviders/AlgoProviderException.py create mode 100644 build/lib/backend/modules/algoProviders/__init__.py create mode 100644 build/lib/backend/modules/algoProviders/algoProvidersManager.py create mode 100644 build/lib/backend/modules/algoProviders/integratedAlgoProvider/__init__.py create mode 100644 build/lib/backend/modules/algoProviders/integratedAlgoProvider/algorithms/__init__.py create mode 100644 build/lib/backend/modules/algoProviders/integratedAlgoProvider/algorithms/classificationErrorMetric.py create mode 100644 build/lib/backend/modules/algoProviders/integratedAlgoProvider/algorithms/regressionErrorMetric.py create mode 100644 build/lib/backend/modules/algoProviders/integratedAlgoProvider/integratedAlgoProvider.py create mode 100644 build/lib/backend/modules/algoProviders/integratedAlgoProvider/utils.py create mode 100644 build/lib/backend/modules/dataProviders/DataProvider.py create mode 100644 build/lib/backend/modules/dataProviders/DataProviderException.py create mode 100644 build/lib/backend/modules/dataProviders/__init__.py create mode 100644 build/lib/backend/modules/dataProviders/dataProviderManager.py create mode 
100644 build/lib/backend/modules/dataProviders/pythonDataProvider/PythonDataProvider.py create mode 100644 build/lib/backend/modules/dataProviders/pythonDataProvider/__init__.py create mode 100644 build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/__init__.py create mode 100644 build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/hash.py create mode 100644 build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/models.py create mode 100644 build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/projects.py create mode 100644 build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/pythonModuleUtils.py create mode 100644 build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/samples.py create mode 100644 build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/selections.py create mode 100644 build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/tags.py create mode 100644 build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/tree.py create mode 100644 build/lib/backend/modules/dataProviders/webDataProvider/WebDataProvider.py create mode 100644 build/lib/backend/modules/dataProviders/webDataProvider/__init__.py create mode 100644 build/lib/backend/modules/dataProviders/webDataProvider/cache/__init__.py create mode 100644 build/lib/backend/modules/dataProviders/webDataProvider/cache/cache.py create mode 100644 build/lib/backend/modules/dataProviders/webDataProvider/http/__init__.py create mode 100644 build/lib/backend/modules/dataProviders/webDataProvider/http/api.py create mode 100644 build/lib/backend/modules/dataProviders/webDataProvider/useCases/__init__.py create mode 100644 build/lib/backend/modules/dataProviders/webDataProvider/useCases/data.py create mode 100644 build/lib/backend/modules/dataProviders/webDataProvider/useCases/models.py create mode 100644 build/lib/backend/modules/dataProviders/webDataProvider/useCases/projects.py create mode 100644 build/lib/backend/modules/dataProviders/webDataProvider/useCases/selections.py create mode 100644 build/lib/backend/modules/exportMethods/__init__.py create mode 100644 build/lib/backend/modules/exportMethods/exportClass.py create mode 100644 build/lib/backend/modules/exportMethods/exportUtils.py create mode 100644 build/lib/backend/modules/exportMethods/methods/__init__.py create mode 100644 build/lib/backend/modules/exportMethods/methods/kafkaUtils.py create mode 100644 build/lib/backend/modules/exportMethods/methods/postUtils.py create mode 100644 build/lib/backend/server.py create mode 100644 build/lib/backend/swagger.yaml create mode 100644 build/lib/backend/tests/__init__.py create mode 100644 build/lib/backend/tests/test_algo_providers.py create mode 100644 build/lib/backend/tests/test_data_providers.py create mode 100644 build/lib/backend/tests/test_layouts.py create mode 100644 build/lib/backend/tests/test_pythonModuleDataProvider.py create mode 100644 build/lib/backend/tests/test_widget_configurations.py create mode 100644 build/lib/backend/utils/__init__.py create mode 100644 build/lib/backend/utils/layouts/__init__.py create mode 100644 build/lib/backend/utils/layouts/layouts.py create mode 100644 build/lib/backend/utils/utils.py create mode 100644 build/lib/backend/utils/widgetConfigurations/__init__.py create mode 100644 build/lib/backend/utils/widgetConfigurations/widgetConfigurations.py create mode 100644 build/lib/backend/websrv.py create mode 100755 build_and_run.sh create mode 100644 
data/layouts.json create mode 100644 data/widgetConfigurations.json create mode 100644 setup.py diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 000000000..32eda311e --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1 @@ +include backend/swagger.yaml diff --git a/backend.egg-info/PKG-INFO b/backend.egg-info/PKG-INFO new file mode 100644 index 000000000..dc70b2be1 --- /dev/null +++ b/backend.egg-info/PKG-INFO @@ -0,0 +1,98 @@ +Metadata-Version: 2.1 +Name: backend +Version: 0.1.0 +Summary: Python module that allows users to have a standalone version of DebiAI. +Home-page: https://github.com/debiai/DebiAI +Author: Fady Bekkar +Author-email: fady.bekkar@irt-systemx.fr +Classifier: Programming Language :: Python :: 3 +Requires-Python: >=3.6 +Description-Content-Type: text/markdown +License-File: LICENSE + +
+ + +[![Online documentation](https://img.shields.io/static/v1?label=&message=Online documentation&color=0077de)](https://debiai.irt-systemx.fr/) +
+[![License](https://img.shields.io/badge/License-Apache_2.0-blue.svg)](https://opensource.org/licenses/Apache-2.0) +![cd](https://github.com/debiai/debiai/actions/workflows/docker-push.yml/badge.svg) +
+![Activity](https://img.shields.io/github/commit-activity/m/debiai/debiai) +![Last commit](https://img.shields.io/github/last-commit/debiai/debiai) +
+[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) +[![Code style: flake8](https://img.shields.io/badge/code%20style-flake8-1c4a6c.svg)](https://flake8.pycqa.org/en/latest/) +[![code style: prettier](https://img.shields.io/badge/code_style-prettier-ff69b4.svg?style=flat-square)](https://github.com/prettier/prettier) + +
+ +## Why DebiAI? + +DebiAI is an open-source web application that aims to facilitate the process of developing Machine Learning models, especially during the project data analysis and model performance comparison stages. + +DebiAI provides data scientists with features to: + +- Identify biases and errors in your input, results, contextual or ground truth project data +- Compare the performance of your ML models according to their contextual results +- Select and create sets of data graphically for further analysis or (re-)training purposes +- Quickly create and share statistical visualizations of your project data for your team or client + +## Documentation + +The full documentation is available on the [DebiAI website](https://debiai.irt-systemx.fr/). + +## Dashboard + +DebiAI has a Web Graphical User Interface with a complete data visualization toolkit offering many statistical analysis tools: + +

+ +

+ +The dashboard is highly customizable and can be used for large and small projects. Learn more about the [widgets and how to use them](https://debiai.irt-systemx.fr/dashboard/widgets/). + +## Data + +DebiAI is designed to be used for any kind of project and data; it is particularly useful for projects that involve a lot of contextual data. + +DebiAI provides two main ways to import your data: + +- A [DebiAI Python module](https://debiai.irt-systemx.fr/dataInsertion/pythonModule/) is provided to insert, directly from your Python workflow, the data and model results that you want to study (a minimal usage sketch is given below). +- You can also create a [Data Provider](https://debiai.irt-systemx.fr/dataInsertion/dataProviders/), a Web API that will allow DebiAI to reach your data and model results from any programming language and any data source without duplication. + Check out the [DebiAI Data Provider NodeJs template](https://github.com/debiai/data-provider-nodejs-template) for an example of a Data Provider. + +## Installation + +DebiAI is available as a Docker image. To install it, you can follow the [installation guide](https://debiai.irt-systemx.fr/introduction/gettingStarted/installation). + +## Use cases + +As part of the [Confiance.ai](https://www.confiance.ai/) program, we (the [IRT SystemX](https://www.irt-systemx.fr/)) are using and developing DebiAI for a wide range of use cases. + +One of them is the [Valeo - WoodScape](https://woodscape.valeo.com/) dataset: + +### Valeo - WoodScape + +The Valeo - WoodScape dataset is an annotated image dataset taken from 4 fisheye cameras. DebiAI is used to analyze the dataset for biases and outliers in the data. + +
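As a companion to the Python-module route above, here is a minimal, hedged sketch of inserting data from a Python workflow. The `debiai` package import path, the backend URL, and the method names (`Debiai`, `create_project`, `set_blockstructure`, `add_samples_pd`) are illustrative assumptions; refer to the Python module documentation linked above for the actual API.

```python
# Hedged sketch of the DebiAI Python module route.
# All names below are assumptions; check the pythonModule documentation for the real API.
import pandas as pd
from debiai import debiai  # assumed import path of the DebiAI Python module

# Point the module at a running DebiAI backend (the standalone backend serves on port 3000)
my_debiai = debiai.Debiai("http://localhost:3000/")

# Create a project and describe the structure of the data it will receive
project = my_debiai.create_project("Fisheye camera exploration")
project.set_blockstructure(
    [
        {
            "name": "Image ID",
            "contexts": [{"name": "camera", "type": "text"}],
            "groundTruth": [{"name": "class", "type": "text"}],
        }
    ]
)

# Insert samples built in the existing Python workflow
samples = pd.DataFrame(
    {
        "Image ID": ["img-001", "img-002"],
        "camera": ["front", "rear"],
        "class": ["car", "truck"],
    }
)
project.add_samples_pd(samples)
```

The Data Provider route covers the same need over HTTP, letting DebiAI pull data and model results on demand instead of duplicating them.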

+ +

+ +Within the [Confiance.ai](https://www.confiance.ai/) program, DebiAI has been able to import the project data, detect biases, find annotation errors and export them to the project's image annotation tool. + +--- + +
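Beyond the Docker image mentioned above, this patch packages the backend as an installable Python module (see `setup.py`, `MANIFEST.in`, and the `debiai-start = backend.server:run` console script). A minimal launch sketch, assuming the package from this commit has been installed (for example with `pip install .`):

```python
# Start the standalone DebiAI backend introduced by this patch.
# `create_app` and `start_server` are the symbols re-exported by backend/__init__.py;
# `start_server()` is also what the `debiai-start` console script ends up calling
# through backend.server:run.
from backend import create_app, start_server

if __name__ == "__main__":
    # Option 1: run the DebiAI init sequence and serve on http://localhost:3000
    start_server()

    # Option 2 (sketch): build the Connexion app yourself and run it with other settings
    # app = create_app()
    # app.run(port=3000)
```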

+ DebiAI is developed by IRT SystemX + And is integrated in the Confiance.ai program + +

+ +--- diff --git a/backend.egg-info/SOURCES.txt b/backend.egg-info/SOURCES.txt new file mode 100644 index 000000000..abd98b8ae --- /dev/null +++ b/backend.egg-info/SOURCES.txt @@ -0,0 +1,85 @@ +LICENSE +MANIFEST.in +README.md +setup.py +backend/__init__.py +backend/backend.py +backend/init.py +backend/server.py +backend/swagger.yaml +backend/websrv.py +backend.egg-info/PKG-INFO +backend.egg-info/SOURCES.txt +backend.egg-info/dependency_links.txt +backend.egg-info/entry_points.txt +backend.egg-info/requires.txt +backend.egg-info/top_level.txt +backend/config/__init__.py +backend/config/init_config.py +backend/controller/__init__.py +backend/controller/algoProviders.py +backend/controller/data.py +backend/controller/dataProviders.py +backend/controller/exportMethods.py +backend/controller/layouts.py +backend/controller/models.py +backend/controller/projects.py +backend/controller/pythonModuleDp.py +backend/controller/selection.py +backend/controller/statisticalOperations.py +backend/controller/widgetConfigurations.py +backend/modules/__init__.py +backend/modules/algoProviders/AlgoProvider.py +backend/modules/algoProviders/AlgoProviderException.py +backend/modules/algoProviders/__init__.py +backend/modules/algoProviders/algoProvidersManager.py +backend/modules/algoProviders/integratedAlgoProvider/__init__.py +backend/modules/algoProviders/integratedAlgoProvider/integratedAlgoProvider.py +backend/modules/algoProviders/integratedAlgoProvider/utils.py +backend/modules/algoProviders/integratedAlgoProvider/algorithms/__init__.py +backend/modules/algoProviders/integratedAlgoProvider/algorithms/classificationErrorMetric.py +backend/modules/algoProviders/integratedAlgoProvider/algorithms/regressionErrorMetric.py +backend/modules/dataProviders/DataProvider.py +backend/modules/dataProviders/DataProviderException.py +backend/modules/dataProviders/__init__.py +backend/modules/dataProviders/dataProviderManager.py +backend/modules/dataProviders/pythonDataProvider/PythonDataProvider.py +backend/modules/dataProviders/pythonDataProvider/__init__.py +backend/modules/dataProviders/pythonDataProvider/dataUtils/__init__.py +backend/modules/dataProviders/pythonDataProvider/dataUtils/hash.py +backend/modules/dataProviders/pythonDataProvider/dataUtils/models.py +backend/modules/dataProviders/pythonDataProvider/dataUtils/projects.py +backend/modules/dataProviders/pythonDataProvider/dataUtils/pythonModuleUtils.py +backend/modules/dataProviders/pythonDataProvider/dataUtils/samples.py +backend/modules/dataProviders/pythonDataProvider/dataUtils/selections.py +backend/modules/dataProviders/pythonDataProvider/dataUtils/tags.py +backend/modules/dataProviders/pythonDataProvider/dataUtils/tree.py +backend/modules/dataProviders/webDataProvider/WebDataProvider.py +backend/modules/dataProviders/webDataProvider/__init__.py +backend/modules/dataProviders/webDataProvider/cache/__init__.py +backend/modules/dataProviders/webDataProvider/cache/cache.py +backend/modules/dataProviders/webDataProvider/http/__init__.py +backend/modules/dataProviders/webDataProvider/http/api.py +backend/modules/dataProviders/webDataProvider/useCases/__init__.py +backend/modules/dataProviders/webDataProvider/useCases/data.py +backend/modules/dataProviders/webDataProvider/useCases/models.py +backend/modules/dataProviders/webDataProvider/useCases/projects.py +backend/modules/dataProviders/webDataProvider/useCases/selections.py +backend/modules/exportMethods/__init__.py +backend/modules/exportMethods/exportClass.py 
+backend/modules/exportMethods/exportUtils.py +backend/modules/exportMethods/methods/__init__.py +backend/modules/exportMethods/methods/kafkaUtils.py +backend/modules/exportMethods/methods/postUtils.py +backend/tests/__init__.py +backend/tests/test_algo_providers.py +backend/tests/test_data_providers.py +backend/tests/test_layouts.py +backend/tests/test_pythonModuleDataProvider.py +backend/tests/test_widget_configurations.py +backend/utils/__init__.py +backend/utils/utils.py +backend/utils/layouts/__init__.py +backend/utils/layouts/layouts.py +backend/utils/widgetConfigurations/__init__.py +backend/utils/widgetConfigurations/widgetConfigurations.py \ No newline at end of file diff --git a/backend.egg-info/dependency_links.txt b/backend.egg-info/dependency_links.txt new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/backend.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/backend.egg-info/entry_points.txt b/backend.egg-info/entry_points.txt new file mode 100644 index 000000000..5639d1fa0 --- /dev/null +++ b/backend.egg-info/entry_points.txt @@ -0,0 +1,2 @@ +[console_scripts] +debiai-start = backend.server:run diff --git a/backend.egg-info/requires.txt b/backend.egg-info/requires.txt new file mode 100644 index 000000000..33fff5e2a --- /dev/null +++ b/backend.egg-info/requires.txt @@ -0,0 +1,15 @@ +Flask==2.0.3 +flask_cors==3.0.8 +connexion==2.6.0 +requests==2.25.1 +swagger-ui-bundle==0.0.5 +pandas==1.5.1 +scipy==1.9.3 +ujson==5.8.0 +sklearn==0.0 +kafka-python==2.0.2 +openapi_spec_validator==0.2.8 +PyYAML==6.0 +cacheout==0.14.1 +termcolor==2.3.0 +werkzeug==2.2.2 diff --git a/backend.egg-info/top_level.txt b/backend.egg-info/top_level.txt new file mode 100644 index 000000000..e34d8c321 --- /dev/null +++ b/backend.egg-info/top_level.txt @@ -0,0 +1 @@ +backend diff --git a/backend/__init__.py b/backend/__init__.py new file mode 100644 index 000000000..4a22300fc --- /dev/null +++ b/backend/__init__.py @@ -0,0 +1 @@ +from backend.backend import send_frontend, create_app, start_server \ No newline at end of file diff --git a/backend/backend.py b/backend/backend.py new file mode 100644 index 000000000..7429936da --- /dev/null +++ b/backend/backend.py @@ -0,0 +1,80 @@ +import connexion +import os +import requests +from termcolor import colored +from flask_cors import CORS +from flask import send_from_directory, request, Response +from backend.init import init +from backend.utils.utils import get_app_version +from backend.config.init_config import DEBUG_COLOR + +DEV_FRONTEND_URL = "http://localhost:8080/" +PORT = 3000 + + +def send_frontend(path): + if path == "/": + path = "index.html" + + # If production, use the index.html from the dist folder + if os.getenv("FLASK_ENV") == "production": + return send_from_directory("dist", path) + + # In development, redirect to the DEV_FRONTEND_URL + else: + if request.method == "GET": + try: + resp = requests.get(f"{DEV_FRONTEND_URL}{path}") + excluded_headers = [ + "content-encoding", + "content-length", + "transfer-encoding", + "connection", + ] + headers = [ + (name, value) + for (name, value) in resp.raw.headers.items() + if name.lower() not in excluded_headers + ] + response = Response(resp.content, resp.status_code, headers) + return response + except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): + return ( + "You are in a development environment and the DebAI frontend" + + "is not available at the url : " + + DEV_FRONTEND_URL, + 503, + ) + else: + print("Unexpected request method") + + +def create_app(): + 
app = connexion.App(__name__) + app.add_api("swagger.yaml", strict_validation=True) + CORS(app.app) + + # For serving the dashboard + @app.route("/") + def send_index(): + return send_frontend("/") + + # For serving the dashboard assets + @app.route("/") + def send_supporting_elements(path): + return send_frontend(path) + + return app + + +def start_server(): + # Run DebiAI init + print("================= DebiAI " + get_app_version() + " ====================") + init() + print("======================== RUN =======================") + print( + " DebiAI is available at " + + colored("http://localhost:" + str(PORT), DEBUG_COLOR) + ) + app = create_app() + app.run(port=PORT, debug=True) diff --git a/backend/config/__init__.py b/backend/config/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/controller/__init__.py b/backend/controller/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/controller/algoProviders.py b/backend/controller/algoProviders.py index 6318d4469..3116cf164 100644 --- a/backend/controller/algoProviders.py +++ b/backend/controller/algoProviders.py @@ -1,11 +1,11 @@ ############################################################################# # Imports ############################################################################# -from config.init_config import get_config -from utils.utils import is_url_valid, is_valid_name -import modules.algoProviders.algoProvidersManager as algo_provider_manager -from modules.algoProviders.AlgoProviderException import AlgoProviderException -from modules.algoProviders.AlgoProvider import AlgoProvider +from backend.config.init_config import get_config +from backend.utils.utils import is_url_valid, is_valid_name +import backend.modules.algoProviders.algoProvidersManager as algo_provider_manager +from backend.modules.algoProviders.AlgoProviderException import AlgoProviderException +from backend.modules.algoProviders.AlgoProvider import AlgoProvider ############################################################################# # Algo providers Management diff --git a/backend/controller/data.py b/backend/controller/data.py index 4cf45c011..420cb5d6e 100644 --- a/backend/controller/data.py +++ b/backend/controller/data.py @@ -1,8 +1,8 @@ ############################################################################# # Imports ############################################################################# -import modules.dataProviders.dataProviderManager as data_provider_manager -from modules.dataProviders.DataProviderException import DataProviderException +import backend.modules.dataProviders.dataProviderManager as data_provider_manager +from backend.modules.dataProviders.DataProviderException import DataProviderException ############################################################################# # Data Management diff --git a/backend/controller/dataProviders.py b/backend/controller/dataProviders.py index d9f9a7bbe..503a0fa21 100644 --- a/backend/controller/dataProviders.py +++ b/backend/controller/dataProviders.py @@ -1,11 +1,11 @@ ############################################################################# # Imports ############################################################################# -from config.init_config import get_config -from modules.dataProviders.webDataProvider.WebDataProvider import WebDataProvider -from utils.utils import is_url_valid -import modules.dataProviders.dataProviderManager as data_provider_manager -from modules.dataProviders.DataProviderException import 
DataProviderException +from backend.config.init_config import get_config +from backend.modules.dataProviders.webDataProvider.WebDataProvider import WebDataProvider +from backend.utils.utils import is_url_valid +import backend.modules.dataProviders.dataProviderManager as data_provider_manager +from backend.modules.dataProviders.DataProviderException import DataProviderException ############################################################################# # Data Providers Management diff --git a/backend/controller/exportMethods.py b/backend/controller/exportMethods.py index b521a4931..227c40767 100644 --- a/backend/controller/exportMethods.py +++ b/backend/controller/exportMethods.py @@ -1,5 +1,5 @@ -from config.init_config import get_config -import modules.exportMethods.exportUtils as exportUtils +from backend.config.init_config import get_config +import backend.modules.exportMethods.exportUtils as exportUtils ############################################################################# # Export API Management diff --git a/backend/controller/layouts.py b/backend/controller/layouts.py index 7edcb05bd..da3962a15 100644 --- a/backend/controller/layouts.py +++ b/backend/controller/layouts.py @@ -1,7 +1,7 @@ ############################################################################# # Imports ############################################################################# -import utils.layouts.layouts as layoutsUtils +import backend.utils.layouts.layouts as layoutsUtils ############################################################################# # Analysis dashboard layout Management diff --git a/backend/controller/models.py b/backend/controller/models.py index 4b4802bcb..4137e33ea 100644 --- a/backend/controller/models.py +++ b/backend/controller/models.py @@ -2,8 +2,8 @@ # Imports ############################################################################# -import modules.dataProviders.dataProviderManager as data_provider_manager -from modules.dataProviders.DataProviderException import DataProviderException +import backend.modules.dataProviders.dataProviderManager as data_provider_manager +from backend.modules.dataProviders.DataProviderException import DataProviderException ############################################################################# # MODELS Management diff --git a/backend/controller/projects.py b/backend/controller/projects.py index ad07770b1..fdfd26ee3 100644 --- a/backend/controller/projects.py +++ b/backend/controller/projects.py @@ -1,8 +1,8 @@ ############################################################################# # Imports ############################################################################# -from modules.dataProviders.DataProviderException import DataProviderException -import modules.dataProviders.dataProviderManager as data_provider_manager +from backend.modules.dataProviders.DataProviderException import DataProviderException +import backend.modules.dataProviders.dataProviderManager as data_provider_manager ############################################################################# # PROJECTS Management diff --git a/backend/controller/pythonModuleDp.py b/backend/controller/pythonModuleDp.py index f9b08c908..432b57944 100644 --- a/backend/controller/pythonModuleDp.py +++ b/backend/controller/pythonModuleDp.py @@ -1,5 +1,5 @@ -from modules.dataProviders.DataProviderException import DataProviderException -import modules.dataProviders.dataProviderManager as data_provider_manager +from backend.modules.dataProviders.DataProviderException import 
DataProviderException +import backend.modules.dataProviders.dataProviderManager as data_provider_manager # Project diff --git a/backend/controller/selection.py b/backend/controller/selection.py index f99a0506c..2e5ddc0e1 100644 --- a/backend/controller/selection.py +++ b/backend/controller/selection.py @@ -2,8 +2,8 @@ # Imports ############################################################################# -import modules.dataProviders.dataProviderManager as data_provider_manager -from modules.dataProviders.DataProviderException import DataProviderException +import backend.modules.dataProviders.dataProviderManager as data_provider_manager +from backend.modules.dataProviders.DataProviderException import DataProviderException ############################################################################# # Selections Management diff --git a/backend/controller/widgetConfigurations.py b/backend/controller/widgetConfigurations.py index ca066e32b..90b3e3764 100644 --- a/backend/controller/widgetConfigurations.py +++ b/backend/controller/widgetConfigurations.py @@ -1,7 +1,7 @@ ############################################################################# # Imports ############################################################################# -import utils.widgetConfigurations.widgetConfigurations as widgetConfUtils +import backend.utils.widgetConfigurations.widgetConfigurations as widgetConfUtils ############################################################################# # Widget configuration Management diff --git a/backend/init.py b/backend/init.py index b9ee094de..d0f645742 100644 --- a/backend/init.py +++ b/backend/init.py @@ -1,9 +1,28 @@ -import modules.dataProviders.dataProviderManager as dataProviderManager -import modules.exportMethods.exportUtils as exportUtils -import modules.algoProviders.algoProvidersManager as algoProvidersManager -import utils.widgetConfigurations.widgetConfigurations as widgetConfUtils -import utils.layouts.layouts as layoutsUtils -import config.init_config as config +# import backend.modules.dataProviders.dataProviderManager as dataProviderManager +# import backend.modules.exportMethods.exportUtils as exportUtils +# import backend.modules.algoProviders.algoProvidersManager as algoProvidersManager +# import backend.utils.widgetConfigurations.widgetConfigurations as widgetConfUtils +# import backend.utils.layouts.layouts as layoutsUtils +# import config.init_config as config + +from backend.modules.dataProviders import ( + dataProviderManager, +) +from backend.modules.exportMethods import ( + exportUtils, +) +from backend.modules.algoProviders import ( + algoProvidersManager, +) +from backend.utils.widgetConfigurations import ( + widgetConfigurations as widgetConfUtils, +) +from backend.utils.layouts import ( + layouts as layoutsUtils, +) +from backend.config import ( + init_config as config, +) def init(): diff --git a/backend/modules/__init__.py b/backend/modules/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/modules/algoProviders/AlgoProvider.py b/backend/modules/algoProviders/AlgoProvider.py index b0ec60c12..d446c655a 100644 --- a/backend/modules/algoProviders/AlgoProvider.py +++ b/backend/modules/algoProviders/AlgoProvider.py @@ -1,7 +1,7 @@ # Class for AlgoProvider import requests import json -from modules.algoProviders.AlgoProviderException import AlgoProviderException +from backend.modules.algoProviders.AlgoProviderException import AlgoProviderException class AlgoProvider: diff --git a/backend/modules/algoProviders/__init__.py 
b/backend/modules/algoProviders/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/modules/algoProviders/algoProvidersManager.py b/backend/modules/algoProviders/algoProvidersManager.py index 727dd517e..17d2bf79d 100644 --- a/backend/modules/algoProviders/algoProvidersManager.py +++ b/backend/modules/algoProviders/algoProvidersManager.py @@ -1,9 +1,9 @@ from termcolor import colored -from config.init_config import get_config, DEBUG_COLOR, ERROR_COLOR, SUCCESS_COLOR -from modules.algoProviders.AlgoProviderException import AlgoProviderException -from modules.algoProviders.AlgoProvider import AlgoProvider -from modules.algoProviders.integratedAlgoProvider.integratedAlgoProvider import ( +from backend.config.init_config import get_config, DEBUG_COLOR, ERROR_COLOR, SUCCESS_COLOR +from backend.modules.algoProviders.AlgoProviderException import AlgoProviderException +from backend.modules.algoProviders.AlgoProvider import AlgoProvider +from backend.modules.algoProviders.integratedAlgoProvider.integratedAlgoProvider import ( IntegratedAlgoProvider, ) diff --git a/backend/modules/algoProviders/integratedAlgoProvider/__init__.py b/backend/modules/algoProviders/integratedAlgoProvider/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/modules/algoProviders/integratedAlgoProvider/algorithms/__init__.py b/backend/modules/algoProviders/integratedAlgoProvider/algorithms/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/modules/algoProviders/integratedAlgoProvider/integratedAlgoProvider.py b/backend/modules/algoProviders/integratedAlgoProvider/integratedAlgoProvider.py index ad8b272bb..4b27ea380 100644 --- a/backend/modules/algoProviders/integratedAlgoProvider/integratedAlgoProvider.py +++ b/backend/modules/algoProviders/integratedAlgoProvider/integratedAlgoProvider.py @@ -1,9 +1,9 @@ import os from termcolor import colored -from config.init_config import DEBUG_COLOR -from modules.algoProviders.AlgoProvider import AlgoProvider -from modules.algoProviders.AlgoProviderException import AlgoProviderException +from backend.config.init_config import DEBUG_COLOR +from backend.modules.algoProviders.AlgoProvider import AlgoProvider +from backend.modules.algoProviders.AlgoProviderException import AlgoProviderException def _get_algorithm_python(algorithm_name): diff --git a/backend/modules/dataProviders/__init__.py b/backend/modules/dataProviders/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/modules/dataProviders/dataProviderManager.py b/backend/modules/dataProviders/dataProviderManager.py index 74da8a633..2b651dbb7 100644 --- a/backend/modules/dataProviders/dataProviderManager.py +++ b/backend/modules/dataProviders/dataProviderManager.py @@ -1,12 +1,19 @@ from termcolor import colored -from config.init_config import get_config, DEBUG_COLOR, ERROR_COLOR, SUCCESS_COLOR -from modules.dataProviders.webDataProvider.WebDataProvider import WebDataProvider -from modules.dataProviders.pythonDataProvider.PythonDataProvider import ( +from backend.config.init_config import ( + get_config, + DEBUG_COLOR, + ERROR_COLOR, + SUCCESS_COLOR, +) +from backend.modules.dataProviders.webDataProvider.WebDataProvider import ( + WebDataProvider, +) +from backend.modules.dataProviders.pythonDataProvider.PythonDataProvider import ( PythonDataProvider, PYTHON_DATA_PROVIDER_ID, ) -from modules.dataProviders.DataProviderException import DataProviderException +from backend.modules.dataProviders.DataProviderException import 
DataProviderException data_providers_list = [] python_data_provider_disabled = True diff --git a/backend/modules/dataProviders/pythonDataProvider/PythonDataProvider.py b/backend/modules/dataProviders/pythonDataProvider/PythonDataProvider.py index 7055bf282..c143d7a76 100644 --- a/backend/modules/dataProviders/pythonDataProvider/PythonDataProvider.py +++ b/backend/modules/dataProviders/pythonDataProvider/PythonDataProvider.py @@ -1,7 +1,7 @@ -from config.init_config import get_config -from modules.dataProviders.DataProvider import DataProvider -from modules.dataProviders.DataProviderException import DataProviderException -from modules.dataProviders.pythonDataProvider.dataUtils import ( +from backend.config.init_config import get_config +from backend.modules.dataProviders.DataProvider import DataProvider +from backend.modules.dataProviders.DataProviderException import DataProviderException +from backend.modules.dataProviders.pythonDataProvider.dataUtils import ( pythonModuleUtils, projects, samples, @@ -10,7 +10,7 @@ tree, ) -from utils.utils import get_app_version +from backend.utils.utils import get_app_version PYTHON_DATA_PROVIDER_ID = "Python module Data Provider" diff --git a/backend/modules/dataProviders/pythonDataProvider/__init__.py b/backend/modules/dataProviders/pythonDataProvider/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/modules/dataProviders/pythonDataProvider/dataUtils/__init__.py b/backend/modules/dataProviders/pythonDataProvider/dataUtils/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/modules/dataProviders/pythonDataProvider/dataUtils/hash.py b/backend/modules/dataProviders/pythonDataProvider/dataUtils/hash.py index b806d449c..844a309fa 100644 --- a/backend/modules/dataProviders/pythonDataProvider/dataUtils/hash.py +++ b/backend/modules/dataProviders/pythonDataProvider/dataUtils/hash.py @@ -1,7 +1,7 @@ import hashlib import ujson as json -from modules.dataProviders.pythonDataProvider.dataUtils import pythonModuleUtils +from backend.modules.dataProviders.pythonDataProvider.dataUtils import pythonModuleUtils DATA_PATH = pythonModuleUtils.DATA_PATH diff --git a/backend/modules/dataProviders/pythonDataProvider/dataUtils/models.py b/backend/modules/dataProviders/pythonDataProvider/dataUtils/models.py index 81f615e66..075df2087 100644 --- a/backend/modules/dataProviders/pythonDataProvider/dataUtils/models.py +++ b/backend/modules/dataProviders/pythonDataProvider/dataUtils/models.py @@ -1,11 +1,11 @@ import os import ujson as json -from modules.dataProviders.pythonDataProvider.dataUtils import ( +from backend.modules.dataProviders.pythonDataProvider.dataUtils import ( pythonModuleUtils, projects, tree, ) -from modules.dataProviders.DataProviderException import DataProviderException +from backend.modules.dataProviders.DataProviderException import DataProviderException DATA_PATH = pythonModuleUtils.DATA_PATH diff --git a/backend/modules/dataProviders/pythonDataProvider/dataUtils/projects.py b/backend/modules/dataProviders/pythonDataProvider/dataUtils/projects.py index e02bd5769..dcecd3f8a 100644 --- a/backend/modules/dataProviders/pythonDataProvider/dataUtils/projects.py +++ b/backend/modules/dataProviders/pythonDataProvider/dataUtils/projects.py @@ -2,7 +2,7 @@ import shutil import ujson as json -from modules.dataProviders.pythonDataProvider.dataUtils import pythonModuleUtils, hash +from backend.modules.dataProviders.pythonDataProvider.dataUtils import pythonModuleUtils, hash DATA_PATH = 
pythonModuleUtils.DATA_PATH diff --git a/backend/modules/dataProviders/pythonDataProvider/dataUtils/samples.py b/backend/modules/dataProviders/pythonDataProvider/dataUtils/samples.py index 8cfdb402c..6ee3676ef 100644 --- a/backend/modules/dataProviders/pythonDataProvider/dataUtils/samples.py +++ b/backend/modules/dataProviders/pythonDataProvider/dataUtils/samples.py @@ -1,4 +1,4 @@ -from modules.dataProviders.pythonDataProvider.dataUtils import ( +from backend.modules.dataProviders.pythonDataProvider.dataUtils import ( pythonModuleUtils, tree, hash, diff --git a/backend/modules/dataProviders/pythonDataProvider/dataUtils/selections.py b/backend/modules/dataProviders/pythonDataProvider/dataUtils/selections.py index 206d700c9..8281886ef 100644 --- a/backend/modules/dataProviders/pythonDataProvider/dataUtils/selections.py +++ b/backend/modules/dataProviders/pythonDataProvider/dataUtils/selections.py @@ -1,7 +1,7 @@ import os import ujson as json -from modules.dataProviders.pythonDataProvider.dataUtils import ( +from backend.modules.dataProviders.pythonDataProvider.dataUtils import ( pythonModuleUtils, projects, ) diff --git a/backend/modules/dataProviders/pythonDataProvider/dataUtils/tags.py b/backend/modules/dataProviders/pythonDataProvider/dataUtils/tags.py index ff4fe0ee3..13f6ce476 100644 --- a/backend/modules/dataProviders/pythonDataProvider/dataUtils/tags.py +++ b/backend/modules/dataProviders/pythonDataProvider/dataUtils/tags.py @@ -1,5 +1,5 @@ import os -from modules.dataProviders.pythonDataProvider.dataUtils import pythonModuleUtils, hash +from backend.modules.dataProviders.pythonDataProvider.dataUtils import pythonModuleUtils, hash DATA_PATH = pythonModuleUtils.DATA_PATH diff --git a/backend/modules/dataProviders/pythonDataProvider/dataUtils/tree.py b/backend/modules/dataProviders/pythonDataProvider/dataUtils/tree.py index 40893c698..08fec8241 100644 --- a/backend/modules/dataProviders/pythonDataProvider/dataUtils/tree.py +++ b/backend/modules/dataProviders/pythonDataProvider/dataUtils/tree.py @@ -1,7 +1,7 @@ import ujson as json import os -from modules.dataProviders.pythonDataProvider.dataUtils import ( +from backend.modules.dataProviders.pythonDataProvider.dataUtils import ( pythonModuleUtils, projects, models, diff --git a/backend/modules/dataProviders/webDataProvider/WebDataProvider.py b/backend/modules/dataProviders/webDataProvider/WebDataProvider.py index 60d799f46..3ff8afb3a 100644 --- a/backend/modules/dataProviders/webDataProvider/WebDataProvider.py +++ b/backend/modules/dataProviders/webDataProvider/WebDataProvider.py @@ -1,22 +1,22 @@ -from modules.dataProviders.DataProvider import DataProvider -from modules.dataProviders.webDataProvider.useCases.data import ( +from backend.modules.dataProviders.DataProvider import DataProvider +from backend.modules.dataProviders.webDataProvider.useCases.data import ( get_project_id_list, get_project_samples, ) -from modules.dataProviders.webDataProvider.useCases.projects import ( +from backend.modules.dataProviders.webDataProvider.useCases.projects import ( get_all_projects_from_data_provider, get_single_project_from_data_provider, delete_project, ) -from modules.dataProviders.webDataProvider.useCases.models import ( +from backend.modules.dataProviders.webDataProvider.useCases.models import ( get_model_results, get_models_info, get_model_result_id, delete_model, ) -import modules.dataProviders.webDataProvider.useCases.selections as useCaseSelections -from modules.dataProviders.webDataProvider.http.api import get_info, get_status -from 
modules.dataProviders.webDataProvider.cache.cache import Cache +import backend.modules.dataProviders.webDataProvider.useCases.selections as useCaseSelections +from backend.modules.dataProviders.webDataProvider.http.api import get_info, get_status +from backend.modules.dataProviders.webDataProvider.cache.cache import Cache # WebDataProvider class, allow to get data from a web data-provider diff --git a/backend/modules/dataProviders/webDataProvider/__init__.py b/backend/modules/dataProviders/webDataProvider/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/modules/dataProviders/webDataProvider/cache/__init__.py b/backend/modules/dataProviders/webDataProvider/cache/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/modules/dataProviders/webDataProvider/cache/cache.py b/backend/modules/dataProviders/webDataProvider/cache/cache.py index 9022e8a47..68b2c05f7 100644 --- a/backend/modules/dataProviders/webDataProvider/cache/cache.py +++ b/backend/modules/dataProviders/webDataProvider/cache/cache.py @@ -3,7 +3,7 @@ # It will mainly save the id list of samples, selections and models results # The ability to cache and the time to live are configurable in the config file -from config.init_config import get_config +from backend.config.init_config import get_config from cacheout import Cache as CacheoutCache diff --git a/backend/modules/dataProviders/webDataProvider/http/__init__.py b/backend/modules/dataProviders/webDataProvider/http/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/modules/dataProviders/webDataProvider/http/api.py b/backend/modules/dataProviders/webDataProvider/http/api.py index 5be2c8df0..1b32e7fa4 100644 --- a/backend/modules/dataProviders/webDataProvider/http/api.py +++ b/backend/modules/dataProviders/webDataProvider/http/api.py @@ -1,6 +1,6 @@ import requests import json -from modules.dataProviders.DataProviderException import DataProviderException +from backend.modules.dataProviders.DataProviderException import DataProviderException # Todo : change info if in not alive anymore diff --git a/backend/modules/dataProviders/webDataProvider/useCases/__init__.py b/backend/modules/dataProviders/webDataProvider/useCases/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/modules/dataProviders/webDataProvider/useCases/data.py b/backend/modules/dataProviders/webDataProvider/useCases/data.py index 10a905415..eb3ba8d63 100644 --- a/backend/modules/dataProviders/webDataProvider/useCases/data.py +++ b/backend/modules/dataProviders/webDataProvider/useCases/data.py @@ -1,4 +1,4 @@ -import modules.dataProviders.webDataProvider.http.api as api +import backend.modules.dataProviders.webDataProvider.http.api as api # # UseCase folder role is the middleware between class methods and http requests diff --git a/backend/modules/dataProviders/webDataProvider/useCases/models.py b/backend/modules/dataProviders/webDataProvider/useCases/models.py index f24002f64..6b646b261 100644 --- a/backend/modules/dataProviders/webDataProvider/useCases/models.py +++ b/backend/modules/dataProviders/webDataProvider/useCases/models.py @@ -1,5 +1,5 @@ -import modules.dataProviders.webDataProvider.http.api as api -from modules.dataProviders.DataProviderException import DataProviderException +import backend.modules.dataProviders.webDataProvider.http.api as api +from backend.modules.dataProviders.DataProviderException import DataProviderException def get_models_info(url, project_id): diff --git 
a/backend/modules/dataProviders/webDataProvider/useCases/projects.py b/backend/modules/dataProviders/webDataProvider/useCases/projects.py index 11309f2f3..c3dad454b 100644 --- a/backend/modules/dataProviders/webDataProvider/useCases/projects.py +++ b/backend/modules/dataProviders/webDataProvider/useCases/projects.py @@ -1,7 +1,7 @@ -import modules.dataProviders.webDataProvider.http.api as api +import backend.modules.dataProviders.webDataProvider.http.api as api -from modules.dataProviders.webDataProvider.useCases.models import get_models_info -from modules.dataProviders.webDataProvider.useCases.selections import ( +from backend.modules.dataProviders.webDataProvider.useCases.models import get_models_info +from backend.modules.dataProviders.webDataProvider.useCases.selections import ( get_project_selections, ) diff --git a/backend/modules/dataProviders/webDataProvider/useCases/selections.py b/backend/modules/dataProviders/webDataProvider/useCases/selections.py index 93e2cefcc..794328d4c 100644 --- a/backend/modules/dataProviders/webDataProvider/useCases/selections.py +++ b/backend/modules/dataProviders/webDataProvider/useCases/selections.py @@ -1,5 +1,5 @@ -import modules.dataProviders.webDataProvider.http.api as api -from modules.dataProviders.DataProviderException import DataProviderException +import backend.modules.dataProviders.webDataProvider.http.api as api +from backend.modules.dataProviders.DataProviderException import DataProviderException def get_project_selections(url, project_id): diff --git a/backend/modules/exportMethods/__init__.py b/backend/modules/exportMethods/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/modules/exportMethods/exportUtils.py b/backend/modules/exportMethods/exportUtils.py index 468b5ed7f..d7502bcd6 100644 --- a/backend/modules/exportMethods/exportUtils.py +++ b/backend/modules/exportMethods/exportUtils.py @@ -1,10 +1,10 @@ -from config.init_config import get_config -import modules.dataProviders.dataProviderManager as data_provider_manager -from modules.dataProviders.DataProviderException import DataProviderException +from backend.config.init_config import get_config +import backend.modules.dataProviders.dataProviderManager as data_provider_manager +from backend.modules.dataProviders.DataProviderException import DataProviderException import time -from modules.exportMethods.methods.kafkaUtils import KafkaExportType -from modules.exportMethods.methods.postUtils import PostExportType +from backend.modules.exportMethods.methods.kafkaUtils import KafkaExportType +from backend.modules.exportMethods.methods.postUtils import PostExportType ############################################################################# # diff --git a/backend/modules/exportMethods/methods/__init__.py b/backend/modules/exportMethods/methods/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/modules/exportMethods/methods/kafkaUtils.py b/backend/modules/exportMethods/methods/kafkaUtils.py index 61cc67243..09c708df1 100644 --- a/backend/modules/exportMethods/methods/kafkaUtils.py +++ b/backend/modules/exportMethods/methods/kafkaUtils.py @@ -1,5 +1,5 @@ from kafka import KafkaProducer -from modules.exportMethods.exportClass import ExportType, ExportMethod +from backend.modules.exportMethods.exportClass import ExportType, ExportMethod import json ############################################################################# diff --git a/backend/modules/exportMethods/methods/postUtils.py 
b/backend/modules/exportMethods/methods/postUtils.py index db5b3dbbd..a37d18aff 100644 --- a/backend/modules/exportMethods/methods/postUtils.py +++ b/backend/modules/exportMethods/methods/postUtils.py @@ -1,4 +1,4 @@ -from modules.exportMethods.exportClass import ExportType, ExportMethod +from backend.modules.exportMethods.exportClass import ExportType, ExportMethod import requests ############################################################################# diff --git a/backend/server.py b/backend/server.py new file mode 100644 index 000000000..0c3e2210c --- /dev/null +++ b/backend/server.py @@ -0,0 +1,5 @@ +from backend.backend import start_server + + +def run(): + start_server() diff --git a/backend/swagger.yaml b/backend/swagger.yaml index f2aef6f38..a6cb5c5b4 100644 --- a/backend/swagger.yaml +++ b/backend/swagger.yaml @@ -12,7 +12,7 @@ paths: /version: get: summary: Ping to check if the backend is running - operationId: controller.projects.ping + operationId: backend.controller.projects.ping responses: 200: description: The server is online @@ -22,7 +22,7 @@ paths: get: summary: Get data providers list and status tags: [Data Providers] - operationId: controller.dataProviders.get_data_providers + operationId: backend.controller.dataProviders.get_data_providers responses: 200: description: List of data providers @@ -34,7 +34,7 @@ paths: post: summary: Add data provider to data providers list tags: [Data Providers] - operationId: controller.dataProviders.post_data_providers + operationId: backend.controller.dataProviders.post_data_providers parameters: - name: data in: body @@ -63,7 +63,7 @@ paths: delete: summary: Delete data providers from the list tags: [Data Providers] - operationId: controller.dataProviders.delete_data_providers + operationId: backend.controller.dataProviders.delete_data_providers parameters: - name: dataProviderId in: path @@ -80,7 +80,7 @@ paths: get: summary: Get general informations about a data provider, like his version or the max number sample for each type of request tags: [Data Providers] - operationId: controller.dataProviders.get_data_provider_info + operationId: backend.controller.dataProviders.get_data_provider_info parameters: - name: dataProviderId in: path @@ -126,7 +126,7 @@ paths: get: summary: Get the projects overview tags: [Project] - operationId: controller.projects.get_projects + operationId: backend.controller.projects.get_projects responses: 200: description: List of project overviews @@ -138,7 +138,7 @@ paths: post: summary: Post a new project tags: [Project] - operationId: controller.pythonModuleDp.post_project + operationId: backend.controller.pythonModuleDp.post_project parameters: - name: data in: body @@ -179,7 +179,7 @@ paths: get: summary: Get the projects overview for a data provider tags: [Project] - operationId: controller.projects.get_data_providers_project + operationId: backend.controller.projects.get_data_providers_project parameters: - name: dataProviderId in: path @@ -197,7 +197,7 @@ paths: get: summary: Get project name, nb of models & nb of selections (overviews of a project) tags: [Project] - operationId: controller.projects.get_project + operationId: backend.controller.projects.get_project parameters: - name: dataProviderId in: path @@ -216,7 +216,7 @@ paths: delete: summary: remove a project from ID tags: [Project] - operationId: controller.projects.delete_project + operationId: backend.controller.projects.delete_project parameters: - name: dataProviderId in: path @@ -236,7 +236,7 @@ paths: post: summary: Get the 
project data id list tags: [Project] - operationId: controller.projects.get_data_id_list + operationId: backend.controller.projects.get_data_id_list parameters: - name: dataProviderId in: path @@ -290,7 +290,7 @@ paths: post: summary: add a new data blocks level structure tags: [Project] - operationId: controller.pythonModuleDp.post_block_levels + operationId: backend.controller.pythonModuleDp.post_block_levels parameters: - name: dataProviderId in: path @@ -348,7 +348,7 @@ paths: post: summary: add a new expected results structure tags: [Project] - operationId: controller.pythonModuleDp.post_resultsStructure + operationId: backend.controller.pythonModuleDp.post_resultsStructure parameters: - name: dataProviderId in: path @@ -399,7 +399,7 @@ paths: post: summary: add a model tags: [Model] - operationId: controller.models.post_model + operationId: backend.controller.models.post_model parameters: - name: dataProviderId in: path @@ -436,7 +436,7 @@ paths: get: summary: Get a model results id list tags: [Model] - operationId: controller.models.get_model_id_list + operationId: backend.controller.models.get_model_id_list parameters: - name: dataProviderId in: path @@ -463,7 +463,7 @@ paths: delete: summary: remove a model tags: [Model] - operationId: controller.models.delete_model + operationId: backend.controller.models.delete_model parameters: - name: dataProviderId in: path @@ -483,11 +483,11 @@ paths: 404: description: model or project doesn't exist - /data-providers/{dataProviderId}/projects/{projectId}/models/{modelId}/resultsDict: - post: + ? /data-providers/{dataProviderId}/projects/{projectId}/models/{modelId}/resultsDict + : post: summary: Add a results to a model tags: [Model] - operationId: controller.pythonModuleDp.add_results_dict + operationId: backend.controller.pythonModuleDp.add_results_dict parameters: - name: dataProviderId in: path @@ -524,11 +524,11 @@ paths: 404: description: model or project doesn't exist - /data-providers/{dataProviderId}/projects/{projectId}/models/{modelId}/getModelResults: - post: + ? /data-providers/{dataProviderId}/projects/{projectId}/models/{modelId}/getModelResults + : post: summary: Get the model results from a sample list tags: [Model] - operationId: controller.models.get_results + operationId: backend.controller.models.get_results parameters: - name: dataProviderId in: path @@ -570,7 +570,7 @@ paths: post: summary: add a tree to an existing project block tree tags: [Block] - operationId: controller.pythonModuleDp.post_block_tree + operationId: backend.controller.pythonModuleDp.post_block_tree parameters: - name: dataProviderId in: path @@ -605,7 +605,7 @@ paths: post: summary: get a project tree form a sample list tags: [Block] - operationId: controller.data.get_data + operationId: backend.controller.data.get_data parameters: - name: dataProviderId in: path @@ -647,7 +647,7 @@ paths: get: summary: Get the project selections tags: [Selection] - operationId: controller.selection.get_selections + operationId: backend.controller.selection.get_selections parameters: - name: dataProviderId in: path @@ -668,7 +668,7 @@ paths: post: summary: add a selection tags: [Selection] - operationId: controller.selection.post_selection + operationId: backend.controller.selection.post_selection parameters: - name: dataProviderId in: path @@ -706,11 +706,11 @@ paths: schema: type: object - /data-providers/{dataProviderId}/projects/{projectId}/selections/{selectionId}: - get: + ? 
/data-providers/{dataProviderId}/projects/{projectId}/selections/{selectionId} + : get: summary: Get a project selection id list tags: [Selection] - operationId: controller.selection.get_selection_id_list + operationId: backend.controller.selection.get_selection_id_list parameters: - name: dataProviderId in: path @@ -737,7 +737,7 @@ paths: delete: summary: delete a selection tags: [Selection] - operationId: controller.selection.delete_selection + operationId: backend.controller.selection.delete_selection parameters: - name: dataProviderId in: path @@ -760,7 +760,7 @@ paths: get: summary: Get all layouts tags: [Layouts] - operationId: controller.layouts.get_layouts + operationId: backend.controller.layouts.get_layouts responses: 200: description: Layouts for all projects @@ -772,7 +772,7 @@ paths: post: summary: Add a layout tags: [Layouts] - operationId: controller.layouts.post_layout + operationId: backend.controller.layouts.post_layout parameters: - name: data in: body @@ -819,7 +819,7 @@ paths: delete: summary: Delete a layout tags: [Layouts] - operationId: controller.layouts.delete_layout + operationId: backend.controller.layouts.delete_layout parameters: - name: id in: path @@ -839,7 +839,7 @@ paths: summary: Get all widget configurations overview, return the number of configurations for each widget tags: [Widget configurations] - operationId: controller.widgetConfigurations.get_all_configurations + operationId: backend.controller.widgetConfigurations.get_all_configurations responses: 200: description: Widget configurations number for each widget @@ -855,7 +855,7 @@ paths: get: summary: Get the widget configurations tags: [Widget configurations] - operationId: controller.widgetConfigurations.get_widget_configurations + operationId: backend.controller.widgetConfigurations.get_widget_configurations parameters: - name: widgetKey in: path @@ -897,7 +897,7 @@ paths: post: summary: Add a widget configuration tags: [Widget configurations] - operationId: controller.widgetConfigurations.post_configuration + operationId: backend.controller.widgetConfigurations.post_configuration parameters: - name: widgetKey in: path @@ -942,7 +942,7 @@ paths: delete: summary: Delete a widget configuration tags: [Widget configurations] - operationId: controller.widgetConfigurations.delete_configuration + operationId: backend.controller.widgetConfigurations.delete_configuration parameters: - name: widgetKey in: path @@ -966,7 +966,7 @@ paths: get: summary: Get the application export methods tags: [Export] - operationId: controller.exportMethods.get_export_methods + operationId: backend.controller.exportMethods.get_export_methods responses: 200: description: Export method list @@ -997,7 +997,7 @@ paths: post: summary: Create an export method for the app tags: [Export] - operationId: controller.exportMethods.post_export_method + operationId: backend.controller.exportMethods.post_export_method parameters: - name: data in: body @@ -1029,7 +1029,7 @@ paths: delete: summary: Remove an export method for the app tags: [Export] - operationId: controller.exportMethods.delete_export_method + operationId: backend.controller.exportMethods.delete_export_method parameters: - name: exportMethodId in: path @@ -1045,7 +1045,7 @@ paths: post: summary: Export data with an export method tags: [Export] - operationId: controller.exportMethods.exportData + operationId: backend.controller.exportMethods.exportData parameters: - name: exportMethodId in: path @@ -1063,7 +1063,7 @@ paths: post: summary: Export a selected sample id list from 
an export method tags: [Export] - operationId: controller.exportMethods.exportSelection + operationId: backend.controller.exportMethods.exportSelection parameters: - name: dataProviderId in: path @@ -1103,7 +1103,7 @@ paths: get: summary: Get all Algo providers and their algorithms tags: [AlgoProviders] - operationId: controller.algoProviders.get_algo_providers + operationId: backend.controller.algoProviders.get_algo_providers responses: 200: description: Algorithms list @@ -1116,7 +1116,7 @@ paths: post: summary: Add an Algo provider tags: [AlgoProviders] - operationId: controller.algoProviders.post_algo_provider + operationId: backend.controller.algoProviders.post_algo_provider parameters: - name: data in: body @@ -1144,7 +1144,7 @@ paths: delete: summary: Delete an Algo provider tags: [AlgoProviders] - operationId: controller.algoProviders.delete_algo_provider + operationId: backend.controller.algoProviders.delete_algo_provider parameters: - name: name in: path @@ -1162,7 +1162,7 @@ paths: post: summary: Use an algorithm of an Algo provider tags: [AlgoProviders] - operationId: controller.algoProviders.use_algo + operationId: backend.controller.algoProviders.use_algo parameters: - name: algoProviderName in: path @@ -1229,7 +1229,7 @@ paths: post: summary: Calculate pearson correlation between rows tags: [Statistical operations] - operationId: controller.statisticalOperations.pearsonCorrelation + operationId: backend.controller.statisticalOperations.pearsonCorrelation parameters: - name: data in: body @@ -1260,7 +1260,7 @@ paths: post: summary: Calculate spearman correlation between rows tags: [Statistical operations] - operationId: controller.statisticalOperations.spearmanCorrelation + operationId: backend.controller.statisticalOperations.spearmanCorrelation parameters: - name: data in: body @@ -1291,7 +1291,7 @@ paths: post: summary: Calculate mutual informations tags: [Statistical operations] - operationId: controller.statisticalOperations.mutualInformation + operationId: backend.controller.statisticalOperations.mutualInformation parameters: - name: data in: body @@ -1343,7 +1343,7 @@ paths: post: summary: Calculate the mutual information between variables tags: [Statistical operations] - operationId: controller.statisticalOperations.higherDimensionMutualInformation + operationId: backend.controller.statisticalOperations.higherDimensionMutualInformation parameters: - name: data in: body @@ -1380,7 +1380,7 @@ paths: post: summary: Calculate matrix mutual informations and the higher Dimension tags: [Statistical operations] - operationId: controller.statisticalOperations.mutualAndHigherInformation + operationId: backend.controller.statisticalOperations.mutualAndHigherInformation parameters: - name: data in: body diff --git a/backend/tests/__init__.py b/backend/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/utils/__init__.py b/backend/utils/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/utils/layouts/__init__.py b/backend/utils/layouts/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/utils/layouts/layouts.py b/backend/utils/layouts/layouts.py index 013b12b73..0fdc654e3 100644 --- a/backend/utils/layouts/layouts.py +++ b/backend/utils/layouts/layouts.py @@ -1,6 +1,6 @@ import os import json -import utils.utils as utils +import backend.utils.utils as utils import uuid diff --git a/backend/utils/widgetConfigurations/__init__.py b/backend/utils/widgetConfigurations/__init__.py new file 
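Throughout swagger.yaml the operationId values switch from controller.* to backend.controller.*. Connexion resolves an operationId as a dotted import path to the handler function, so these identifiers have to match the new package layout. A small illustration of that correspondence, assuming the backend package is installed; the route shown is the /version route declared earlier in this file:

    # swagger.yaml declares: operationId: backend.controller.projects.ping
    # Connexion imports that dotted path and calls the function, which returns a
    # (body, status) pair.
    import importlib

    module_path, func_name = "backend.controller.projects.ping".rsplit(".", 1)
    handler = getattr(importlib.import_module(module_path), func_name)
    print(handler())  # ('Online', 200)
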
mode 100644 index 000000000..e69de29bb diff --git a/backend/utils/widgetConfigurations/widgetConfigurations.py b/backend/utils/widgetConfigurations/widgetConfigurations.py index c6be4d520..8fddf1d59 100644 --- a/backend/utils/widgetConfigurations/widgetConfigurations.py +++ b/backend/utils/widgetConfigurations/widgetConfigurations.py @@ -1,6 +1,6 @@ import os import json -import utils.utils as utils +import backend.utils.utils as utils import uuid CONF_PATH = "data/widgetConfigurations.json" diff --git a/backend/websrv.py b/backend/websrv.py index 373ab3ade..22db667d4 100644 --- a/backend/websrv.py +++ b/backend/websrv.py @@ -5,8 +5,8 @@ from flask_cors import CORS from flask import send_from_directory, request, Response from init import init -from utils.utils import get_app_version -from config.init_config import DEBUG_COLOR +from backend.utils.utils import get_app_version +from backend.config.init_config import DEBUG_COLOR DEV_FRONTEND_URL = "http://localhost:8080/" PORT = 3000 @@ -45,7 +45,7 @@ def send_frontend(path): return response except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): return ( - "You are in a development environment and the DebAI frontend " + "You are in a development environment and the DebAI frontend" + "is not available at the url : " + DEV_FRONTEND_URL, 503, diff --git a/build/lib/backend/__init__.py b/build/lib/backend/__init__.py new file mode 100644 index 000000000..4a22300fc --- /dev/null +++ b/build/lib/backend/__init__.py @@ -0,0 +1 @@ +from backend.backend import send_frontend, create_app, start_server \ No newline at end of file diff --git a/build/lib/backend/backend.py b/build/lib/backend/backend.py new file mode 100644 index 000000000..7429936da --- /dev/null +++ b/build/lib/backend/backend.py @@ -0,0 +1,80 @@ +import connexion +import os +import requests +from termcolor import colored +from flask_cors import CORS +from flask import send_from_directory, request, Response +from backend.init import init +from backend.utils.utils import get_app_version +from backend.config.init_config import DEBUG_COLOR + +DEV_FRONTEND_URL = "http://localhost:8080/" +PORT = 3000 + + +def send_frontend(path): + if path == "/": + path = "index.html" + + # If production, use the index.html from the dist folder + if os.getenv("FLASK_ENV") == "production": + return send_from_directory("dist", path) + + # In development, redirect to the DEV_FRONTEND_URL + else: + if request.method == "GET": + try: + resp = requests.get(f"{DEV_FRONTEND_URL}{path}") + excluded_headers = [ + "content-encoding", + "content-length", + "transfer-encoding", + "connection", + ] + headers = [ + (name, value) + for (name, value) in resp.raw.headers.items() + if name.lower() not in excluded_headers + ] + response = Response(resp.content, resp.status_code, headers) + return response + except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): + return ( + "You are in a development environment and the DebAI frontend" + + "is not available at the url : " + + DEV_FRONTEND_URL, + 503, + ) + else: + print("Unexpected request method") + + +def create_app(): + app = connexion.App(__name__) + app.add_api("swagger.yaml", strict_validation=True) + CORS(app.app) + + # For serving the dashboard + @app.route("/") + def send_index(): + return send_frontend("/") + + # For serving the dashboard assets + @app.route("/") + def send_supporting_elements(path): + return send_frontend(path) + + return app + + +def start_server(): + # Run DebiAI init + print("================= DebiAI " + 
get_app_version() + " ====================") + init() + print("======================== RUN =======================") + print( + " DebiAI is available at " + + colored("http://localhost:" + str(PORT), DEBUG_COLOR) + ) + app = create_app() + app.run(port=PORT, debug=True) diff --git a/build/lib/backend/config/__init__.py b/build/lib/backend/config/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/build/lib/backend/config/init_config.py b/build/lib/backend/config/init_config.py new file mode 100644 index 000000000..f8663f715 --- /dev/null +++ b/build/lib/backend/config/init_config.py @@ -0,0 +1,240 @@ +from configparser import ConfigParser +from termcolor import colored + +import os + +config_path = "config/config.ini" +config_parser = ConfigParser() + +DEBUG_COLOR = "light_blue" +DEBUG_SECONDARY_COLOR = "blue" +ERROR_COLOR = "light_red" +SUCCESS_COLOR = "green" + +# Default config +config = { + "DATA_PROVIDERS_CONFIG": { + "creation": True, + "deletion": True, + }, + "INTEGRATED_DATA_PROVIDER": { + "enabled": True, + "allow_create_projects": True, + "allow_delete_projects": True, + "allow_insert_data": True, + "allow_create_selections": True, + "allow_delete_selections": True, + "allow_create_models": True, + "allow_delete_models": True, + "allow_insert_results": True, + }, + "WEB_DATA_PROVIDERS_CONFIG": { + "cache": True, + "cache_duration": 120, + }, + "WEB_DATA_PROVIDERS": { + # "name": "url" + }, + "ALGO_PROVIDERS_CONFIG": { + "enable_integrated": True, + "creation": True, + "deletion": True, + }, + "ALGO_PROVIDERS": { + # "name": "url" + }, + "EXPORT_METHODS_CONFIG": { + "creation": True, + "deletion": True, + }, + "EXPORT_METHODS": { + # "name": "type, param1, param2, ..." + }, +} + +# Env vars mapping +ENV_VAR_MAPPING = { + "DATA_PROVIDERS_CONFIG": { + "creation": "DEBIAI_DATA_PROVIDERS_ALLOW_CREATION", + "deletion": "DEBIAI_DATA_PROVIDERS_ALLOW_DELETION", + }, + "INTEGRATED_DATA_PROVIDER": { + "enabled": "DEBIAI_INTEGRATED_DATA_PROVIDER_ENABLED", + "allow_create_projects": "DEBIAI_INTEGRATED_DP_ALLOW_CREATE_PROJECTS", + "allow_delete_projects": "DEBIAI_INTEGRATED_DP_ALLOW_DELETE_PROJECTS", + "allow_insert_data": "DEBIAI_INTEGRATED_DP_ALLOW_INSERT_DATA", + "allow_create_selections": "DEBIAI_INTEGRATED_DP_ALLOW_CREATE_SELECTIONS", + "allow_delete_selections": "DEBIAI_INTEGRATED_DP_ALLOW_DELETE_SELECTIONS", + "allow_create_models": "DEBIAI_INTEGRATED_DP_ALLOW_CREATE_MODELS", + "allow_delete_models": "DEBIAI_INTEGRATED_DP_ALLOW_DELETE_MODELS", + "allow_insert_results": "DEBIAI_INTEGRATED_DP_ALLOW_INSERT_RESULTS", + }, + "WEB_DATA_PROVIDERS_CONFIG": { + "cache": "DEBIAI_WEB_DATA_PROVIDERS_CACHE_ENABLED", + "cache_duration": "DEBIAI_WEB_DATA_PROVIDERS_CACHE_DURATION", + }, + "ALGO_PROVIDERS_CONFIG": { + "enable_integrated": "DEBIAI_ALGO_PROVIDERS_ENABLE_INTEGRATED", + "creation": "DEBIAI_ALGO_PROVIDERS_ALLOW_CREATION", + "deletion": "DEBIAI_ALGO_PROVIDERS_ALLOW_DELETION", + }, + "EXPORT_METHODS_CONFIG": { + "creation": "DEBIAI_EXPORT_METHODS_ALLOW_CREATION", + "deletion": "DEBIAI_EXPORT_METHODS_ALLOW_DELETION", + }, +} + +# List of list based config sections + their env var mapping +LIST_CONFIG_SECTIONS = { + "WEB_DATA_PROVIDERS": "DEBIAI_WEB_DATA_PROVIDER_", + "ALGO_PROVIDERS": "DEBIAI_ALGO_PROVIDER_", + "EXPORT_METHODS": "DEBIAI_EXPORT_METHOD_", +} + +changes_made = False + + +def get_config_value(section, key, config_parser): + # Return the value of the key in the section of the config_parser + # Or return the ENV_VAR if it exists + + value = None + ENV_VAR = 
ENV_VAR_MAPPING[section][key] + + # Get the value from the config file + if section in config_parser and key in config_parser[section]: + value = str.lower(config_parser[section][key]) + + # Get the value from the environment variables + if ENV_VAR in os.environ: + value = str.lower(os.environ[ENV_VAR]) + + if value is None: + print( + " - Missing " + + colored(section, DEBUG_SECONDARY_COLOR) + + " / " + + colored(key, DEBUG_SECONDARY_COLOR) + + " in config or in " + + colored(ENV_VAR, DEBUG_SECONDARY_COLOR) + + " env var, using default" + ) + return None + + return value + + +def get_config_values(section, config_parser): + # Return a dict of the values of the section of the config_parser + # Or return the ENV_VAR if it exists + + values = {} + ENV_VAR = LIST_CONFIG_SECTIONS[section] + + # Get the value from the config file + if section in config_parser: + for key in config_parser[section]: + values[key] = str.lower(config_parser[section][key]) + + # Get the value from the environment variables + # iterate over the keys of the env var + for key in os.environ.keys(): + if key.startswith(ENV_VAR): + # Get the key name without the env var prefix + key_name = key[len(ENV_VAR) :] # noqa + values[key_name] = str.lower(os.environ[key]) + + return values + + +def set_config_value(section, key, value): + global config, changes_made + + if section in config and key in config[section]: + if config[section][key] != value: + # The default value is different from the one in the config file + config[section][key] = value + changes_made = True + + print( + " - Overriding " + + colored(section, DEBUG_COLOR) + + " / " + + colored(key, DEBUG_COLOR) + + " with value " + + colored(str(value), DEBUG_COLOR) + ) + + +def init_config(): + global config + + print("===================== CONFIG =======================") + + # Read the config file + config_parser.read(config_path) + + for section in config.keys(): + # Deal with boolean, integer and string values + for key in config[section].keys(): + # Get the value from the config file or the environment variables + value = get_config_value(section, key, config_parser) + + if value is None: + continue + + # Deal with booleans + if type(config[section][key]) is bool: + if value == "false": + set_config_value(section, key, False) + elif value == "true": + set_config_value(section, key, True) + else: + print( + colored(" [ERROR]", ERROR_COLOR) + + " Invalid boolean value for " + + colored(key, DEBUG_COLOR) + + ", using default value" + ) + continue + + # Deal with integers + elif type(config[section][key]) is int: + try: + set_config_value(section, key, int(value)) + except ValueError: + print( + colored(" [ERROR]", ERROR_COLOR) + + " Invalid integer value for " + + colored(key, DEBUG_COLOR) + + ", using default value" + ) + continue + + # Deal with strings + elif type(config[section][key]) is str: + set_config_value(section, key, str(value)) + + # Deal with list based config elements + if section in LIST_CONFIG_SECTIONS: + elements = get_config_values(section, config_parser) + + for element_name in elements: + print( + " - Adding " + + section.lower().replace("_", "-")[0:-1] + + " " + + colored(element_name, DEBUG_COLOR) + + " (" + + colored(elements[element_name], DEBUG_COLOR) + + ")" + ) + + config[section][element_name] = elements[element_name] + + if not changes_made: + print(" Default config used") + + +def get_config(): + return config diff --git a/build/lib/backend/controller/__init__.py b/build/lib/backend/controller/__init__.py new file mode 100644 index 
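As get_config_value and get_config_values above show, each default can be overridden either from config/config.ini or from the mapped environment variable, with the environment variable taking precedence, and the list-based sections pick up any variable that starts with the section prefix. A small sketch of the environment-variable route, using names from ENV_VAR_MAPPING and LIST_CONFIG_SECTIONS above; the provider name and URL are made up:

    import os

    # Disable the web data provider cache and register an extra web data provider.
    os.environ["DEBIAI_WEB_DATA_PROVIDERS_CACHE_ENABLED"] = "false"
    os.environ["DEBIAI_WEB_DATA_PROVIDER_demo"] = "http://localhost:5000"

    from backend.config.init_config import init_config, get_config

    init_config()
    config = get_config()
    print(config["WEB_DATA_PROVIDERS_CONFIG"]["cache"])  # False
    print(config["WEB_DATA_PROVIDERS"])                  # {'demo': 'http://localhost:5000'}
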
000000000..e69de29bb diff --git a/build/lib/backend/controller/algoProviders.py b/build/lib/backend/controller/algoProviders.py new file mode 100644 index 000000000..3116cf164 --- /dev/null +++ b/build/lib/backend/controller/algoProviders.py @@ -0,0 +1,70 @@ +############################################################################# +# Imports +############################################################################# +from backend.config.init_config import get_config +from backend.utils.utils import is_url_valid, is_valid_name +import backend.modules.algoProviders.algoProvidersManager as algo_provider_manager +from backend.modules.algoProviders.AlgoProviderException import AlgoProviderException +from backend.modules.algoProviders.AlgoProvider import AlgoProvider + +############################################################################# +# Algo providers Management +############################################################################# + + +def get_algo_providers(): + algorithms = algo_provider_manager.get_algo_providers_json() + return algorithms, 200 + + +def post_algo_provider(data): + # Check if we are allowed to add AlgoProviders from the config file + config = get_config() + creation_allowed = config["ALGO_PROVIDERS_CONFIG"]["creation"] + if not creation_allowed: + return "AlgoProvider creation is not allowed", 403 + + # Check if algoProviders already exists + if algo_provider_manager.algo_provider_exists(data["name"]): + return "AlgoProvider '" + data["name"] + "' already exists", 400 + + # Check if algoProviders name is valid + if not is_valid_name(data["name"]): + return "Invalid algoProviders name", 400 + + # Add the algoProvider + # Check if url is valid + if not is_url_valid(data["url"]): + return "Invalid url", 400 + + algo_provider_manager.add(AlgoProvider(data["url"], data["name"])) + + return None, 204 + + +def use_algo(algoProviderName, algoId, data): + # Check if algoProviders exists + if not algo_provider_manager.algo_provider_exists(algoProviderName): + return "AlgoProvider " + algoProviderName + " does not exists", 404 + + try: + # Use algoProviders + algo_provider = algo_provider_manager.get_single_algo_provider(algoProviderName) + return algo_provider.use_algorithm(algoId, data), 200 + except AlgoProviderException as e: + return e.message, e.status_code + + +def delete_algo_provider(name): + # Check if we are allowed to add AlgoProviders from the config file + config = get_config() + deletion_allowed = config["ALGO_PROVIDERS_CONFIG"]["deletion"] + if not deletion_allowed: + return "AlgoProvider deletion is not allowed", 403 + + # Delete the algoProvider + try: + algo_provider_manager.delete(name) + return None, 204 + except AlgoProviderException as e: + return e.message, e.status_code diff --git a/build/lib/backend/controller/data.py b/build/lib/backend/controller/data.py new file mode 100644 index 000000000..420cb5d6e --- /dev/null +++ b/build/lib/backend/controller/data.py @@ -0,0 +1,38 @@ +############################################################################# +# Imports +############################################################################# +import backend.modules.dataProviders.dataProviderManager as data_provider_manager +from backend.modules.dataProviders.DataProviderException import DataProviderException + +############################################################################# +# Data Management +############################################################################# + + +def get_data(dataProviderId, projectId, 
data): + # return a project data from a list of ids + sampleIds = data["sampleIds"] + analysis = data["analysis"] + + try: + # Find the data provider + data_provider = data_provider_manager.get_single_data_provider(dataProviderId) + + # Ask for the data + samples = data_provider.get_samples(projectId, analysis, sampleIds) + + if samples is not None: + return { + "data": samples, + "dataMap": True, + }, 200 + + return ( + "Can't find samples for project " + + projectId + + " on data provider : " + + dataProviderId, + 404, + ) + except DataProviderException as e: + return e.message, e.status_code diff --git a/build/lib/backend/controller/dataProviders.py b/build/lib/backend/controller/dataProviders.py new file mode 100644 index 000000000..503a0fa21 --- /dev/null +++ b/build/lib/backend/controller/dataProviders.py @@ -0,0 +1,88 @@ +############################################################################# +# Imports +############################################################################# +from backend.config.init_config import get_config +from backend.modules.dataProviders.webDataProvider.WebDataProvider import WebDataProvider +from backend.utils.utils import is_url_valid +import backend.modules.dataProviders.dataProviderManager as data_provider_manager +from backend.modules.dataProviders.DataProviderException import DataProviderException + +############################################################################# +# Data Providers Management +############################################################################# + + +def get_data_providers(): + data_provider_list = data_provider_manager.get_data_provider_list() + providers_formatted = [] + for data_provider in data_provider_list: + data = {} + if data_provider.type != "Python module Data Provider": + data["url"] = data_provider.url + data["status"] = data_provider.is_alive() + + data["name"] = data_provider.name + data["type"] = data_provider.type + + providers_formatted.append(data) + + return providers_formatted, 200 + + +def get_data_provider_info(dataProviderId): + try: + data_provider = data_provider_manager.get_single_data_provider(dataProviderId) + info = data_provider.get_info() + + return info, 200 + except DataProviderException as e: + return e.message, e.status_code + + +def post_data_providers(data): + # Check if we are allowed to add data providers from the config file + config = get_config() + creation_allowed = config["DATA_PROVIDERS_CONFIG"]["creation"] + if not creation_allowed: + return "Data provider creation is not allowed", 403 + + # Check if data provider already exists + if data_provider_manager.data_provider_exists(data["name"]): + return "Data provider already exists", 400 + + # Check if data provider name is valid + if not data_provider_manager.is_valid_name(data["name"]): + return "Invalid data provider name", 400 + + try: + # Add data provider + if data["type"].lower() == "web": + # Check if url is valid + if "url" not in data: + return "A url must be provided", 400 + + if not is_url_valid(data["url"]): + return "Invalid url", 400 + + data_provider_manager.add(WebDataProvider(data["url"], data["name"])) + else: + return "Invalid data provider type, valid types are: Web", 400 + + return None, 204 + except DataProviderException as e: + return e.message, e.status_code + + +def delete_data_providers(dataProviderId): + # Check if we are allowed to add data providers from the config file + config = get_config() + deletion_allowed = config["DATA_PROVIDERS_CONFIG"]["deletion"] + if not 
deletion_allowed: + return "Data provider deletion is not allowed", 403 + + # Delete data provider + try: + data_provider_manager.delete(dataProviderId) + return None, 204 + except DataProviderException as e: + return e.message, e.status_code diff --git a/build/lib/backend/controller/exportMethods.py b/build/lib/backend/controller/exportMethods.py new file mode 100644 index 000000000..227c40767 --- /dev/null +++ b/build/lib/backend/controller/exportMethods.py @@ -0,0 +1,51 @@ +from backend.config.init_config import get_config +import backend.modules.exportMethods.exportUtils as exportUtils + +############################################################################# +# Export API Management +############################################################################# + + +def get_export_methods(): + # ParametersCheck + return exportUtils.get_export_methods(), 200 + + +def post_export_method(data): + # Check if the creation of export methods is allowed + config = get_config() + creation_allowed = config["EXPORT_METHODS_CONFIG"]["creation"] + if not creation_allowed: + return "Export method creation is not allowed", 403 + + try: + return exportUtils.add_export_method(data), 200 + except Exception as e: + return str(e), 400 + + +def delete_export_method(exportMethodId): + # Check if the deletion of export methods is allowed + config = get_config() + deletion_allowed = config["EXPORT_METHODS_CONFIG"]["deletion"] + if not deletion_allowed: + return "Export method deletion is not allowed", 403 + + try: + return exportUtils.delete_export_method(exportMethodId), 200 + except Exception as e: + return str(e), 400 + + +def exportSelection(dataProviderId, projectId, data): + try: + return exportUtils.exportSelection(dataProviderId, projectId, data), 200 + except Exception as e: + return str(e), 400 + + +def exportData(exportMethodId, data): + try: + return exportUtils.exportData(exportMethodId, data), 200 + except Exception as e: + return str(e), 400 diff --git a/build/lib/backend/controller/layouts.py b/build/lib/backend/controller/layouts.py new file mode 100644 index 000000000..da3962a15 --- /dev/null +++ b/build/lib/backend/controller/layouts.py @@ -0,0 +1,23 @@ +############################################################################# +# Imports +############################################################################# +import backend.utils.layouts.layouts as layoutsUtils + +############################################################################# +# Analysis dashboard layout Management +############################################################################# + + +def get_layouts(): + layouts_overview = layoutsUtils.get_layouts() + return layouts_overview, 200 + + +def post_layout(data): + layoutsUtils.add_layout(data) + return None, 204 + + +def delete_layout(id): + layoutsUtils.delete_layout(id) + return None, 204 diff --git a/build/lib/backend/controller/models.py b/build/lib/backend/controller/models.py new file mode 100644 index 000000000..4137e33ea --- /dev/null +++ b/build/lib/backend/controller/models.py @@ -0,0 +1,56 @@ +############################################################################# +# Imports +############################################################################# + +import backend.modules.dataProviders.dataProviderManager as data_provider_manager +from backend.modules.dataProviders.DataProviderException import DataProviderException + +############################################################################# +# MODELS Management 
+############################################################################# + + +def get_model_id_list(dataProviderId, projectId, modelId): + """ + Get the list of models for a project + """ + try: + data_provider = data_provider_manager.get_single_data_provider(dataProviderId) + return list(data_provider.get_model_results_id_list(projectId, modelId)), 200 + except DataProviderException as e: + return e.message, e.status_code + + +def get_results(dataProviderId, projectId, modelId, data): + """ + Get the model results from a sample list + """ + try: + data_provider = data_provider_manager.get_single_data_provider(dataProviderId) + return ( + data_provider.get_model_results(projectId, modelId, data["sampleIds"]), + 200, + ) + except DataProviderException as e: + return e.message, e.status_code + + +def post_model(dataProviderId, projectId, data): + # Create a new model + try: + data_provider = data_provider_manager.get_single_data_provider(dataProviderId) + data_provider.create_model(projectId, data) + return "model created", 200 + except DataProviderException as e: + return e.message, e.status_code + + +def delete_model(dataProviderId, projectId, modelId): + """ + Delete a model + """ + try: + data_provider = data_provider_manager.get_single_data_provider(dataProviderId) + return data_provider.delete_model(projectId, modelId), 200 + except DataProviderException as e: + return e.message, e.status_code diff --git a/build/lib/backend/controller/projects.py b/build/lib/backend/controller/projects.py new file mode 100644 index 000000000..fdfd26ee3 --- /dev/null +++ b/build/lib/backend/controller/projects.py @@ -0,0 +1,98 @@ +############################################################################# +# Imports +############################################################################# +from backend.modules.dataProviders.DataProviderException import DataProviderException +import backend.modules.dataProviders.dataProviderManager as data_provider_manager + +############################################################################# +# PROJECTS Management +############################################################################# + + +def ping(): + return "Online", 200 + + +def get_data_providers_project(dataProviderId): + # Return a list of project overviews for a specific data provider + data_provider = data_provider_manager.get_single_data_provider(dataProviderId) + + if data_provider is None: + return "Data provider not found", 404 + + try: + projects = data_provider.get_projects() + + if projects is not None: + # Adding data provider id to projects + for project in projects: + project["dataProviderId"] = data_provider.name + + except DataProviderException as e: + print("Warning get DP projects : " + e.message) + + return projects, 200 + + +def get_projects(): + # Return a list of project overviews from all the data providers + data_providers_list = data_provider_manager.get_data_provider_list() + projectOverviews = [] + for data_provider in data_providers_list: + try: + projects = data_provider.get_projects() + + if projects is not None: + # Adding data provider id to projects + for project in projects: + project["dataProviderId"] = data_provider.name + + projectOverviews.extend(projects) + + except DataProviderException as e: + print("Warning get DP projects : " + e.message) + + return projectOverviews, 200 + + +def get_project(dataProviderId, projectId): + # return the info about datasets, models, selections & tags + try: + data_provider = 
data_provider_manager.get_single_data_provider(dataProviderId) + + project = data_provider.get_project(projectId) + + # Adding data provider id to project + project["dataProviderId"] = dataProviderId + + return project, 200 + except DataProviderException as e: + return e.message, e.status_code + + +def get_data_id_list(dataProviderId, projectId, requestParameters): + # return the list of data ids + try: + data_provider = data_provider_manager.get_single_data_provider(dataProviderId) + + data_id_list = data_provider.get_id_list( + projectId, + requestParameters["analysis"], + requestParameters["from"], + requestParameters["to"], + ) + + return data_id_list, 200 + except DataProviderException as e: + return e.message, e.status_code + + +def delete_project(dataProviderId, projectId): + # Delete a project + try: + data_provider = data_provider_manager.get_single_data_provider(dataProviderId) + + data_provider.delete_project(projectId) + return "Project deleted", 200 + except DataProviderException as e: + return e.message, e.status_code diff --git a/build/lib/backend/controller/pythonModuleDp.py b/build/lib/backend/controller/pythonModuleDp.py new file mode 100644 index 000000000..432b57944 --- /dev/null +++ b/build/lib/backend/controller/pythonModuleDp.py @@ -0,0 +1,66 @@ +from backend.modules.dataProviders.DataProviderException import DataProviderException +import backend.modules.dataProviders.dataProviderManager as data_provider_manager + + +# Project +def post_project(data): + # Ask a data provider to create a project + dataProviderId = "Python module Data Provider" + projectName = data["projectName"] + + # Check project name + if len(projectName) > 100: + return "Project name too long", 400 + + # Create project + try: + data_provider = data_provider_manager.get_single_data_provider(dataProviderId) + + project = data_provider.create_project(projectName) + + # Adding data provider id to project + project["dataProviderId"] = dataProviderId + + return project, 200 + except DataProviderException as e: + return e.message, e.status_code + + +# Block level +def post_block_levels(dataProviderId, projectId, block_levels): + try: + data_provider = data_provider_manager.get_single_data_provider(dataProviderId) + data_provider.update_block_structure(projectId, block_levels) + return block_levels, 200 + except DataProviderException as e: + return e.message, e.status_code + + +# Expected_results +def post_resultsStructure(dataProviderId, projectId, resultStructure): + # Add the expected results structure + try: + data_provider = data_provider_manager.get_single_data_provider(dataProviderId) + data_provider.update_results_structure(projectId, resultStructure) + return resultStructure, 200 + except DataProviderException as e: + return e.message, e.status_code + + +def post_block_tree(dataProviderId, projectId, data): + # Add data to a project from a tree + try: + data_provider = data_provider_manager.get_single_data_provider(dataProviderId) + return data_provider.add_block_tree(projectId, data), 200 + except DataProviderException as e: + return e.message, e.status_code + + +# Add model results +def add_results_dict(dataProviderId, projectId, modelId, data): + try: + data_provider = data_provider_manager.get_single_data_provider(dataProviderId) + data_provider.add_results_dict(projectId, modelId, data) + return "Results added", 200 + except DataProviderException as e: + return e.message, e.status_code diff --git a/build/lib/backend/controller/selection.py b/build/lib/backend/controller/selection.py new file 
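Each controller above follows the same convention: a plain function that returns a (body, HTTP status) tuple and turns a DataProviderException into an error response. A sketch of that pattern for a hypothetical controller, not part of the patch; it assumes DataProviderException is constructed with a message and a status code, mirroring how AlgoProviderException is raised later in this patch:

    from backend.modules.dataProviders.DataProviderException import DataProviderException

    def get_something(dataProviderId):  # hypothetical example controller
        try:
            # a real controller would look up the data provider and query it here
            raise DataProviderException("Data provider not found", 404)
        except DataProviderException as e:
            return e.message, e.status_code

    print(get_something("unknown"))  # ('Data provider not found', 404)
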
mode 100644 index 000000000..2e5ddc0e1 --- /dev/null +++ b/build/lib/backend/controller/selection.py @@ -0,0 +1,49 @@ +############################################################################# +# Imports +############################################################################# + +import backend.modules.dataProviders.dataProviderManager as data_provider_manager +from backend.modules.dataProviders.DataProviderException import DataProviderException + +############################################################################# +# Selections Management +############################################################################# + + +def get_selections(dataProviderId, projectId): + try: + data_provider = data_provider_manager.get_single_data_provider(dataProviderId) + return data_provider.get_selections(projectId), 200 + except DataProviderException as e: + return e.message, e.status_code + + +def get_selection_id_list(dataProviderId, projectId, selectionId): + try: + data_provider = data_provider_manager.get_single_data_provider(dataProviderId) + return data_provider.get_selection_id_list(projectId, selectionId), 200 + except DataProviderException as e: + return e.message, e.status_code + + +def post_selection(dataProviderId, projectId, data): + try: + data_provider = data_provider_manager.get_single_data_provider(dataProviderId) + data_provider.create_selection( + projectId, + data["selectionName"], + data["sampleHashList"], + data["requestId"] if "requestId" in data else None, + ) + return "Selection added", 200 + except DataProviderException as e: + return e.message, e.status_code + + +def delete_selection(dataProviderId, projectId, selectionId): + try: + data_provider = data_provider_manager.get_single_data_provider(dataProviderId) + data_provider.delete_selection(projectId, selectionId) + return "Selection deleted", 200 + except DataProviderException as e: + return e.message, e.status_code diff --git a/build/lib/backend/controller/statisticalOperations.py b/build/lib/backend/controller/statisticalOperations.py new file mode 100644 index 000000000..dcc3482d4 --- /dev/null +++ b/build/lib/backend/controller/statisticalOperations.py @@ -0,0 +1,646 @@ +import pandas as pd +import numpy as np +import numpy.random as nr + +from scipy.stats.stats import pearsonr, spearmanr +from scipy.special import digamma +import scipy.spatial as ss +from scipy.spatial.ckdtree import cKDTree +from sklearn.neighbors import NearestNeighbors +from math import log, fabs, sqrt + + +#  === Correlation matrix === +def pearsonCorrelation(data): + """Computes the Pearson's coefficient for every pair of variables provided + + Parameters + ---------- + data: list of lists where each list contains the observations of a single variable + : Array of rows with the same sizes (discrete & continuous) + + Return + ------ + result: correlation matrix along with the p-value of the significance test of the coefficients + significance level legend: 3(***) -> p-value<0.01 -> The coefficient is significant at 99% + 2(**) -> p-value<0.05 ->The coefficient is significant at 95% + 1(*) -> p-value<0.1 -> The coefficient is significant at 90% + + """ + + for i in range(len(data) - 1): + assert len(data[i]) == len( + data[i + 1] + ), "The provided samples should have the same length" + # transform the list of samples to dataframe + df = pd.DataFrame(data).transpose() + rho = df.corr() # calculate the correlation matrix + pval = df.corr(method=lambda x, y: pearsonr(x, y)[1]) - np.eye( + *rho.shape + ) # calculate the p-value + # 
return the number of * + p = pval.applymap(lambda x: (len([i for t in [0.01, 0.05, 0.1] if x <= t]))) + ret = rho.values.tolist() + for i in range(rho.shape[0]): + for j in range(rho.shape[1]): + ret[i][j] = [float(rho[i][j]), float(p[i][j])] + return ( + ret, + 200, + ) # pearson correlation matrix with the significance of the coefficient + + +def spearmanCorrelation(data): + """ + Computes the Spearman's coefficient for every pair of variables provided + + Parameters + ---------- + data: list of lists where each list contains the observations of a single variable + + Return + ------ + result: correlation matrix along with the p-value of the significance test of the coefficients + significance level legend: 3(***) -> p-value<0.01 -> The coefficient is significant at 99% + 2(**) -> p-value<0.05 ->The coefficient is significant at 95% + 1(*) -> p-value<0.1 -> The coefficient is significant at 90% + + """ + for i in range(len(data) - 1): + assert len(data[i]) == len( + data[i + 1] + ), "The provided samples should have the same length" + # transform the list of samples to dataframe + df = pd.DataFrame(data).transpose() + rho = df.corr(method="spearman") # calculate the correlation matrix + pval = df.corr(method=lambda x, y: spearmanr(x, y)[1]) - np.eye( + *rho.shape + ) # calculate the p-value + # return the number of significant level + p = pval.applymap(lambda x: (len([i for t in [0.01, 0.05, 0.1] if x <= t]))) + result = rho.values.tolist() + for i in range(rho.shape[0]): + for j in range(rho.shape[1]): + result[i][j] = [float(rho[i][j]), float(p[i][j])] + return result, 200 + + +# === Mutual Information === +def entropy_discrete(x, base=2): + """ + Computes the entropy of a discrete random variable + + Parameters: + ----------- + + x: List or array of one variable. + base: The base in which the entropy value is represented, i.e 2 for bits, 10 for nats + + Return: + ------- + + Output: A float: The value of the entropy + """ + _, count = np.unique(x, return_counts=True, axis=0) + probability = count.astype(float) / len(x) + # Removing the elements which have 0 probability/weight to avoid log(0) + probability = probability[probability > 0.0] + return np.sum(-1 * probability * np.log(probability)) / np.log(base) + + +def entropy_discrete_xy(x, y, base=2): + """ + Computes the entropy of the joint distribution of two discrete random variables + + Parameters: + ----------- + x,y : Two random variables samples of the same length + base: The base in which the entropy value is represented, i.e 2 for bits, 10 for nats + + Returns: + -------- + + Output: The value of the entropy: a float + + """ + assert len(x) == len(y), "The two provided samples should be of the same length" + xy = np.c_[x, y] + # construction of point : + # Example : + # (x,y) + # [[1. 2.] + # [2. 4.] + # [3. 
5.]] + return entropy_discrete(xy, base) + + +def discrete_mutual_information(x, y, base=2): + """ + Computes the mutual information of two discrete random variables: x,y + + Parameters: + ----------- + + x,y: Two random variable samples of the same length + base: The base in which the entropy value is represented, i.e 2 for bits, 10 for nats + + Returns: + ------- + + Output: The value of the mutual information + """ + assert len(x) == len(y), "The two provided samples should be of the same length" + return ( + entropy_discrete(x, base) + + entropy_discrete(y, base) + - entropy_discrete_xy(x, y, base) + ) + + +def continuous_mutual_information(x, y, k=1, base=2): + """ + Computes the mutual information between two continuous random variables + + Parameters: + ----------- + x,y: Data: lists or numpy arrays + k: the number of neighbors to consider + base: The base in which the entropy value is represented, i.e 2 for bits, 10 for nats + + Returns: + -------- + + Output: the mutual information + """ + x, y = np.asarray(x), np.asarray(y) + x, y = x.reshape(x.shape[0], -1), y.reshape(y.shape[0], -1) + x = x + 1e-10 * np.random.random_sample(x.shape) + y = y + 1e-10 * np.random.random_sample(y.shape) + xy = np.c_[x, y] + x_tree = cKDTree(x) + y_tree = cKDTree(y) + xy_tree = cKDTree(xy) + # query with k=k+1 to return the nearest neighbor, not counting the data point itself + dist, _ = xy_tree.query(xy, k=k + 1, p=np.inf) + epsilon = dist[:, -1] + + # for each point, count the number of neighbors + # whose distance in the x-subspace is strictly < epsilon + # repeat for the y subspace + n = len(x) + nx = np.empty(n, dtype=np.int) + ny = np.empty(n, dtype=np.int) + for ii in range(n): + if epsilon[ii] <= 1e-10: + nx[ii] = len(x_tree.query_ball_point(x_tree.data[ii], r=1e-9, p=np.inf)) - 1 + ny[ii] = len(y_tree.query_ball_point(y_tree.data[ii], r=1e-9, p=np.inf)) - 1 + else: + nx[ii] = ( + len( + x_tree.query_ball_point( + x_tree.data[ii], r=epsilon[ii] - 1e-9, p=np.inf + ) + ) + - 1 + ) + ny[ii] = ( + len( + y_tree.query_ball_point( + y_tree.data[ii], r=epsilon[ii] - 1e-9, p=np.inf + ) + ) + - 1 + ) + + mi = ( + digamma(k) - np.mean(digamma(nx + 1) + digamma(ny + 1)) + digamma(n) + ) / np.log( + base + ) # version (1) in krakow scientific paper + + return mi + + +def mixed_mutual_information(c, d, n_neighbors, base=10): + """ + Compute mutual information between continuous and discrete variables. + + Parameters + ---------- + c : ndarray, shape (n_samples,) + Samples of a continuous random variable. + d : ndarray, shape (n_samples,) + Samples of a discrete random variable. + n_neighbors : int + Number of nearest neighbors to search for each point + base: The base in which the entropy value is represented, i.e 2 for bits, 10 for nats + + + Returns: + -------- + Output: The mutual information value + """ + n_samples = c.shape[0] + c = c.reshape((-1, 1)) + + radius = np.empty(n_samples) + label_counts = np.empty(n_samples) + k_all = np.empty(n_samples) + nn = NearestNeighbors() + for label in np.unique(d): + mask = d == label + count = np.sum(mask) + if count > 1: + k = min(n_neighbors, count - 1) + nn.set_params(n_neighbors=k) + nn.fit(c[mask]) + r = nn.kneighbors()[0] + # print(r) + radius[mask] = np.nextafter(r[:, -1], 0) + # print(radius) + k_all[mask] = k + label_counts[mask] = count + + # Ignore points with unique labels. 
+ mask = label_counts > 1 + n_samples = np.sum(mask) + label_counts = label_counts[mask] + k_all = k_all[mask] + c = c[mask] + radius = radius[mask] + + nn.set_params(algorithm="kd_tree") + nn.fit(c) + ind = nn.radius_neighbors(radius=radius, return_distance=False) + m_all = np.array([i.size for i in ind]) + + mi = ( + digamma(n_samples) + + np.mean(digamma(k_all)) + - np.mean(digamma(label_counts)) + - np.mean(digamma(m_all + 1)) + ) + + return mi / log(base) + + +def normalise_function(normalise, mutual_information, entropy_X, entropy_Y): + """ + normalize the mutual information coefficient + Parameters: + ----------- + normalize: the choice of normalize function : takes either 'max' or 'min' or 'square root' or 'mean' or 'none' + mutual_information: mutual information coefficient + entropy_X: the entropy of the first variable + entropy_Y: the entropy of the second variable + Returns: + ------- + Output: + The value of the normalized mutual information coefficient + """ + if normalise == "none": + ratio = 1 + elif normalise == "max": + ratio = max(entropy_X, entropy_Y) + elif normalise == "min": + ratio = min(entropy_X, entropy_Y) + elif normalise == "square root": + ratio = sqrt(np.abs(entropy_X * entropy_Y)) + elif normalise == "mean": + ratio = (entropy_X + entropy_Y) / 2 + else: + raise NotImplementedError( + "Variable 'normalise' takes only 'max' or 'min' or 'square root' or 'mean' or 'none'" + ) + + return mutual_information / ratio + + +def continuous_iterate_function(list_continuous, k=3, base=3, normalise="none"): + """ + Parameters: + ----------- + list_continuous: list of list of the continuous variables + k: the number of neighbors to consider + base: The base in which the entropy value is represented, i.e 2 for bits, 10 for nats + normalise: the choice of normalize function : takes either 'max' or 'min' or 'square root' or 'mean' or 'none' + + Returns: + ------- + Output: + an array of the mutual information between the continuous variables + """ + continuous = np.eye(len(list_continuous), len(list_continuous)).tolist() + for i in range(len(continuous)): + for j in range(i, len(continuous)): + continuous[i][j] = continuous[j][i] = normalise_function( + normalise, + continuous_mutual_information( + list_continuous[i], list_continuous[j], k=k, base=base + ), + continuous_mutual_information( + list_continuous[i], list_continuous[i], k=k, base=base + ), + continuous_mutual_information( + list_continuous[j], list_continuous[j], k=k, base=base + ), + ) + return continuous + + +def discrete_iterate_function(list_discrete, base=10, normalise="none"): + """ + Parameters: + ----------- + list_discrete: list of list of the discrete variables + k: the number of neighbors to consider + base: The base in which the entropy value is represented, i.e 2 for bits, 10 for nats + normalise: the choice of normalize function : takes either 'max' or 'min' or 'square root' or 'mean' or 'none' + + Returns: + ------- + Output: + an array of the mutual information between the discrete variables + """ + discrete = np.eye(len(list_discrete), len(list_discrete)).tolist() + for i in range(len(discrete)): + for j in range(i, len(discrete)): + discrete[i][j] = discrete[j][i] = normalise_function( + normalise, + discrete_mutual_information( + list_discrete[i], list_discrete[j], base=base + ), + entropy_discrete(list_discrete[i], base=base), + entropy_discrete(list_discrete[j], base=base), + ) + return discrete + + +def mixed_iterate_function( + list_continuous, list_discrete, base=10, k=3, normalise="none" 
+): + """ + Parameters: + ----------- + list_continuous: list of list of the continuous variables + list_discrete: list of list of the discrete variables + k: the number of neighbors to consider + base: The base in which the entropy value is represented, i.e 2 for bits, 10 for nats + normalise: the choice of normalize function : takes either 'max' or 'min' or 'square root' or 'mean' or 'none' + + Returns: + ------- + Output: + an array of the mutual information between the all variables: the continuous and the discrete ones + """ + mixed = np.eye(len(list_continuous), len(list_discrete)).tolist() + for i in range(len(list_continuous)): + for j in range(len(list_discrete)): + mixed[i][j] = normalise_function( + normalise, + mixed_mutual_information( + np.array(list_continuous[i]), + np.array(list_discrete[j]), + n_neighbors=k, + base=base, + ), + continuous_mutual_information( + list_continuous[i], list_continuous[i], k=k, base=base + ), + entropy_discrete(list_discrete[j], base=base), + ) + mixed = pd.DataFrame(mixed) + continuous = continuous_iterate_function( + list_continuous, k=k, base=base, normalise=normalise + ) + continuous = pd.DataFrame(continuous) + discrete = discrete_iterate_function(list_discrete, base=base, normalise=normalise) + discrete = pd.DataFrame(discrete) + part1 = np.concatenate((continuous, mixed), axis=1) + part2 = np.concatenate((mixed.T, discrete), axis=1) + result = np.concatenate((part1, part2), axis=0) + return result.tolist() + + +# @utils.traceLogLight +def mutualInformation(data): + """ + the global matrix of mutual information + Parameters: + ----------- + list_continuous: list of list of the continuous variables, if there is no + continuous variables, please send an empty list of list [[]] + list_discrete: list of list of the discrete variables,if there is no discrete + variables, please send an empty list of list [[]] + k: the number of neighbors to consider + base: The base in which the entropy value is represented, i.e 2 for bits, 10 for nats + normalise: the choice of normalize function : takes either 'max' or 'min' + or 'square root' or 'mean' or 'none' + + Returns: + ------- + Output: + an array of the mutual information between the variables: the continuous + ones only, the discrete ones only or the continuous and the discrete ones + """ + k = data["k"] + list_continuous = data["list_continuous"] + list_discrete = data["list_discrete"] + + if k >= len(list_continuous) + len(list_discrete): + return "k must be lower than the number of variables", 403 + + if "base" in data: + base = data["base"] + else: + base = 2 + + if "normalise" in data: + normalise = data["normalise"] + acceptedNormalise = ["max", "min", "square root", "mean", "none"] + + if normalise not in acceptedNormalise: + return ( + "normalise need to be either 'max' or 'min' or 'square root' or 'mean' or 'none'", + 403, + ) + else: + normalise = "max" + + # Calculate mutual information between features + if list_continuous != [[]] and list_discrete == [[]]: + return continuous_iterate_function( + list_continuous, k=k, base=base, normalise=normalise + ) + elif list_continuous == [[]] and list_discrete != [[]]: + return discrete_iterate_function(list_discrete, base=base, normalise=normalise) + elif list_continuous != [[]] and list_discrete != [[]]: + return mixed_iterate_function( + list_continuous, list_discrete, base=base, k=k, normalise=normalise + ) + else: + return "The lists are empty", 403 + + +# === Mutual Information higher dimension === +def averageDigamma(points, dvec): + """ + 
This part finds number of neighbors in some radius in the marginal space + + Parameters: + ---------- + + points: the data observed + dvec: A distance vector between points + + Returns: + -------- + Output: expectation value of + + """ + + N = len(points) + tree = ss.cKDTree(points) + avg = 0.0 + for i in range(N): + dist = dvec[i] + # subtlety, we don't include the boundary point, + # but we are implicitly adding 1 to kraskov definition because center point is included + num_points = len(tree.query_ball_point(points[i], dist - 1e-15, p=float("inf"))) + avg += digamma(num_points) / N + return avg + + +# @utils.traceLogLight +def higherDimensionMutualInformation(data): + """ + This function calculates the mutual information between several continuous + variables (3, 4 variables). It takes as input a list of lists [[variable1], + [variable2], [variable3], ...], the number K, and the base, either 2 or 10 + (the unit of information is respectively bits or nats). + + It returns the mutual information between the different variables. + + Regarding its representation in the tool, we can simply create a small window + where we select the variables, K, and the base, and display only the result. + Alternatively, in your 3D plot window, we can add the result of the mutual + information of the 3 variables below the graph. + + The mutual information estimator by Kraskov et al. + ith row of X represents ith dimension of the data, e.g. X = [[1.0,3.0,3.0],[0.1,1.2,5.4]], + if X has two dimensions and we have three samples + Parameters: + ---------- + X: list of list of the variables : it could take more than 2 variables + k: the number of neighbors to consider + base: The base in which the entropy value is represented, i.e 2 for bits, 10 for nats + + Returns: + -------- + Output: the mutual information between the variables + """ + + X = data["X"] + k = data["k"] + if k >= len(X): + return "k must be < to len(X)", 403 + + if "base" in data: + base = data["base"] + else: + base = 2 + + # adding small noise to X, e.g., x<-X+noise + x = [] + for i in range(len(X)): + tem = [] + for j in range(len(X[i])): + tem.append([X[i][j] + 1e-10 * nr.rand(1)[0]]) + x.append(tem) + + points = [] + for j in range(len(x[0])): + tem = [] + for i in range(len(x)): + tem.append(x[i][j][0]) + points.append(tem) + tree = ss.cKDTree(points) + dvec = [] + for i in range(len(x)): + dvec.append([]) + for point in points: + # Find k-nearest neighbors in joint space, p=inf means max norm + knn = tree.query(point, k + 1, p=float("inf")) + points_knn = [] + for i in range(len(x)): + dvec[i].append(float("-inf")) + points_knn.append([]) + for j in range(k + 1): + for i in range(len(x)): + points_knn[i].append(points[knn[1][j]][i]) + + # Find distances to k-nearest neighbors in each marginal space + for i in range(k + 1): + for j in range(len(x)): + if dvec[j][-1] < fabs(points_knn[j][i] - points_knn[j][0]): + dvec[j][-1] = fabs(points_knn[j][i] - points_knn[j][0]) + + ret = 0.0 + for i in range(len(x)): + ret -= averageDigamma(x[i], dvec[i]) + ret += ( + digamma(k) + - (float(len(x)) - 1.0) / float(k) + + (float(len(x)) - 1.0) * digamma(len(x[0])) + ) + if base == 2: + mul = 1 / log(2) # scaling factor from nats to bits + ret *= mul + return ret + + pass + + +# === Mutual Information matrix & Mutual Information higher dimension === +def mutualAndHigherInformation(data): + """ + calculate the mutual information estimator by Kraskov et al. 
+ and the global matrix of mutual information + """ + + k = data["k"] + list_continuous = data["list_continuous"] + list_discrete = data["list_discrete"] + columns = list_continuous + list_discrete + if "base" in data: + base = data["base"] + else: + base = 2 + + if "normalise" in data: + normalise = data["normalise"] + else: + normalise = "max" + + if k >= len(columns): + return "k need to be < len(X)", 403 + + # higherDimensionMutualInformation + print("higherDimensionMutualInformation") + hdmi = higherDimensionMutualInformation( + {"k": k, "base": base, "X": list_continuous + list_discrete} + ) + + # mutualInformation + print("mutualInformation") + mi = mutualInformation( + { + "k": k, + "base": base, + "list_continuous": list_continuous, + "list_discrete": list_discrete, + "normalise": normalise, + } + ) + + return {"higherDimensionMutualInformation": hdmi, "mutualInformation": mi}, 200 diff --git a/build/lib/backend/controller/widgetConfigurations.py b/build/lib/backend/controller/widgetConfigurations.py new file mode 100644 index 000000000..90b3e3764 --- /dev/null +++ b/build/lib/backend/controller/widgetConfigurations.py @@ -0,0 +1,28 @@ +############################################################################# +# Imports +############################################################################# +import backend.utils.widgetConfigurations.widgetConfigurations as widgetConfUtils + +############################################################################# +# Widget configuration Management +############################################################################# + + +def get_all_configurations(): + configurations_overview = widgetConfUtils.get_configurations_overview() + return configurations_overview, 200 + + +def get_widget_configurations(widgetKey): + configurations = widgetConfUtils.get_configurations(widgetKey) + return configurations, 200 + + +def post_configuration(widgetKey, data): + widgetConfUtils.add_configuration(widgetKey, data) + return None, 204 + + +def delete_configuration(widgetKey, id): + widgetConfUtils.delete_configuration(widgetKey, id) + return None, 204 diff --git a/build/lib/backend/init.py b/build/lib/backend/init.py new file mode 100644 index 000000000..d0f645742 --- /dev/null +++ b/build/lib/backend/init.py @@ -0,0 +1,45 @@ +# import backend.modules.dataProviders.dataProviderManager as dataProviderManager +# import backend.modules.exportMethods.exportUtils as exportUtils +# import backend.modules.algoProviders.algoProvidersManager as algoProvidersManager +# import backend.utils.widgetConfigurations.widgetConfigurations as widgetConfUtils +# import backend.utils.layouts.layouts as layoutsUtils +# import config.init_config as config + +from backend.modules.dataProviders import ( + dataProviderManager, +) +from backend.modules.exportMethods import ( + exportUtils, +) +from backend.modules.algoProviders import ( + algoProvidersManager, +) +from backend.utils.widgetConfigurations import ( + widgetConfigurations as widgetConfUtils, +) +from backend.utils.layouts import ( + layouts as layoutsUtils, +) +from backend.config import ( + init_config as config, +) + + +def init(): + # Init config file + config.init_config() + + # Init data providers + dataProviderManager.setup_data_providers() + + # Init AlgoProviders + algoProvidersManager.setup_algo_providers() + + # Init export methods + exportUtils.load_export_methods() + + # Init widget configurations + widgetConfUtils.setup_widget_configurations() + + # Init layouts + layoutsUtils.setup_layouts() 
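For reference, a minimal usage sketch for the mutual information helpers added in statisticalOperations.py above (the values, the choice of k and the "max" normalisation are illustrative only, and both functions are assumed to be called from within that module):

    data = {
        "k": 2,
        "list_continuous": [[1.2, 3.4, 2.2, 5.1, 0.8, 4.0], [0.3, 0.1, 0.9, 0.7, 0.5, 0.2]],
        "list_discrete": [[0, 1, 1, 0, 1, 0]],
        "base": 2,
        "normalise": "max",
    }
    matrix = mutualInformation(data)  # square matrix over the continuous and discrete variables
    combined, status = mutualAndHigherInformation(data)  # also runs the Kraskov higher-dimension estimator
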
diff --git a/build/lib/backend/modules/__init__.py b/build/lib/backend/modules/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/build/lib/backend/modules/algoProviders/AlgoProvider.py b/build/lib/backend/modules/algoProviders/AlgoProvider.py new file mode 100644 index 000000000..d446c655a --- /dev/null +++ b/build/lib/backend/modules/algoProviders/AlgoProvider.py @@ -0,0 +1,109 @@ +# Class for AlgoProvider +import requests +import json +from backend.modules.algoProviders.AlgoProviderException import AlgoProviderException + + +class AlgoProvider: + def __init__(self, url, name): + self.url = url + self.name = name + self.alive = False + + def is_alive(self): + # Try to load algorithms + self.alive = True if self.get_algorithms() is not None else False + return self.alive + + def get_algorithms(self): + try: + r = requests.get(self.url + "/algorithms") + return get_http_response(r) + except ( + requests.exceptions.ConnectionError, + requests.exceptions.Timeout, + requests.exceptions.HTTPError, + ): + return None + + except Exception as e: + print("Error in get_algorithms") + print(e) + return None + + def to_json(self): + algorithms = None + if self.is_alive(): + algorithms = self.get_algorithms() + + return { + "name": self.name, + "url": self.url, + "status": self.alive, + "algorithms": algorithms, + } + + def use_algorithm(self, algorithm_id, data): + try: + print("Using algoProvider: " + self.url) + print("Using algorithm: " + algorithm_id) + r = requests.post( + self.url + "/algorithms/" + algorithm_id + "/run", json=data + ) + if r.raise_for_status() is None: + return get_valid_response(r) + except ( + requests.exceptions.ConnectionError, + requests.exceptions.Timeout, + ) as e: + print("The algoProvider is not reachable") + print(e) + raise AlgoProviderException("AlgoProvider not reachable", 500) + except requests.exceptions.HTTPError as e: + print("The algoProvider returned an error") + print(e) + print(e.response.text) + print(e.response.json()) + + if "detail" in e.response.json(): + raise AlgoProviderException(e.response.json()["detail"], 400) + + if e.response.status_code == 500: + raise AlgoProviderException( + "AlgoProvider internal server error: " + str(e), 500 + ) + elif e.response.status_code == 400: + raise AlgoProviderException(e.response.text, 400) + + elif e.response.status_code == 404: + raise AlgoProviderException( + "The algoProvider may not have this algorithm, " + e.response.text, + 404, + ) + else: + raise AlgoProviderException(str(e), 400) + + +# ==== Utils ==== +def get_http_response(response): + try: + if response.raise_for_status() is None: + return get_valid_response(response) + except requests.exceptions.HTTPError: + return get_error_response(response) + + +def get_valid_response(response): + if response.status_code == 204: + return True + try: + return response.json() + except json.decoder.JSONDecodeError: + return + + +def get_error_response(response): + if response.status_code == 500: + raise AlgoProviderException("AlgoProvider unexpected Error", 500) + + raise AlgoProviderException(response.text, response.status_code) diff --git a/build/lib/backend/modules/algoProviders/AlgoProviderException.py b/build/lib/backend/modules/algoProviders/AlgoProviderException.py new file mode 100644 index 000000000..8e3ebc24b --- /dev/null +++ b/build/lib/backend/modules/algoProviders/AlgoProviderException.py @@ -0,0 +1,15 @@ +# Description: Exception class for AlgoProvider +class AlgoProviderException(Exception): + message = "AlgoProvider error" + 
status_code = 500 + + def __init__(self, message=None, status_code=None): + super(AlgoProviderException, self).__init__(message) + + if message is not None: + self.message = message + if status_code is not None: + self.status_code = status_code + + def __str__(self): + return self.message diff --git a/build/lib/backend/modules/algoProviders/__init__.py b/build/lib/backend/modules/algoProviders/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/build/lib/backend/modules/algoProviders/algoProvidersManager.py b/build/lib/backend/modules/algoProviders/algoProvidersManager.py new file mode 100644 index 000000000..17d2bf79d --- /dev/null +++ b/build/lib/backend/modules/algoProviders/algoProvidersManager.py @@ -0,0 +1,114 @@ +from termcolor import colored + +from backend.config.init_config import get_config, DEBUG_COLOR, ERROR_COLOR, SUCCESS_COLOR +from backend.modules.algoProviders.AlgoProviderException import AlgoProviderException +from backend.modules.algoProviders.AlgoProvider import AlgoProvider +from backend.modules.algoProviders.integratedAlgoProvider.integratedAlgoProvider import ( + IntegratedAlgoProvider, +) + +algo_providers = [] + + +def setup_algo_providers(): + print("================== ALGO PROVIDERS ==================") + config = get_config() + config_algo_providers = config["ALGO_PROVIDERS"] + + keys = list(config_algo_providers.keys()) + values = list(config_algo_providers.values()) + + # Add AlgoProviders from config file + print(" - Loading Algo providers from config file") + for i in range(len(config_algo_providers)): + name = keys[i] + url = values[i] + + # Remove trailing slash + if url[-1] == "/": + url = url[:-1] + + print( + " - Adding AlgoProvider " + + colored(name, DEBUG_COLOR) + + " (" + + colored(url, DEBUG_COLOR) + + ")" + ) + try: + algo_provider = AlgoProvider(url, name) + algo_providers.append(algo_provider) + + if algo_provider.is_alive(): + print(colored(" [SUCCESS]", SUCCESS_COLOR) + " AlgoProvider ready") + else: + raise AlgoProviderException() + + except AlgoProviderException: + print( + colored(" [ERROR]", ERROR_COLOR) + + " AlgoProvider " + + colored(name, ERROR_COLOR) + + " is not accessible" + ) + + # Add the integrated algo provider + enable_integrated = config["ALGO_PROVIDERS_CONFIG"]["enable_integrated"] + if enable_integrated: + print(" - Adding integrated AlgoProviders") + algo_provider = IntegratedAlgoProvider() + nb_algos = len(algo_provider.get_algorithms()) + algo_providers.append(algo_provider) + + if nb_algos > 0: + print( + colored(" [SUCCESS]", SUCCESS_COLOR) + + " Integrated AlgoProvider ready with " + + str(nb_algos) + + " algorithms" + ) + else: + print(" No algorithms found") + + if len(algo_providers) == 0: + print("No Algo providers") + + +def get_algo_providers(): + return algo_providers + + +def get_algo_providers_json(): + algo_providers_json = [] + for algo_provider in algo_providers: + algo_providers_json.append(algo_provider.to_json()) + + return algo_providers_json + + +def algo_provider_exists(name): + for d in algo_providers: + if d.name == name: + return True + return False + + +def add(algo_provider): + algo_providers.append(algo_provider) + return + + +def get_single_algo_provider(name): + # Return the algo provider with the given name + for d in algo_providers: + if d.name == name: + return d + + raise AlgoProviderException("Algo provider not found", 404) + + +def delete(name): + for d in algo_providers: + if d.name == name: + algo_providers.remove(d) + return diff --git 
a/build/lib/backend/modules/algoProviders/integratedAlgoProvider/__init__.py b/build/lib/backend/modules/algoProviders/integratedAlgoProvider/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/build/lib/backend/modules/algoProviders/integratedAlgoProvider/algorithms/__init__.py b/build/lib/backend/modules/algoProviders/integratedAlgoProvider/algorithms/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/build/lib/backend/modules/algoProviders/integratedAlgoProvider/algorithms/classificationErrorMetric.py b/build/lib/backend/modules/algoProviders/integratedAlgoProvider/algorithms/classificationErrorMetric.py new file mode 100644 index 000000000..df545d8da --- /dev/null +++ b/build/lib/backend/modules/algoProviders/integratedAlgoProvider/algorithms/classificationErrorMetric.py @@ -0,0 +1,99 @@ +from ..utils import get_input_from_inputs + +# This algorithm is a simple classification metric calculator +# It takes a list of values corresponding to the ground truth +# and a list of values corresponding to the predictions + +# It returns a list of True/False values corresponding to +# whether the prediction is correct or not + +# It also returns the accuracy percentage and the number of correct predictions + +# Technical details (must respect the algo-api format): +algorithm_description = { + "name": "Classification Metric", + "description": """Calculates the classification error according \ +to the ground truth and the predictions""", + "author": "DebiAI", + "version": "1.0.0", + "creationDate": "2023-10-30", + "tags": ["metrics", "classification"], + "inputs": [ + { + "name": "Ground truth", + "description": "List of ground truth values", + "type": "array", + "arrayType": "text", + }, + { + "name": "Predictions", + "description": "List of predictions, must have the same \ +length as the ground truth list", + "type": "array", + "arrayType": "text", + }, + ], + "outputs": [ + { + "name": "Binary error", + "description": "Classification metric of the input list, \ +False if GDT == PRED, True otherwise", + "type": "array", + "arrayType": "boolean", + }, + { + "name": "Binary success", + "description": "Classification metric of the input list, \ +True if GDT == PRED, False otherwise", + "type": "array", + "arrayType": "boolean", + }, + { + "name": "Accuracy", + "description": "Percentage of correct predictions", + "type": "number", + }, + { + "name": "Number of correct predictions", + "type": "number", + }, + ], +} + + +def get_algorithm_details(): + return algorithm_description + + +def use_algorithm(inputs): + # Get inputs + gdt = get_input_from_inputs(inputs, "Ground truth", "array") + predictions = get_input_from_inputs(inputs, "Predictions", "array") + + # Check inputs + if len(gdt) != len(predictions): + raise TypeError("Ground truth and predictions must have the same length") + + # Calculate classification metric + binary_error = [None] * len(gdt) + binary_success = [None] * len(gdt) + nb_correct_predictions = 0 + for i in range(len(gdt)): + if gdt[i] == predictions[i]: + nb_correct_predictions += 1 + binary_error[i] = False + binary_success[i] = True + else: + binary_error[i] = True + binary_success[i] = False + + # Calculate accuracy + accuracy = nb_correct_predictions / len(binary_success) + + # Return outputs + return [ + {"name": "Binary error", "value": binary_error}, + {"name": "Binary success", "value": binary_success}, + {"name": "Accuracy", "value": accuracy}, + {"name": "Number of correct predictions", "value": nb_correct_predictions}, + ] 
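+
+
+# Example usage (illustrative values only; use_algorithm is normally invoked by
+# the integrated AlgoProvider rather than called directly):
+#
+#     inputs = [
+#         {"name": "Ground truth", "value": ["cat", "dog", "cat", "dog"]},
+#         {"name": "Predictions", "value": ["cat", "cat", "cat", "dog"]},
+#     ]
+#     outputs = use_algorithm(inputs)
+#     # Binary error  -> [False, True, False, False]
+#     # Accuracy      -> 0.75 (a fraction, despite the "percentage" wording above)
+#     # Number of correct predictions -> 3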
diff --git a/build/lib/backend/modules/algoProviders/integratedAlgoProvider/algorithms/regressionErrorMetric.py b/build/lib/backend/modules/algoProviders/integratedAlgoProvider/algorithms/regressionErrorMetric.py new file mode 100644 index 000000000..30315775e --- /dev/null +++ b/build/lib/backend/modules/algoProviders/integratedAlgoProvider/algorithms/regressionErrorMetric.py @@ -0,0 +1,149 @@ +from ..utils import get_input_from_inputs + +# This algorithm is a simple regression metric calculator +# It takes a list of numbers corresponding to an error +# and a ceil corresponding to the maximum acceptable error + +# It returns a list of True/False values corresponding to +# whether the error is acceptable or not + +# It also returns the percentage of acceptable errors + +# Technical details (must respect the algo-api format): +algorithm_description = { + "name": "Regression Metric", + "description": """Calculates the regression error according to the ground truth, \ +the predictions and a ceil value""", + "author": "DebiAI", + "version": "1.0.0", + "creationDate": "2023-05-23", + "tags": ["metrics", "regression"], + "inputs": [ + { + "name": "Ground truth", + "description": "List of ground truth values", + "type": "array", + "arrayType": "number", + }, + { + "name": "Predictions", + "description": "List of predictions, must have the same length as the \ +ground truth list", + "type": "array", + "arrayType": "number", + }, + { + "name": "Ceil", + "description": "Maximum acceptable error, depends on the use case, >= 0", + "type": "number", + "default": 5, + "availableValues": [0.1, 5, 100, 10000], + "min": 0, + }, + ], + "outputs": [ + { + "name": "Error", + "description": "Difference between the ground truth and the predictions", + "type": "array", + "arrayType": "number", + }, + { + "name": "Absolute error", + "description": "Absolute value of the error", + "type": "array", + "arrayType": "number", + }, + { + "name": "Binary error", + "description": "True if Absolute error > ceil, False otherwise", + "type": "array", + "arrayType": "boolean", + }, + { + "name": "Error percentage", + "type": "number", + }, + { + "name": "Binary success", + "description": "True if Absolute error <= ceil, False otherwise", + "type": "array", + "arrayType": "boolean", + }, + { + "name": "Success percentage", + "type": "number", + }, + ], +} + + +def get_algorithm_details(): + return algorithm_description + + +def use_algorithm(inputs): + # Get inputs + gdt = get_input_from_inputs(inputs, "Ground truth", "array", "number") + predictions = get_input_from_inputs(inputs, "Predictions", "array", "number") + ceil = get_input_from_inputs(inputs, "Ceil", "number") + + # Check inputs + if ceil < 0: + raise TypeError("Ceil must be positive") + + if len(gdt) != len(predictions): + raise TypeError("Ground truth and predictions must have the same length") + + # Calculate regression metric + nb_values = len(gdt) + error = [None] * nb_values + absolute_error = [None] * nb_values + binary_error = [None] * nb_values + binary_success = [None] * nb_values + + for i in range(nb_values): + error_value = gdt[i] - predictions[i] + error[i] = error_value + absolute_error[i] = abs(error_value) + + if abs(error_value) > ceil: + binary_error[i] = True + binary_success[i] = False + else: + binary_error[i] = False + binary_success[i] = True + + # Calculate percentages + error_percentage = binary_error.count(True) / nb_values + error_percentage = round(error_percentage * 100, 2) + success_percentage = binary_success.count(True) / nb_values 
+ success_percentage = round(success_percentage * 100, 2) + + # Return outputs + return [ + { + "name": "Error", + "value": error, + }, + { + "name": "Absolute error", + "value": absolute_error, + }, + { + "name": "Binary error", + "value": binary_error, + }, + { + "name": "Error percentage", + "value": error_percentage, + }, + { + "name": "Binary success", + "value": binary_success, + }, + { + "name": "Success percentage", + "value": success_percentage, + }, + ] diff --git a/build/lib/backend/modules/algoProviders/integratedAlgoProvider/integratedAlgoProvider.py b/build/lib/backend/modules/algoProviders/integratedAlgoProvider/integratedAlgoProvider.py new file mode 100644 index 000000000..4b27ea380 --- /dev/null +++ b/build/lib/backend/modules/algoProviders/integratedAlgoProvider/integratedAlgoProvider.py @@ -0,0 +1,103 @@ +import os +from termcolor import colored + +from backend.config.init_config import DEBUG_COLOR +from backend.modules.algoProviders.AlgoProvider import AlgoProvider +from backend.modules.algoProviders.AlgoProviderException import AlgoProviderException + + +def _get_algorithm_python(algorithm_name): + """Get the python file of the algorithm + + Args: + algorithm_name (str): Name of the algorithm + + Returns: + module: Python module of the algorithm + """ + + # Get the algorithm file + algorithm_file = None + for file in os.listdir(os.path.dirname(__file__) + "/algorithms"): + if file.endswith(".py") and file[:-3] == algorithm_name: + algorithm_file = file[:-3] + break + + # Check if the file exists + if algorithm_file is None: + raise ValueError("Algorithm " + algorithm_name + " does not exists") + + # Import the algorithm + algorithm_python = __import__( + "modules.algoProviders.integratedAlgoProvider.algorithms." + algorithm_file, + fromlist=["*"], + ) + + return (algorithm_name, algorithm_python) + + +class IntegratedAlgoProvider(AlgoProvider): + # Integrated AlgoProvider + # Used to expose the algorithms that are integrated + # directly in DebiAI + def __init__(self): + self.url = "/app/algo-provider" + self.name = "Integrated Algo-provider" + self.alive = True + + def is_alive(self): + return True + + def get_algorithms(self): + """Get all algorithms that DebiAI can provide + Returns: + list: List of algorithms + """ + + # List the .py files if the algorithms folder + algorithm_files = [] + for file in os.listdir(os.path.dirname(__file__) + "/algorithms"): + if file.endswith(".py") and file != "__init__.py": + algorithm_files.append(file[:-3]) + + # Import the algorithms + algorithms_python = [] + for file in algorithm_files: + print(" Importing " + colored(file, DEBUG_COLOR)) + try: + algorithms_python.append(_get_algorithm_python(file)) + except ModuleNotFoundError as e: + print("Error importing " + file) + print(e) + + # Get the algorithms (call the get_algorithm_details() function) + algorithms = [] + for algorithm in algorithms_python: + algorithm_details = algorithm[1].get_algorithm_details() + # Add the id as the file name + algorithm_details["id"] = algorithm[0] + algorithms.append(algorithm_details) + + return algorithms + + def use_algorithm(self, algorithm_id, data): + try: + print("Using integrated algo-provider") + print("Using algorithm: " + algorithm_id) + algorithm = _get_algorithm_python(algorithm_id) + + # Use the algorithm + outputs = algorithm[1].use_algorithm(data["inputs"]) + + return outputs + + except TypeError as e: + print("The integrated algo-provider returned an error") + print(e) + raise AlgoProviderException( + algorithm_id + " returned 
an error: " + str(e), 400 + ) + except Exception as e: + print("The integrated algo-provider returned an error") + print(e) + raise AlgoProviderException("AlgoProvider internal server error", 500) diff --git a/build/lib/backend/modules/algoProviders/integratedAlgoProvider/utils.py b/build/lib/backend/modules/algoProviders/integratedAlgoProvider/utils.py new file mode 100644 index 000000000..bbb3a1271 --- /dev/null +++ b/build/lib/backend/modules/algoProviders/integratedAlgoProvider/utils.py @@ -0,0 +1,59 @@ +def get_input_from_inputs( + inputs, input_name, expected_input_type=None, expected_list_type=None +): + # Get the input from of the inputs list from a given name + # Check the type and the subtype if needed + + for i, input in enumerate(inputs): + if "name" not in input: + raise TypeError("Input n°{} has no name".format(i)) + + if "value" not in input: + raise TypeError("Input {} has no value".format(input["name"])) + + if input["name"] == input_name: + # Check the type + if expected_input_type == "number": + if not isinstance(input["value"], (int, float)): + raise TypeError( + "Input {} is not a number, but a {}".format( + input_name, type(input["value"]) + ) + ) + elif expected_input_type == "string": + if not isinstance(input["value"], str): + raise TypeError( + "Input {} is not a string but a {}".format( + input_name, type(input["value"]) + ) + ) + elif expected_input_type == "array": + if not isinstance(input["value"], list): + raise TypeError( + "Input {} is not an array but a {}".format( + input_name, type(input["value"]) + ) + ) + + # Check the subtype + if expected_list_type == "number": + for value in input["value"]: + if not isinstance(value, (int, float)): + raise TypeError( + "Input {} is not an array of numbers but of {}".format( + input_name, type(value) + ) + ) + elif expected_list_type == "string": + for value in input["value"]: + if not isinstance(value, str): + raise TypeError( + "Input {} is not an array of strings but of {}".format( + input_name, type(value) + ) + ) + + # Return the value + return input["value"] + + raise TypeError("Input {} not found in inputs".format(input_name)) diff --git a/build/lib/backend/modules/dataProviders/DataProvider.py b/build/lib/backend/modules/dataProviders/DataProvider.py new file mode 100644 index 000000000..0d25fd65c --- /dev/null +++ b/build/lib/backend/modules/dataProviders/DataProvider.py @@ -0,0 +1,77 @@ +from abc import ABC, abstractmethod, abstractproperty + + +class DataProvider(ABC): + # Data + @abstractproperty + def name(self): + pass + + @abstractproperty + def is_alive(self): + return False + + @abstractproperty + def type(self): + pass + + # Info + @abstractmethod + def get_info(self): + pass + + # Projects + @abstractmethod + def get_projects(self): + pass + + @abstractmethod + def get_project(self, id): + pass + + @abstractmethod + def delete_project(self, _id): + pass + + # Samples + @abstractmethod + def get_id_list(self, _projectId, _analysis, _from, _to): + pass + + @abstractmethod + def get_samples(self, _projectId, _analysis, id_list): + pass + + # Selections + @abstractmethod + def get_selections(self): + pass + + @abstractmethod + def get_selection_id_list(self, id): + pass + + @abstractmethod + def create_selection(self, name, id_list): + pass + + @abstractmethod + def delete_selection(self, id): + pass + + # Models + @abstractmethod + def get_models(self): + pass + + @abstractmethod + def get_model_results_id_list(self): + pass + + @abstractmethod + def get_model_results(self, id_list): + pass + + 
@abstractmethod + def delete_model(self, id): + pass diff --git a/build/lib/backend/modules/dataProviders/DataProviderException.py b/build/lib/backend/modules/dataProviders/DataProviderException.py new file mode 100644 index 000000000..5ebe02f2b --- /dev/null +++ b/build/lib/backend/modules/dataProviders/DataProviderException.py @@ -0,0 +1,15 @@ +# Description: Exception class for data providers +class DataProviderException(Exception): + message = "Data provider error" + status_code = 500 + + def __init__(self, message=None, status_code=None): + super(DataProviderException, self).__init__(message) + + if message is not None: + self.message = message + if status_code is not None: + self.status_code = status_code + + def __str__(self): + return self.message diff --git a/build/lib/backend/modules/dataProviders/__init__.py b/build/lib/backend/modules/dataProviders/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/build/lib/backend/modules/dataProviders/dataProviderManager.py b/build/lib/backend/modules/dataProviders/dataProviderManager.py new file mode 100644 index 000000000..2b651dbb7 --- /dev/null +++ b/build/lib/backend/modules/dataProviders/dataProviderManager.py @@ -0,0 +1,127 @@ +from termcolor import colored + +from backend.config.init_config import ( + get_config, + DEBUG_COLOR, + ERROR_COLOR, + SUCCESS_COLOR, +) +from backend.modules.dataProviders.webDataProvider.WebDataProvider import ( + WebDataProvider, +) +from backend.modules.dataProviders.pythonDataProvider.PythonDataProvider import ( + PythonDataProvider, + PYTHON_DATA_PROVIDER_ID, +) +from backend.modules.dataProviders.DataProviderException import DataProviderException + +data_providers_list = [] +python_data_provider_disabled = True + + +def setup_data_providers(): + global python_data_provider_disabled + print("================== DATA PROVIDERS ==================") + config = get_config() + web_data_provider_config = config["WEB_DATA_PROVIDERS"] + python_module_data_provider_config = config["INTEGRATED_DATA_PROVIDER"] + + keys = list(web_data_provider_config.keys()) + values = list(web_data_provider_config.values()) + + # Web Data Providers + for i in range(len(web_data_provider_config)): + name = keys[i] + url = values[i] + + # Remove trailing slash + if url[-1] == "/": + url = url[:-1] + + print( + " - Adding external data Provider " + + colored(name, DEBUG_COLOR) + + " (" + + url + + ")" + ) + try: + data_provider = WebDataProvider(url, name) + add(data_provider) + + if data_provider.is_alive(): + print(colored(" [SUCCESS]", SUCCESS_COLOR) + " Data Provider ready") + else: + raise DataProviderException() + except DataProviderException: + print( + colored(" [ERROR]", ERROR_COLOR) + + " : Data Provider " + + colored(name, ERROR_COLOR) + + " is not accessible" + ) + # Python Data Providers + if python_module_data_provider_config["enabled"]: + print(" - Adding Python Module data Provider") + add(PythonDataProvider()) + python_data_provider_disabled = False + + if len(data_providers_list) == 0: + print(" No data providers configured") + + +def data_provider_exists(name): + for d in data_providers_list: + if d.name == name: + return True + return False + + +def is_valid_name(name): + # /, &, | are not allowed in data provider names + if ( + "/" in name + or "&" in name + or "|" in name + or len(name) == 0 + or len(name) > 50 + or name[0] == " " + or name[-1] == " " + ): + return False + + return True + + +def add(data_provider): + data_providers_list.append(data_provider) + return + + +def 
get_data_provider_list(): + return data_providers_list + + +def get_single_data_provider(name): + # Check if the data provider is not disabled + if name == PYTHON_DATA_PROVIDER_ID and python_data_provider_disabled: + raise DataProviderException("Python module data provider is disabled", 403) + + # Return the data provider with the given name + for d in data_providers_list: + if d.name == name: + return d + + raise DataProviderException("Data provider not found", 404) + + +def delete(name): + for d in data_providers_list: + if d.name == name: + if d.type == "Python module Data Provider": + raise DataProviderException( + "Python module data provider cannot be deleted", 403 + ) + + data_providers_list.remove(d) + return diff --git a/build/lib/backend/modules/dataProviders/pythonDataProvider/PythonDataProvider.py b/build/lib/backend/modules/dataProviders/pythonDataProvider/PythonDataProvider.py new file mode 100644 index 000000000..c143d7a76 --- /dev/null +++ b/build/lib/backend/modules/dataProviders/pythonDataProvider/PythonDataProvider.py @@ -0,0 +1,250 @@ +from backend.config.init_config import get_config +from backend.modules.dataProviders.DataProvider import DataProvider +from backend.modules.dataProviders.DataProviderException import DataProviderException +from backend.modules.dataProviders.pythonDataProvider.dataUtils import ( + pythonModuleUtils, + projects, + samples, + selections, + models, + tree, +) + +from backend.utils.utils import get_app_version + +PYTHON_DATA_PROVIDER_ID = "Python module Data Provider" + + +# Wrappers +def project_must_exist(func): + def wrapper(*args, **kwargs): + if len(args) < 2: + raise Exception("Project id must be provided as first argument") + + project_id = args[1] + + if not projects.project_exist(project_id): + raise DataProviderException("Project " + project_id + " not found", 404) + + return func(*args, **kwargs) + + return wrapper + + +class PythonDataProvider(DataProvider): + # Generic functions + def __init__(self): + pythonModuleUtils.init() + nb_projects = len(projects.get_projects()) + print( + " Python module Data Provider initialized with " + + str(nb_projects) + + " projects" + ) + + @property + def name(self): + return PYTHON_DATA_PROVIDER_ID + + @property + def type(self): + return PYTHON_DATA_PROVIDER_ID + + def is_alive(self): + return True + + def get_info(self): + # Request method to get info on data Provider + # return Object { version, dp_name, nb_Sample_max(to load)} + return { + "version": get_app_version(), + "maxSampleIdByRequest": 10000, + "maxSampleDataByRequest": 2000, + "maxResultByRequest": 5000, + "canDelete": { + "projects": True, + "selections": True, + "models": True, + }, + } + + # Projects + def get_projects(self): + # Request method to get projects overview + # Return Arr[object{ id, name, nb_samples, nb_models, nb_selections, + # update_time, creation_time}] + return projects.get_projects() + + def create_project(self, name): + # Check config + config = get_config() + creation_allowed = config["INTEGRATED_DATA_PROVIDER"]["allow_create_projects"] + if not creation_allowed: + raise DataProviderException("Project creation is not allowed", 403) + + # Project must not already exist + if projects.project_exist(name): + raise DataProviderException("Project already exists", 400) + + return projects.create_project(name, name) + + @project_must_exist + def get_project(self, project_id): + # Request method to get projects overview + # Return object{ id, name, nb_samples, nb_models, nb_selections, + # update_time, 
creation_time} + + project_base_info = projects.get_project(project_id) + project_base_info["selections"] = selections.get_selections(project_id) + project_base_info["resultStructure"] = projects.get_result_structure(project_id) + project_base_info["models"] = models.get_models(project_id) + return project_base_info + + @project_must_exist + def delete_project(self, project_id): + # Check config + config = get_config() + creation_allowed = config["INTEGRATED_DATA_PROVIDER"]["allow_delete_projects"] + if not creation_allowed: + raise DataProviderException("Project deletion is not allowed", 403) + + # Request method to delete project + projects.delete_project(project_id) + + # Id list + @project_must_exist + def get_id_list(self, project_id, analysis, _from=None, _to=None): + # Get id list + # Return Arr[id] + return samples.get_all_samples_id_list(project_id, _from, _to) + + @project_must_exist + def get_samples(self, project_id, analysis, id_list): + # Get full data from id list + # Return object { id: [data]} + return samples.get_data_from_sample_id_list(project_id, id_list) + + # Selections + @project_must_exist + def get_selections(self, project_id): + # Get selections on project + # Return arr[object{ id, name, creation_time, nb_samples}] + return selections.get_selections(project_id) + + @project_must_exist + def get_selection_id_list(self, project_id, selection_id): + # Get selections id for a project + # Return selection ID list + return selections.get_selection_id_list(project_id, selection_id) + + @project_must_exist + def create_selection(self, project_id, name, id_list, request_id=None): + # Check config + config = get_config() + creation_allowed = config["INTEGRATED_DATA_PROVIDER"]["allow_create_selections"] + if not creation_allowed: + raise DataProviderException("Selection creation is not allowed", 403) + + # Selection creation + return selections.create_selection(project_id, name, id_list, request_id) + + @project_must_exist + def delete_selection(self, project_id, selection_id): + # Check config + config = get_config() + deletion_allowed = config["INTEGRATED_DATA_PROVIDER"]["allow_delete_selections"] + if not deletion_allowed: + raise DataProviderException("Selection deletion is not allowed", 403) + + # Selection deletion + return selections.delete_selection(project_id, selection_id) + + # Models + @project_must_exist + def get_models(self, project_id): + return models.get_models(project_id) + + @project_must_exist + def get_model_results_id_list(self, project_id, model_id): + return models.get_model_id_list(project_id, model_id) + + @project_must_exist + def get_model_results(self, project_id, model_id, id_list): + return models.get_model_results(project_id, model_id, id_list) + + # Python module specific functions + + @project_must_exist + def update_block_structure(self, project_id, blockStructure): + # Check config + config = get_config() + creation_allowed = config["INTEGRATED_DATA_PROVIDER"]["allow_create_projects"] + if not creation_allowed: + raise DataProviderException("Project creation is not allowed", 403) + + # Update block structure + projects.update_block_structure(project_id, blockStructure) + + @project_must_exist + def add_block_tree(self, project_id, data): + # Check config + config = get_config() + creation_allowed = config["INTEGRATED_DATA_PROVIDER"]["allow_insert_data"] + if not creation_allowed: + raise DataProviderException("Data insertion is not allowed", 403) + + # Insert data + return tree.add_block_tree(project_id, data) + + @project_must_exist + 
def update_results_structure(self, project_id, resultsStructure): + # Check config + config = get_config() + creation_allowed = config["INTEGRATED_DATA_PROVIDER"]["allow_insert_results"] + if not creation_allowed: + raise DataProviderException("Results insertion is not allowed", 403) + + # TODO : check resultStructure (type and default type ==) + existing_result_structure = projects.get_result_structure(project_id) + if existing_result_structure is not None: + raise DataProviderException( + "project " + project_id + " already have a results structure", 403 + ) + + projects.update_results_structure(project_id, resultsStructure) + + @project_must_exist + def create_model(self, project_id, data): + # Check config + config = get_config() + creation_allowed = config["INTEGRATED_DATA_PROVIDER"]["allow_create_models"] + if not creation_allowed: + raise DataProviderException("Model creation is not allowed", 403) + + models.create_model( + project_id, data["name"], data["metadata"] if "metadata" in data else None + ) + + @project_must_exist + def delete_model(self, project_id, model_id): + # Check config + config = get_config() + deletion_allowed = config["INTEGRATED_DATA_PROVIDER"]["allow_delete_models"] + if not deletion_allowed: + raise DataProviderException("Model deletion is not allowed", 403) + + # Check if model exist + if not models.model_exist(project_id, model_id): + raise DataProviderException("Model does not exist", 404) + + models.delete_model(project_id, model_id) + + @project_must_exist + def add_results_dict(self, project_id, model_id, data): + # Check config + config = get_config() + creation_allowed = config["INTEGRATED_DATA_PROVIDER"]["allow_insert_results"] + if not creation_allowed: + raise DataProviderException("Results insertion is not allowed", 403) + + models.add_results_dict(project_id, model_id, data) diff --git a/build/lib/backend/modules/dataProviders/pythonDataProvider/__init__.py b/build/lib/backend/modules/dataProviders/pythonDataProvider/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/__init__.py b/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/hash.py b/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/hash.py new file mode 100644 index 000000000..844a309fa --- /dev/null +++ b/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/hash.py @@ -0,0 +1,60 @@ +import hashlib +import ujson as json + +from backend.modules.dataProviders.pythonDataProvider.dataUtils import pythonModuleUtils + +DATA_PATH = pythonModuleUtils.DATA_PATH + + +def hash(text: str): + return hashlib.sha256(text.encode("utf-8")).hexdigest() + + +# hash +def __createProjectHashMap(projectId, blockPath, hashmap, sampleLevel, currentLevel): + blockPath += "/" + if currentLevel == sampleLevel: + # We are at the sample level, we can fill the hashmap + sampleHash = hash(blockPath) + hashmap[sampleHash] = blockPath + + # Update the sample + pythonModuleUtils.updateJsonFile( + DATA_PATH + projectId + "/blocks/" + blockPath + "info.json", + "id", + sampleHash, + ) + return + + for children in pythonModuleUtils.listDir( + DATA_PATH + projectId + "/blocks/" + blockPath + ): + __createProjectHashMap( + projectId, blockPath + children, hashmap, sampleLevel, currentLevel + 1 + ) + + +def addToSampleHashmap(projectId, hashMap): 
+    with open(DATA_PATH + projectId + "/samplesHashmap.json") as json_file:
+        existingHm = json.load(json_file)
+
+    existingHm.update(hashMap)
+
+    pythonModuleUtils.writeJsonFile(
+        DATA_PATH + projectId + "/samplesHashmap.json", existingHm
+    )
+
+
+def getHashmap(projectId):
+    with open(DATA_PATH + projectId + "/samplesHashmap.json") as json_file:
+        existingHm = json.load(json_file)
+
+    return existingHm
+
+
+def getPathFromHashList(projectId, hashArray):
+    hm = getHashmap(projectId)
+    ret = []
+    for hash in hashArray:
+        ret.append(hm[hash])
+    return ret
diff --git a/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/models.py b/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/models.py
new file mode 100644
index 000000000..075df2087
--- /dev/null
+++ b/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/models.py
@@ -0,0 +1,281 @@
+import os
+import ujson as json
+from backend.modules.dataProviders.pythonDataProvider.dataUtils import (
+    pythonModuleUtils,
+    projects,
+    tree,
+)
+from backend.modules.dataProviders.DataProviderException import DataProviderException
+
+DATA_PATH = pythonModuleUtils.DATA_PATH
+
+
+#  Models
+def get_model_ids(project_id):
+    return os.listdir(DATA_PATH + project_id + "/models/")
+
+
+def get_models(project_id):
+    ret = []
+    for model in os.listdir(DATA_PATH + project_id + "/models/"):
+        with open(
+            DATA_PATH + project_id + "/models/" + model + "/info.json"
+        ) as json_file:
+            info = json.load(json_file)
+            ret.append(
+                {
+                    "name": model,
+                    "id": model,
+                    "creationDate": info["creationDate"],
+                    "updateDate": info["updateDate"],
+                    "version": "0.0.0",
+                    "metadata": info["metadata"],
+                    "nbResults": info["nbResults"],
+                }
+            )
+
+    return ret
+
+
+def model_exist(project_id, model_id):
+    return model_id in get_model_ids(project_id)
+
+
+def create_model(project_id, model_name, metadata=None):
+    # Parameters check
+    if not pythonModuleUtils.is_filename_clean(model_name):
+        raise DataProviderException("Model name contains invalid characters", 402)
+
+    model_id = model_name
+
+    if model_exist(project_id, model_id):
+        raise DataProviderException("Model " + model_id + " already exists", 409)
+
+    if metadata is None:
+        metadata = {}
+
+    # model
+    modelFolderPath = DATA_PATH + project_id + "/models/" + model_id
+    os.mkdir(modelFolderPath)
+
+    now = pythonModuleUtils.timeNow()
+
+    model_info = {
+        "name": model_id,
+        "id": model_id,
+        "creationDate": now,
+        "updateDate": now,
+        "metadata": metadata,
+        "nbResults": 0,
+    }
+
+    pythonModuleUtils.writeJsonFile(modelFolderPath + "/info.json", model_info)
+
+    # Add 0 results to init the file
+    write_model_results(project_id, model_id, {})
+
+
+def delete_model(project_id, model_id):
+    pythonModuleUtils.deleteDir(DATA_PATH + project_id + "/models/" + model_id)
+
+
+def write_model_results(project_id, model_id, results):
+    pythonModuleUtils.writeJsonFile(
+        DATA_PATH + project_id + "/models/" + model_id + "/results.json", results
+    )
+    projects.update_project(project_id)
+
+
+def get_model_results(project_id, model_id, sample_ids):
+    # Check parameters
+    if not projects.project_exist(project_id):
+        raise DataProviderException("Project '" + project_id + "' doesn't exist", 404)
+    if not model_exist(project_id, model_id):
+        raise DataProviderException("Model " + model_id + " does not exist", 404)
+
+    # Get model results
+    with open(
+        DATA_PATH + project_id + "/models/" + model_id + "/results.json", "r"
+    ) as jsonFile:
+        model_results = json.load(jsonFile)
+
+    # if not selection_id:
+    #     return d
+    # else:
+    #     selectionSamples = set(
+    #         selections.getSelectionSamples(project_id, selection_id))
+    #     return selectionSamples.intersection_update(d)
+    # model_results = getModelResults(project_id, model_id)
+
+    ret = {}
+    for sample_id in sample_ids:
+        if sample_id in model_results:
+            ret[sample_id] = model_results[sample_id]
+        # Not sending error if sample not found in model results at the moment
+        # else:
+        #     raise ValueError("Sample " + sample_id +
+        #                      " not found in model results")
+    return ret
+
+
+def get_model_id_list(project_id, model_id) -> list:
+    # Get model results
+    with open(
+        DATA_PATH + project_id + "/models/" + model_id + "/results.json", "r"
+    ) as jsonFile:
+        model_results = json.load(jsonFile)
+        return list(model_results.keys())
+
+
+# def get_model_list_results(project_id, model_ids: list, common: bool) -> list:
+#     samples = set(get_model_results(project_id, model_ids[0]))
+
+#     for model_id in model_ids[1:]:
+#         if common:  # Common samples between models
+#             samples.intersection_update(
+#                 get_model_results(project_id, model_id))
+#         else:  # Union of the model results samples
+#             samples = samples.union(get_model_results(project_id, model_id))
+
+#     return list(samples)
+
+
+def add_results_dict(project_id, modelId, data):
+    tree = data["results"]
+
+    # Check parameters
+    if not projects.project_exist(project_id):
+        raise DataProviderException("Project '" + project_id + "' doesn't exist", 404)
+
+    if not model_exist(project_id, modelId):
+        raise DataProviderException(
+            "Model '" + modelId + "' in project '" + project_id + "' doesn't exist", 404
+        )
+
+    # Get resultStructure & project_block_structure
+    result_structure = projects.get_result_structure(project_id)
+    if result_structure is None:
+        raise DataProviderException(
+            "The project expected results need to be specified before adding results", 403
+        )
+
+    if "expected_results_order" in data:
+        expected_results_order = data["expected_results_order"]
+    else:
+        expected_results_order = list(map(lambda r: r["name"], result_structure))
+
+    project_block_structure = projects.get_project_block_level_info(project_id)
+    sampleIndex = len(project_block_structure) - 1
+
+    # Check the given expected_results_order
+    for expected_result in result_structure:
+        if expected_result["name"] not in expected_results_order:
+            raise DataProviderException(
+                "The expected result '"
+                + expected_result["name"]
+                + "' is missing from the expected_results_order Array", 403,
+            )
+
+    giv_exp_res = {}
+    for given_expected_result in expected_results_order:
+        result_expected = False
+        for i, expected_result in enumerate(result_structure):
+            if given_expected_result == expected_result["name"]:
+                result_expected = True
+
+                # Map the expected_results_order indexes to result_structure
+                giv_exp_res[given_expected_result] = i
+
+        if not result_expected:
+            return (
+                "The given expected result '"
+                + given_expected_result
+                + "' is not an expected result",
+                403,
+            )
+
+    #  Check if all blocks referenced in the result tree exist
+    resultsToAdd = {}
+
+    for blockKey in tree:
+        ok, msg = __check_blocks_of_tree_exists(
+            project_id,
+            result_structure,
+            giv_exp_res,
+            tree[blockKey],
+            0,
+            sampleIndex,
+            blockKey,
+            resultsToAdd,
+        )
+        if not ok:
+            print(msg)
+            return msg, 403
+
+    # The given tree is compliant, let's add the results
+    newResults = pythonModuleUtils.addToJsonFIle(
+        DATA_PATH + project_id + "/models/" + modelId + "/results.json", resultsToAdd
+    )
+
+    pythonModuleUtils.addToJsonFIle(
+        DATA_PATH + project_id + "/models/" + modelId + "/info.json",
+        {"nbResults": len(newResults), "updateDate": pythonModuleUtils.timeNow()},
+    )
+    projects.update_project(project_id)
+    return 200
+
+
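+# Example of the payload shape accepted by add_results_dict (illustrative only:
+# block, sample and result names are placeholders, the nesting of "results" must
+# follow the project block structure down to the sample level, and each leaf list
+# is ordered like expected_results_order):
+#
+#     data = {
+#         "expected_results_order": ["prediction", "confidence"],
+#         "results": {
+#             "block_1": {
+#                 "sample_1": [0.2, 0.9],
+#                 "sample_2": [0.4, 0.7],
+#             }
+#         },
+#     }
+#     add_results_dict("my_project", "my_model", data)
+
+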
+def __check_blocks_of_tree_exists( + project_id: str, + result_structure: list, + giv_exp_res: dict, + block: dict, + level: int, + sampleIndex: int, + path: str, + resultsToAdd: dict, +): + # Check block exist in the data + blockInfo = tree.findBlockInfo(project_id, path) + if not blockInfo: + return ( + False, + "Error while adding the results : block '" + path + "' doesn't exist", + ) + + if level == sampleIndex: + path += "/" + resultsToAdd[blockInfo["id"]] = [] + #  Sample level : the results : they need to be verified + if len(block) != len(giv_exp_res): + raise ValueError( + "in : " + + path + + ", " + + str(len(block)) + + " value where given but " + + str(len(giv_exp_res)) + + "where expected" + ) + + for result in result_structure: + resultsToAdd[blockInfo["id"]].append(block[giv_exp_res[result["name"]]]) + # TODO Deal with defaults results and check type + + return True, None + + for subBlockKey in block: + ok, msg = __check_blocks_of_tree_exists( + project_id, + result_structure, + giv_exp_res, + block[subBlockKey], + level + 1, + sampleIndex, + path + "/" + str(subBlockKey), + resultsToAdd, + ) + if not ok: + return False, msg + + return True, "" diff --git a/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/projects.py b/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/projects.py new file mode 100644 index 000000000..dcecd3f8a --- /dev/null +++ b/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/projects.py @@ -0,0 +1,256 @@ +import os +import shutil +import ujson as json + +from backend.modules.dataProviders.pythonDataProvider.dataUtils import pythonModuleUtils, hash + +DATA_PATH = pythonModuleUtils.DATA_PATH + + +def project_exist(projectId): + return projectId in os.listdir(DATA_PATH) + + +def get_project(projectId): + try: + # Json info file + if not os.path.exists(DATA_PATH + projectId + "/info.json"): + raise Exception('The "info.json" file is missing') + + with open(DATA_PATH + projectId + "/info.json") as json_file: + data = json.load(json_file) + + if "name" not in data: + raise Exception("The project name is missing from the info.json file") + + if "creationDate" not in data: + raise Exception( + "The project creationDate is missing from the info.json file" + ) + + if "updateDate" not in data: + raise Exception("The project updateDate is missing from the info.json file") + + name = data["name"] + creationDate = data["creationDate"] + updateDate = data["updateDate"] + + # Nb models + if not os.path.exists(DATA_PATH + projectId + "/models/"): + raise Exception('The "models" folder is missing') + + nbModels = len(os.listdir(DATA_PATH + projectId + "/models/")) + + # Nb selection + if not os.path.exists(DATA_PATH + projectId + "/selections/"): + raise Exception('The "selections" folder is missing') + + nbSelection = len(os.listdir(DATA_PATH + projectId + "/selections/")) + + # Nb samples + if not os.path.exists(DATA_PATH + projectId + "/samplesHashmap.json"): + raise Exception('The "samplesHashmap.json" file is missing') + + nbSamples = len(hash.getHashmap(projectId)) + + # project columns + projectColumns = get_project_columns(projectId) + + # project block level + # We still need to get the project block level, the Python module use it + projectBlockLevel = get_project_block_level_info(projectId) + + projectOverview = { + "id": projectId, + "name": name, + "nbModels": nbModels, + "nbSelections": nbSelection, + "nbSamples": nbSamples, + "creationDate": creationDate, + "updateDate": updateDate, + "columns": 
projectColumns, + "blockLevelInfo": projectBlockLevel, + } + + except Exception as e: + print("Error while getting the project overview: " + projectId) + print(e) + projectOverview = { + "id": projectId, + "name": projectId, + } + + return projectOverview + + +def get_projects(): + project = [] + + for projectId in os.listdir(DATA_PATH): + project.append(get_project(projectId)) + + return project + + +def create_project(projectId, projectName): + # Create the project files and folders + os.mkdir(DATA_PATH + projectId) + os.mkdir(DATA_PATH + projectId + "/blocks") + os.mkdir(DATA_PATH + projectId + "/models") + os.mkdir(DATA_PATH + projectId + "/selections") + + now = pythonModuleUtils.timeNow() + projectInfo = { + "name": projectName, + "id": projectId, + "creationDate": now, + "updateDate": now, + "blockLevelInfo": [], + } + + pythonModuleUtils.writeJsonFile(DATA_PATH + projectId + "/info.json", projectInfo) + pythonModuleUtils.writeJsonFile(DATA_PATH + projectId + "/samplesHashmap.json", {}) + + return projectInfo + + +def update_project(projectId): + # Change the update date of the project to now + pythonModuleUtils.updateJsonFile( + DATA_PATH + projectId + "/info.json", "updateDate", pythonModuleUtils.timeNow() + ) + + +def get_project_block_level_info(projectId): + if not os.path.isfile(DATA_PATH + projectId + "/info.json"): + raise Exception( + "The project '" + projectId + "' doesn't have an info.json file" + ) + + with open(DATA_PATH + projectId + "/info.json") as json_file: + return json.load(json_file)["blockLevelInfo"] + + +def get_project_columns(projectId): + block_level_info = get_project_block_level_info(projectId) + + # Convert the block level info to the new columns format + # blockLevelInfo: + # [ + # { "name": "block1" }, + # { + # "name": "block2", + # "contexts": [ + # { "name": "cont1", "type": "text", group:"group_1" }, + # { "name": "cont2", "type": "text", group:"group_1" }, + # ] + # }, + # { "name": "block3", "contexts": [ + # { "name": "cont3", "type": "text", group:"group_1" } + # ] + # }, + # { + # "name": "block4", + # "others": [{ "name": "other1", "type": "number" }], + # "groundTruth": [ + # { "name": "gdt1", "type": "number" }, + # { "name": "gdt2", "type": "number" }, + # ], + # "inputs": [ + # { "name": "inp1", "type": "number" } + # ] + # } + # ] + + # Goal format: + # [ + # { "name": "block1", "category": "other", "type": "auto" }, + # { "name": "block2", "category": "other", "type": "auto" }, + # { "name": "cont1", "category": "context", "type": "text", group: "group_1" }, + # { "name": "cont2", "category": "context", "type": "text", group: "group_1" }, + # { "name": "cont3", "category": "context", "type": "text", group: "group_1" }, + # { "name": "block3", "category": "other", "type": "auto" }, + # { "name": "other1", "category": "other", "type": "number" }, + # { "name": "block4", "category": "other", "type": "auto" }, + # { "name": "gdt1", "category": "groundtruth", "type": "number" }, + # { "name": "gdt2", "category": "groundtruth", "type": "number" }, + # { "name": "inp1", "category": "input", "type": "number" }, + # ] + + project_columns = [] + + def create_column(col, category): + column = {"name": col["name"], "category": category, "type": col["type"]} + + if "group" in col: + column["group"] = col["group"] + + return column + + for block in block_level_info: + block_name = block["name"] + project_columns.append( + {"name": block_name, "category": "other", "type": "auto"} + ) + + if "groundTruth" in block: + for ground_truth in 
block["groundTruth"]: + project_columns.append(create_column(ground_truth, "groundtruth")) + + if "contexts" in block: + for context in block["contexts"]: + project_columns.append(create_column(context, "context")) + + if "inputs" in block: + for input in block["inputs"]: + project_columns.append(create_column(input, "input")) + + if "others" in block: + for other in block["others"]: + project_columns.append(create_column(other, "other")) + + return project_columns + + +def get_result_structure(projectId): + with open(DATA_PATH + projectId + "/info.json") as json_file: + projectInfo = json.load(json_file) + if "resultStructure" in projectInfo: + return projectInfo["resultStructure"] + else: + return None + + +def delete_project(projectId): + # Delete the project files and folders + try: + shutil.rmtree(DATA_PATH + projectId) + except Exception as e: + print(e) + raise "Something went wrong when deleting the project" + + +def update_block_structure(projectId, blockStructure): + try: + pythonModuleUtils.updateJsonFile( + DATA_PATH + projectId + "/info.json", "blockLevelInfo", blockStructure + ) + + update_project(projectId) + except Exception as e: + print(e) + raise Exception("Something went wrong updating project structure") + + +def update_results_structure(projectId, resultStructure): + try: + # save resultStructure + pythonModuleUtils.updateJsonFile( + DATA_PATH + projectId + "/info.json", "resultStructure", resultStructure + ) + update_project(projectId) + return resultStructure, 200 + + except Exception as e: + print(e) + raise "Something went wrong updating project structure" diff --git a/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/pythonModuleUtils.py b/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/pythonModuleUtils.py new file mode 100644 index 000000000..f98261689 --- /dev/null +++ b/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/pythonModuleUtils.py @@ -0,0 +1,121 @@ +import ujson as json +import os +import posixpath +import unicodedata +import string +import shutil +import time + +DATA_PATH = "data/pythonDataProvider/" + +DATA_TYPES = ["groundTruth", "contexts", "inputs", "others"] + + +# Init, called at the server start +def init(): + # Create the projects data directory + try: + os.makedirs(DATA_PATH) + except FileExistsError: + # Data already initiated + pass + + +# File name verifications +def clean_filename(filename): + # replace spaces + filename = filename.replace(" ", "_") + + # keep only valid ascii chars + cleaned_filename = ( + unicodedata.normalize("NFKD", filename).encode("ASCII", "ignore").decode() + ) + + # keep only whitelisted chars + whitelist = "_-() %s%s" % (string.ascii_letters, string.digits) + char_limit = 255 + + cleaned_filename = "".join(c for c in cleaned_filename if c in whitelist) + return cleaned_filename[:char_limit] + + +def is_filename_clean(filename): + cleanFilename = "".join(i for i in filename if i not in "\/:*?<>|") # noqa + return filename == cleanFilename + + +def is_secure_path(path): + path = posixpath.normpath(path) + return not path.startswith(("/", "../")) + + +# directories and file Manipulation +def fileExist(path): + return os.path.isfile(path) + + +def listDir(path): + # List the directories only + return [ + name for name in os.listdir(path) if os.path.isdir(os.path.join(path, name)) + ] + + +def deleteFile(filePath): + os.remove(filePath) + + +def deleteDir(dirPath): + deleteFiles = [] + deleteDirs = [] + for root, dirs, files in os.walk(dirPath): + for f in 
files: + deleteFiles.append(os.path.join(root, f)) + for d in dirs: + deleteDirs.append(os.path.join(root, d)) + for f in deleteFiles: + os.remove(f) + for d in deleteDirs: + os.rmdir(d) + os.rmdir(dirPath) + + +def copyDir(src, dest): + shutil.copytree(src, dest) + + +# Json files +def readJsonFile(path): + with open(path, "r") as jsonFile: + return json.load(jsonFile) + + +def writeJsonFile(path, obj): + with open(path, "w") as outfile: + json.dump(obj, outfile) + + +def updateJsonFile(path, key, data): + with open(path, "r") as jsonFile: + d = json.load(jsonFile) + + d[key] = data + + with open(path, "w") as jsonFile: + json.dump(d, jsonFile) + return d + + +def addToJsonFIle(path, dictToAdd: dict): + with open(path, "r") as jsonFile: + d = json.load(jsonFile) + + d = {**d, **dictToAdd} + with open(path, "w") as jsonFile: + json.dump(d, jsonFile) + return d + + +# Date +def timeNow(): + return time.time() * 1000 diff --git a/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/samples.py b/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/samples.py new file mode 100644 index 000000000..6ee3676ef --- /dev/null +++ b/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/samples.py @@ -0,0 +1,130 @@ +from backend.modules.dataProviders.pythonDataProvider.dataUtils import ( + pythonModuleUtils, + tree, + hash, +) + +DATA_PATH = pythonModuleUtils.DATA_PATH +DATA_TYPES = pythonModuleUtils.DATA_TYPES + +# ID list + + +def get_all_samples_id_list(project_id, _from=None, _to=None): + """ + Return a list of all samples id in a project + """ + # Get the hashmap + hashmap = hash.getHashmap(project_id) + + # Get all samples + samples = list(hashmap.keys()) + + # In case of streaming purpose + if _from is not None and _to is not None: + samples = samples[_from : _to + 1] # noqa + + return samples + + +# Get data +def get_data_from_sample_id_list(project_id, id_list): + # Get path of the samples from the hashmap + sample_path = hash.getPathFromHashList(project_id, id_list) + data = {} + + # We age going through each samples individually because of a bug + # (the data aren't aligned with the requested samples id) + # Because of this bug, we are slowing down the process + # TODO : fix this bug + for i in range(len(id_list)): + # Get tree from samples + samples_tree = tree.getBlockTreeFromSamples(project_id, [sample_path[i]]) + + # Convert tree to array + data_array = _tree_to_array(samples_tree) + + # Convert array to dict + data[id_list[i]] = data_array[0] + + return data + + +def _tree_to_array(tree): + data_array = [] + for block in tree: + data_array += _block_to_array_recur(block) + return data_array + + +def _get_block_values(block): + # Adding the block name into the values + values = [block["name"]] + + # store all key-values into an array + for data_type in DATA_TYPES: + if data_type in block: + for key in range(len(block[data_type])): + values.append(block[data_type][key]) + + return values + + +def _block_to_array_recur(block): + # Getting bloc values + values = _get_block_values(block) + if "childrenInfoList" not in block or len(block["childrenInfoList"]) == 0: + return [values] + + else: + # Getting all child values + child_values = [] + for child_block in block["childrenInfoList"]: + child_values += _block_to_array_recur(child_block) + # child_values.append(_block_to_array_recur(child_block)) + + # Child values : [[1,2,3], [4,5,6], [7,8,9]] + # values : [10, 11, 12] + # Goal: [[10, 11, 12, 1, 2, 3], [10, 11, 12, 4, 5, 6], [10, 11, 12, 7, 8, 
9]] + + # Adding the block name into the values + ret = [None] * len(child_values) + + # Adding the block values to the children values + for i in range(len(child_values)): + ret[i] = values + child_values[i] + + return ret + + +# def projectSamplesGenerator(projectId): +# """ +# Generator used to iterate over all samples in a project. +# Used by the 'createSelectionFromRequest' method +# """ + +# # Get the project block structure +# projectBlockStructure = projects.get_project_block_level_info(projectId) +# sampleLevel = len(projectBlockStructure) - 1 + +# rootBlocks = utils.listDir(DATA_PATH + projectId + "/blocks/") +# for rootBlock in rootBlocks: +# path = DATA_PATH + projectId + "/blocks/" + rootBlock + "/" +# yield from yieldSample(path, 0, [], sampleLevel, projectBlockStructure) +# print("end") + + +# def yieldSample(path, level, sampleInfo, sampleLevel, blockLevelInfo): +# # TODO : optimizations : add in parameters the block that we need to open +# blockInfo = utils.readJsonFile(path + "info.json") +# sampleInfo.append(getBlockInfo(blockLevelInfo[level], blockInfo)) + +# if level == sampleLevel: +# # merge the dict into one +# yield {k: v for x in sampleInfo for k, v in x.items()}, blockInfo["id"] +# else: +# childrenBlockNames = utils.listDir(path) +# for name in childrenBlockNames: +# yield from yieldSample(path + name + "/", +# level + 1, sampleInfo, sampleLevel, blockLevelInfo) +# del sampleInfo[-1] diff --git a/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/selections.py b/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/selections.py new file mode 100644 index 000000000..8281886ef --- /dev/null +++ b/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/selections.py @@ -0,0 +1,117 @@ +import os +import ujson as json + +from backend.modules.dataProviders.pythonDataProvider.dataUtils import ( + pythonModuleUtils, + projects, +) + +DATA_PATH = pythonModuleUtils.DATA_PATH + +# Selections + + +def create_selection(project_id, selection_name, sample_ids, request_id=None): + selection_id = pythonModuleUtils.clean_filename(selection_name) + if len(selection_id) == 0: + selection_id = pythonModuleUtils.timeNow() + + nbS = 1 + while selection_exist(project_id, selection_id): + selection_id = pythonModuleUtils.clean_filename(selection_name) + "_" + str(nbS) + nbS += 1 + + # Save the selection + selectionInfoFilePath = ( + DATA_PATH + project_id + "/selections/" + selection_id + "/info.json" + ) + now = pythonModuleUtils.timeNow() + + selectionInfo = { + "id": selection_id, + "name": selection_name, + "filePath": selectionInfoFilePath, + "creationDate": now, + "updateDate": now, + "samples": sample_ids, + } + + if request_id is not None: + selectionInfo["requestId"] = request_id + + os.mkdir(DATA_PATH + project_id + "/selections/" + selection_id) + pythonModuleUtils.writeJsonFile(selectionInfoFilePath, selectionInfo) + projects.update_project(project_id) + return selectionInfo + + +def get_selections(project_id): + # Get selections + selections = [] + for selection_id in get_selection_ids(project_id): + selections.append(get_selection(project_id, selection_id)) + return selections + + +def get_selection_ids(project_id): + return os.listdir(DATA_PATH + project_id + "/selections/") + + +def selection_exist(project_id, selectionId): + return os.path.exists(DATA_PATH + project_id + "/selections/" + selectionId) + + +def get_selection(project_id, selectionId): + with open( + DATA_PATH + project_id + "/selections/" + selectionId + 
"/info.json" + ) as json_file: + data = json.load(json_file) + ret = { + "id": data["id"], + "name": data["name"], + "filePath": data["filePath"], + "creationDate": data["creationDate"], + "updateDate": data["updateDate"], + "nbSamples": len(data["samples"]), + } + + # Add the request Id if it exist + if "requestId" in data: + ret["requestId"] = data["requestId"] + + return ret + + +def get_selection_id_list(project_id, selectionId): + if not selection_exist(project_id, selectionId): + raise Exception("Selection " + selectionId + " doesn't exist") + + with open( + DATA_PATH + project_id + "/selections/" + selectionId + "/info.json" + ) as json_file: + data = json.load(json_file) + return data["samples"] + + +# def getSelectionsSamples(project_id, selectionIds: list, intersection: bool) -> set: +# if len(selectionIds) == 0: +# return [] + +# samples = set(get_selection_id_list(project_id, selectionIds[0])) +# for selectionId in selectionIds[1:]: +# if intersection: # intersection of the selections samples +# samples.intersection_update( +# get_selection_id_list(project_id, selectionId)) + +# if len(samples) == 0: +# return [] +# else: # Union of the model results samples +# samples = samples.union( +# get_selection_id_list(project_id, selectionId)) + +# return samples + + +def delete_selection(project_id, selection_id): + pythonModuleUtils.deleteDir(DATA_PATH + project_id + "/selections/" + selection_id) + projects.update_project(project_id) diff --git a/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/tags.py b/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/tags.py new file mode 100644 index 000000000..13f6ce476 --- /dev/null +++ b/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/tags.py @@ -0,0 +1,109 @@ +import os +from backend.modules.dataProviders.pythonDataProvider.dataUtils import pythonModuleUtils, hash + +DATA_PATH = pythonModuleUtils.DATA_PATH + + +def getTagsIds(projectId): + try: + return os.listdir(DATA_PATH + projectId + "/tags") + except FileNotFoundError: + os.mkdir(DATA_PATH + projectId + "/tags") + return [] + + +def getTags(projectId): + tagIds = getTagsIds(projectId) + tags = [] + for tagId in tagIds: + tag = getTagById(projectId, tagId) + # Get the number of sample tagged + tag["nbSamples"] = len(tag["tags"].keys()) + # remove the tag values + tag.pop("tags", None) + tags.append(tag) + return tags + + +def getTagById(projectId, tagId): + if tagId not in getTagsIds(projectId): + return None + + return pythonModuleUtils.readJsonFile( + DATA_PATH + projectId + "/tags/" + tagId + "/info.json" + ) + + +def getTagByName(projectId, tagName): + for tagId in getTagsIds(projectId): + tag = getTagById(projectId, tagId) + if tag["name"] == tagName: + return tag + return None + + +def updateTag(projectId, tagName, tagHash): + # TODO change to tagId + # ParametersCheck + projectHashMap = hash.getHashmap(projectId) + + for sampleHash in tagHash.keys(): + if sampleHash not in projectHashMap: + return "SampleHash not found in the project samples", 404 + + tag = getTagByName(projectId, tagName) + if tag: + # Update tag + for sampleHash in tagHash.keys(): + if tagHash[sampleHash] == 0: + tag["tags"].pop(sampleHash, None) + else: + tag["tags"][sampleHash] = tagHash[sampleHash] + + tag["updateDate"] = pythonModuleUtils.timeNow() + pythonModuleUtils.writeJsonFile( + DATA_PATH + projectId + "/tags/" + tag["id"] + "/info.json", tag + ) + return tag, 200 + else: + # Create tag + # tag ID + tagId = 
pythonModuleUtils.clean_filename(tagName) + if len(tagId) == 0: + tagId = pythonModuleUtils.timeNow() + + nbTag = 1 + while tagId in getTagsIds(projectId): + tagId = pythonModuleUtils.clean_filename(tagName) + "_" + str(nbTag) + nbTag += 1 + + # Save tag + os.mkdir(DATA_PATH + projectId + "/tags/" + tagId) + now = pythonModuleUtils.timeNow() + tagInfo = { + "id": tagId, + "name": tagName, + "tags": tagHash, + "creationDate": now, + "updateDate": now, + } + + pythonModuleUtils.writeJsonFile( + DATA_PATH + projectId + "/tags/" + tagId + "/info.json", tagInfo + ) + + return tagInfo, 200 + + +def deleteTag(projectId, tagId): + pythonModuleUtils.deleteDir(DATA_PATH + projectId + "/tags/" + tagId) + + +def getSamplesHash(projectId, tagId, tagValue): + tag = getTagById(projectId, tagId) + hash = [] + for sampleHash in tag["tags"].keys(): + if tag["tags"][sampleHash] == tagValue: + hash.append(sampleHash) + + return hash diff --git a/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/tree.py b/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/tree.py new file mode 100644 index 000000000..08fec8241 --- /dev/null +++ b/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/tree.py @@ -0,0 +1,332 @@ +import ujson as json +import os + +from backend.modules.dataProviders.pythonDataProvider.dataUtils import ( + pythonModuleUtils, + projects, + models, + hash, +) + +DATA_PATH = pythonModuleUtils.DATA_PATH +DATA_TYPES = pythonModuleUtils.DATA_TYPES + +# def getFirstLevelBlock(projectId, blockId): +# blockList = os.listdir(DATA_PATH + projectId + '/blocks') + +# if (blockId not in blockList): +# return -1 + +# with open(DATA_PATH + projectId + '/blocks/' + blockId + '/info.json')\ +# as json_file: +# data = json.load(json_file) + +# return data + + +# def getBlockTree(projectId, path, depth): + +# childrenBlockNames = utils.listDir(path) +# childrenInfo = [] + +# if depth > 0: +# # Get the children blocks info +# for name in childrenBlockNames: +# childrenInfo.append(getBlockTree( +# projectId, path + name + "/", depth - 1)) + +# with open(path + 'info.json') as json_file: +# data = json.load(json_file) +# data['childrenInfoList'] = childrenInfo + +# return data + + +def add_block_tree(projectId, data): + # Loading project block info + bli = projects.get_project_block_level_info(projectId) + + # going through the tree to check for error, store the block to add + blockToAdd = [] + + try: + for block in data["blockTree"]: + addBlockTree(projectId, block, bli, blockToAdd, 0, "") + except KeyError as e: + print(str(e)) + print("badInputTree") + return str(e), 403 + + if len(blockToAdd) == 0: + return "No block added", 201 + + # Store the blocks and the hash map + sampleLevel = len(bli) - 1 + hashToSave = {} + for block in blockToAdd: + if block["level"] == sampleLevel: + # Sample level, creating hash + sampleHash = hash.hash(block["path"]) + block["id"] = sampleHash + hashToSave[sampleHash] = block["path"] + + addBlock(projectId, block) + + # Save hashmap + hash.addToSampleHashmap(projectId, hashToSave) + + projects.update_project(projectId) + return str(len(blockToAdd)) + " added blocks" + + +def getBlockInfo(blockLevel, blockInfo): + """ + Convert the block info to fill the sampleInfo list with a colonName:value dict + """ + print("get block info") + print(blockLevel) + print(blockInfo) + ret = {} + ret[blockLevel["name"]] = blockInfo["name"] + + for dataType in pythonModuleUtils.DATA_TYPES: + if dataType in blockLevel: + for i, column in 
enumerate(blockLevel[dataType]): + ret[column["name"]] = blockInfo[dataType][i] + + return ret + + +def getBlockTreeFromSamples(projectId, samples: list): + blocksData = [] + addedBlocks = [] + + for samplePath in samples: + try: + sampleBlocksData, endLevel = __getBlockTreeFromSample( + projectId, samplePath, addedBlocks + ) + + if endLevel == 0: + # root block, should be here after added the first sample + blocksData.append(sampleBlocksData) + else: + # Not a root block, we need to find where to insert it + # First, find the root + cur = next( + block + for block in blocksData + if block["path"] in sampleBlocksData["path"] + ) + + # Then, find the level + for i in range(0, endLevel - 1): + cur = next( + child + for child in cur["childrenInfoList"] + if child["path"] in sampleBlocksData["path"] + ) + + cur["childrenInfoList"].append(sampleBlocksData) + except StopIteration: + # TODO : Find why this happens of certain projects + print("Warning, the sample " + samplePath + " doesn't have a root block") + + return blocksData + + +def __getBlockTreeFromSample(projectId, blockPath, addedBlocks): + """ + Go from the bottom to the top of a tree + if at the top or, if block already added, return + + """ + # Add the block we are in + addedBlocks.append(blockPath) + + with open( + DATA_PATH + projectId + "/blocks/" + blockPath + "/info.json" + ) as sampleData: + info = json.load(sampleData) + + if info["level"] == 0: + # Top of the tree, end of the recursively + return info, 0 + + if info["parentPath"] in addedBlocks: + # The block to the top is already added, there is no need to go further + return info, info["level"] + + # Climbing up the tree of one level + parentInfo, endLevel = __getBlockTreeFromSample( + projectId, info["parentPath"], addedBlocks + ) + + # Let's add our info to the parents + cur = parentInfo + + for i in range(endLevel, info["level"] - 1): + cur = cur["childrenInfoList"][0] + cur["childrenInfoList"] = [info] + + return parentInfo, endLevel + + +def addResultsToTree(projectId, tree: list, modelIds: list, commonOnly: bool) -> dict: + """ + Add, in the tree samples, the results from a model id list + """ + + # Load all the model results + modelResults = {} + for modelId in modelIds: + modelResults[modelId] = models.getModelResults(projectId, modelId) + + # Get the project block structure + proBs = projects.get_project_block_level_info(projectId) + sampleLevel = len(proBs) - 1 + + # Add in the samples the results + for rootBlock in tree: + __addResultsToABlock(rootBlock, modelResults, sampleLevel, commonOnly) + + return tree + + +def __addResultsToABlock(block, modelResults, sampleLevel, commonOnly): + if block["level"] == sampleLevel: + # Adding the results to the sample + block["results"] = {} + for modelId in modelResults: + # If no more than 1 model and commonOnly, no need to check if + # sample exist in tree + if ( + commonOnly + or len(modelResults) == 1 + or block["path"] in modelResults[modelId] + ): + block["results"][modelId] = modelResults[modelId][block["path"]] + return + + for child in block["childrenInfoList"]: + __addResultsToABlock(child, modelResults, sampleLevel, commonOnly) + + +# Add samples to a tree +def addBlockTree(projectId, block, blockLevelInfo, blockToAdd, level, parentPath): + __checkBlockCompliant(block, level, blockLevelInfo) + + # check if block exist + data = findBlockInfo(projectId, parentPath + block["name"]) + if data is None: + # BLock doesn't exist + blockToAdd.append(__createBlock(projectId, block, level, parentPath)) + + path = parentPath 
+ block["name"] + "/" + + if level < len(blockLevelInfo) - 1: + for child in block["childrenInfoList"]: + addBlockTree(projectId, child, blockLevelInfo, blockToAdd, level + 1, path) + + +def findBlockInfo(projectId, blockPath): + curPath = DATA_PATH + projectId + "/blocks/" + blockPath + + if not os.path.isdir(curPath): + return None + + with open(curPath + "/info.json", "r") as json_file: + data = json.load(json_file) + + return data + + +def __checkBlockCompliant(block, level, blockLevelInfo): + # Check if a block is correct (name exist, levelInfo coherence, etc) + + if "name" not in block: + raise KeyError("A block at level " + str(level) + " is missing his name") + + # TODO check name valid (no / & .) + + if "childrenInfoList" not in block and level < (len(blockLevelInfo) - 1): + raise KeyError( + "Block : " + block["name"] + " has no childrenInfoList properties" + ) + + if level < len(blockLevelInfo) - 1 and len(block["childrenInfoList"]) == 0: + raise KeyError( + "Block : " + + block["name"] + + " has no child block, the tree need to be complete" + ) + + levelInfo = blockLevelInfo[level] + + for type_ in DATA_TYPES: + if type_ in levelInfo and len(levelInfo[type_]) > 0: + if type_ not in block: + raise KeyError( + "At least one value of type " + + type_ + + " is required in the block : " + + levelInfo["name"] + ) + + if len(block[type_]) != len(levelInfo[type_]): + raise KeyError( + "Exactly " + + str(len(levelInfo[type_])) + + " " + + type_ + + " required in the block : " + + levelInfo["name"] + ) + + # TODO Implement column default + + for i, col in enumerate(levelInfo[type_]): + if col["type"] == "integer" and type(block[type_][i]) is str: + raise KeyError( + "Col " + + col["name"] + + " require an integer in the block : " + + levelInfo["name"] + ) + + +def __createBlock(projectId, block, level, parentPath): + blockPath = parentPath + block["name"] + "/" + + debiaiBlock = { + "id": block["name"], + "name": block["name"], + "path": blockPath, + "parentPath": parentPath, + "level": level, + # "creationDate": str(date.today()), + # "updateDate": str(date.today()), + # "version": "0.0.0", + # "metaDataList": {}, + } + + for type_ in DATA_TYPES: + if type_ in block: + debiaiBlock[type_] = block[type_] + + return debiaiBlock + + +def addBlock(projectId, block): + # create the block folder and his info.json file + try: + os.mkdir(DATA_PATH + projectId + "/blocks/" + block["path"]) + pythonModuleUtils.writeJsonFile( + DATA_PATH + projectId + "/blocks/" + block["path"] + "/info.json", block + ) + except FileExistsError: + print( + "Warning : The block " + + block["path"] + + " already exist, this is not supposed to append" + ) diff --git a/build/lib/backend/modules/dataProviders/webDataProvider/WebDataProvider.py b/build/lib/backend/modules/dataProviders/webDataProvider/WebDataProvider.py new file mode 100644 index 000000000..3ff8afb3a --- /dev/null +++ b/build/lib/backend/modules/dataProviders/webDataProvider/WebDataProvider.py @@ -0,0 +1,106 @@ +from backend.modules.dataProviders.DataProvider import DataProvider +from backend.modules.dataProviders.webDataProvider.useCases.data import ( + get_project_id_list, + get_project_samples, +) +from backend.modules.dataProviders.webDataProvider.useCases.projects import ( + get_all_projects_from_data_provider, + get_single_project_from_data_provider, + delete_project, +) +from backend.modules.dataProviders.webDataProvider.useCases.models import ( + get_model_results, + get_models_info, + get_model_result_id, + delete_model, +) +import 
backend.modules.dataProviders.webDataProvider.useCases.selections as useCaseSelections +from backend.modules.dataProviders.webDataProvider.http.api import get_info, get_status +from backend.modules.dataProviders.webDataProvider.cache.cache import Cache + + +# WebDataProvider class, allow to get data from a web data-provider +class WebDataProvider(DataProvider): + def __init__(self, url, name): + self.url = url + self._name = name + self.alive = None + + # Init cache + self.cache = Cache() + + @property + def name(self): + return self._name + + @property + def type(self): + return "Web" + + # Todo api call Info (new info) + def is_alive(self): + self.alive = True if get_status(self.url) is True else False + return self.alive + + def get_info(self): + return get_info(self.url) + + # ==== Projects ==== + def get_projects(self): + # Request method to get projects overview + # Return Arr[object{ id, name, nb_samples, nb_models, nb_selections, + # update_time, creation_time}] + return get_all_projects_from_data_provider(self.url, self.name) + + def get_project(self, id_project): + # Request method to get projects overview + # Return object{ id, name, nb_samples, nb_models, nb_selections, + # update_time, creation_time} + return get_single_project_from_data_provider(self.url, self.name, id_project) + + def delete_project(self, project_id): + return delete_project(self.url, project_id) + + def get_id_list(self, project_id, analysis, _from=None, _to=None): + # http Request on dp to get id list + # Return Arr[id] + return get_project_id_list( + self.url, self.cache, project_id, analysis, _from, _to + ) + + def get_samples(self, project_id, analysis, id_list): + # http Request get full sample + # Return object { id: [data]} + return get_project_samples(self.url, project_id, analysis, id_list) + + # ==== Selections ==== + def get_selections(self, project_id): + # Get selections on project + # Return arr[object{ id, name, creation_time, nb_samples}] + return useCaseSelections.get_project_selections(self.url, project_id) + + def get_selection_id_list(self, project_id, selection_id): + return useCaseSelections.get_id_list_from_selection( + self.url, self.cache, project_id, selection_id + ) + + def create_selection(self, project_id, name, id_list, request_id=None): + return useCaseSelections.create_selection( + self.url, project_id, name, id_list, request_id + ) + + def delete_selection(self, project_id, selection_id): + return useCaseSelections.delete_selection(self.url, project_id, selection_id) + + # ==== Models ==== + def get_models(self, project_id): + return get_models_info(self.url, project_id) + + def get_model_results_id_list(self, project_id, model_id): + return get_model_result_id(self.url, self.cache, project_id, model_id) + + def get_model_results(self, project_id, model_id, sample_list): + return get_model_results(self.url, project_id, model_id, sample_list) + + def delete_model(self, project_id, model_id): + return delete_model(self.url, project_id, model_id) diff --git a/build/lib/backend/modules/dataProviders/webDataProvider/__init__.py b/build/lib/backend/modules/dataProviders/webDataProvider/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/build/lib/backend/modules/dataProviders/webDataProvider/cache/__init__.py b/build/lib/backend/modules/dataProviders/webDataProvider/cache/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/build/lib/backend/modules/dataProviders/webDataProvider/cache/cache.py 
b/build/lib/backend/modules/dataProviders/webDataProvider/cache/cache.py new file mode 100644 index 000000000..68b2c05f7 --- /dev/null +++ b/build/lib/backend/modules/dataProviders/webDataProvider/cache/cache.py @@ -0,0 +1,90 @@ +# This service is used to cache data from the web data provider +# It's used to avoid multiple requests to the web data provider +# It will mainly save the id list of samples, selections and models results +# The ability to cache and the time to live are configurable in the config file + +from backend.config.init_config import get_config +from cacheout import Cache as CacheoutCache + + +class Cache: + def __init__(self): + # Get config + self.config = get_config() + + self.cache_enabled = self.config["WEB_DATA_PROVIDERS_CONFIG"]["cache"] + self.cache_ttl = self.config["WEB_DATA_PROVIDERS_CONFIG"]["cache_duration"] + + # Init cache + self.project_id_list_cache = CacheoutCache(maxsize=256, ttl=self.cache_ttl) + # __: [...] + # _total: [...] + + self.selection_id_list_cache = CacheoutCache(maxsize=256, ttl=self.cache_ttl) + # _: [...] + + self.model_result_id_list_cache = CacheoutCache(maxsize=256, ttl=self.cache_ttl) + # _: [...] + + # Project id list + def get_key(self, id_project, _from=None, _to=None): + if _from is None or _to is None: + return "{}_total".format(id_project) + else: + return "{}_{}_{}".format(id_project, _from, _to) + + def get_id_list(self, id_project, _from=None, _to=None): + if not self.cache_enabled: + return None + + key = self.get_key(id_project, _from, _to) + + return self.project_id_list_cache.get(key) + + def set_id_list(self, id_project, id_list, _from=None, _to=None): + if not self.cache_enabled: + return + + key = self.get_key(id_project, _from, _to) + + self.project_id_list_cache.set(key, id_list) + + # Selection id list + def get_selection_key(self, id_project, id_selection): + return "{}_{}".format(id_project, id_selection) + + def get_selection_id_list(self, id_project, id_selection): + if not self.cache_enabled: + return None + + key = self.get_selection_key(id_project, id_selection) + + return self.selection_id_list_cache.get(key) + + def set_selection_id_list(self, id_project, id_selection, id_list): + if not self.cache_enabled: + return + + key = self.get_selection_key(id_project, id_selection) + + self.selection_id_list_cache.set(key, id_list) + + # Model result id list + def get_model_result_key(self, id_project, id_model): + return "{}_{}".format(id_project, id_model) + + def get_model_result_id_list(self, id_project, id_model): + if not self.cache_enabled: + return None + + key = self.get_model_result_key(id_project, id_model) + + return self.model_result_id_list_cache.get(key) + + def set_model_result_id_list(self, id_project, id_model, id_list): + if not self.cache_enabled: + return + + key = self.get_model_result_key(id_project, id_model) + + self.model_result_id_list_cache.set(key, id_list) diff --git a/build/lib/backend/modules/dataProviders/webDataProvider/http/__init__.py b/build/lib/backend/modules/dataProviders/webDataProvider/http/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/build/lib/backend/modules/dataProviders/webDataProvider/http/api.py b/build/lib/backend/modules/dataProviders/webDataProvider/http/api.py new file mode 100644 index 000000000..1b32e7fa4 --- /dev/null +++ b/build/lib/backend/modules/dataProviders/webDataProvider/http/api.py @@ -0,0 +1,222 @@ +import requests +import json +from backend.modules.dataProviders.DataProviderException import DataProviderException + + +# 
Todo : change info if in not alive anymore +def get_status(url): + try: + r = requests.get(url + "/info") + + if r.status_code != 200: + return False + + # Check content type + content = get_http_response(r) + + if content is None: + return False # we are expecting a dict + + return True + + except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): + return False + except requests.exceptions.InvalidURL: + raise DataProviderException("Invalid URL", 400) + + +def get_info(url): + try: + r = requests.get(url + "/info") + return get_http_response(r) + except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): + return None + + +# ==== Projects ==== +def get_projects(url): + try: + r = requests.get(url + "/projects") + return get_http_response(r) + except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): + return None + + +def get_project(url, id_project): + try: + r = requests.get(url + "/projects/" + id_project) + return get_http_response(r) + except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): + return None + + +def get_id_list(url, id_project, analysis, _from=None, _to=None): + try: + if _from is not None and _to is not None: + url = ( + url + + "/projects/" + + id_project + + "/data-id-list?from={}&to={}&analysisId={}".format( + _from, _to, analysis["id"] + ) + ) + else: + url = ( + url + + "/projects/" + + id_project + + "/data-id-list?analysisId={}".format(analysis["id"]) + ) + + if analysis["start"]: + url += "&analysisStart={}".format(str(analysis["start"]).lower()) + if analysis["end"]: + url += "&analysisEnd={}".format(str(analysis["end"]).lower()) + + r = requests.get(url) + + return get_http_response(r) + except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): + print( + "Error getting data id list from {} on project {}".format(url, id_project) + ) + return [] + + +def get_samples(url, id_project, analysis, id_list): + try: + rurl = ( + url + + "/projects/{}/data?analysisId={}&analysisStart={}&analysisEnd={}".format( + id_project, + analysis["id"], + str(analysis["start"]).lower(), + str(analysis["end"]).lower(), + ) + ) + + r = requests.post(rurl, json=id_list) + return get_http_response(r) + except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): + raise Exception( + "Could not get the data provider {} data for project {}".format( + url, id_project + ) + ) + + +def delete_project(url, id_project): + try: + r = requests.delete(url + "/projects/" + id_project) + return get_http_response(r) + except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): + return None + + +# ==== Selections ==== +def get_selections(url, id_project): + try: + r = requests.get(url + "/projects/{}/selections".format(id_project)) + return get_http_response(r) + except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): + return None + + +def post_selection(url, id_project, data): + try: + r = requests.post(url + "/projects/{}/selections".format(id_project), json=data) + return get_http_response(r) + except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): + return None + + +def get_selection_id(url, id_project, id_selection): + try: + r = requests.get( + url + + "/projects/{}/selections/{}/selected-data-id-list".format( + id_project, id_selection + ) + ) + + return get_http_response(r) + except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): + return None + + +def delete_selection(url, id_project, id_selection): + try: + r = 
requests.delete( + url + "/projects/{}/selections/{}".format(id_project, id_selection) + ) + + return get_http_response(r) + except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): + return None + + +# ==== Models ==== +def get_models(url, id_project): + try: + r = requests.get(url + "/projects/{}/models".format(id_project)) + return get_http_response(r) + except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): + return None + + +def get_model_result_id_list(url, project_id, model_id): + try: + r = requests.get( + url + + "/projects/{}/models/{}/evaluated-data-id-list".format( + project_id, model_id + ) + ) + return get_http_response(r) + except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): + return None + + +def get_model_result(url, id_project, id_model, id_sample_list): + try: + r = requests.post( + url + "/projects/{}/models/{}/results".format(id_project, id_model), + json=id_sample_list, + ) + return get_http_response(r) + except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): + return None + + +def delete_model(url, id_project, id_model): + try: + r = requests.delete(url + "/projects/{}/models/{}".format(id_project, id_model)) + return get_http_response(r) + except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): + return None + + +# ==== Utils ==== +def get_http_response(response): + try: + if response.raise_for_status() is None: + return get_valid_response(response) + except requests.exceptions.HTTPError: + return get_error_response(response) + + +def get_valid_response(response): + if response.status_code == 204: + return True + try: + return response.json() + except json.decoder.JSONDecodeError: + return + + +def get_error_response(response): + if response.status_code == 500: + raise DataProviderException("Data Provider unexpected Error", 500) + + raise DataProviderException(response.text, response.status_code) diff --git a/build/lib/backend/modules/dataProviders/webDataProvider/useCases/__init__.py b/build/lib/backend/modules/dataProviders/webDataProvider/useCases/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/build/lib/backend/modules/dataProviders/webDataProvider/useCases/data.py b/build/lib/backend/modules/dataProviders/webDataProvider/useCases/data.py new file mode 100644 index 000000000..eb3ba8d63 --- /dev/null +++ b/build/lib/backend/modules/dataProviders/webDataProvider/useCases/data.py @@ -0,0 +1,23 @@ +import backend.modules.dataProviders.webDataProvider.http.api as api + +# +# UseCase folder role is the middleware between class methods and http requests +# It's used to make all changes in data we took from DP and send it back to +# the class/controller +# + + +def get_project_id_list(url, cache, id_project, analysis, _from=None, _to=None): + id_list = cache.get_id_list(id_project, _from, _to) + + if id_list is None: + id_list = api.get_id_list(url, id_project, analysis, _from, _to) + cache.set_id_list(id_project, id_list, _from, _to) + + return id_list + + +def get_project_samples(url, id_project, analysis, id_list): + data = api.get_samples(url, id_project, analysis, id_list) + + return data diff --git a/build/lib/backend/modules/dataProviders/webDataProvider/useCases/models.py b/build/lib/backend/modules/dataProviders/webDataProvider/useCases/models.py new file mode 100644 index 000000000..6b646b261 --- /dev/null +++ b/build/lib/backend/modules/dataProviders/webDataProvider/useCases/models.py @@ -0,0 +1,58 @@ +import 
backend.modules.dataProviders.webDataProvider.http.api as api +from backend.modules.dataProviders.DataProviderException import DataProviderException + + +def get_models_info(url, project_id): + # Models + try: + models = api.get_models(url, project_id) + debiai_models = [] + for model_in in models: + if "id" not in model_in: + continue + model = { + "id": model_in["id"], + "metadata": None, + "creationDate": None, + } + + # Adding name and nbResults + model["name"] = model_in["name"] if "name" in model_in else model_in["id"] + if "nbResults" in model_in: + model["nbResults"] = model_in["nbResults"] + + # Adding metadata + if "metadata" in model_in: + model["metadata"] = model_in["metadata"] + + # Adding creationDate + if "creationDate" in model_in: + model["creationDate"] = model_in["creationDate"] + + debiai_models.append(model) + + return debiai_models + except DataProviderException: + # The route may not be implemented in the data provider + return [] + + +def get_model_result_id(url, cache, project_id, model_id): + # Todo : Add route to call Id results for a Model (DP) + # Todo : Add Some formatting if data has to change + + id_list = cache.get_model_result_id_list(project_id, model_id) + + if id_list is None: + id_list = api.get_model_result_id_list(url, project_id, model_id) + cache.set_model_result_id_list(project_id, model_id, id_list) + + return id_list + + +def get_model_results(url, project_id, model_id, sample_list): + return api.get_model_result(url, project_id, model_id, sample_list) + + +def delete_model(url, project_id, model_id): + return api.delete_model(url, project_id, model_id) diff --git a/build/lib/backend/modules/dataProviders/webDataProvider/useCases/projects.py b/build/lib/backend/modules/dataProviders/webDataProvider/useCases/projects.py new file mode 100644 index 000000000..c3dad454b --- /dev/null +++ b/build/lib/backend/modules/dataProviders/webDataProvider/useCases/projects.py @@ -0,0 +1,131 @@ +import backend.modules.dataProviders.webDataProvider.http.api as api + +from backend.modules.dataProviders.webDataProvider.useCases.models import get_models_info +from backend.modules.dataProviders.webDataProvider.useCases.selections import ( + get_project_selections, +) + + +def get_all_projects_from_data_provider(url, name): + projects = api.get_projects(url) + project_list = [] + + if not projects: + return + + for project_id in projects: + if "nbSamples" not in projects[project_id]: + projects[project_id]["nbSamples"] = None + + if "nbModels" not in projects[project_id]: + projects[project_id]["nbModels"] = None + + if "nbSelections" not in projects[project_id]: + projects[project_id]["nbSelections"] = None + + if "name" not in projects[project_id]: + projects[project_id]["name"] = project_id + + if "creationDate" not in projects[project_id]: + projects[project_id]["creationDate"] = None + + if "updateDate" not in projects[project_id]: + projects[project_id]["updateDate"] = None + + project_list.append( + { + "id": project_id, + "dataProvider": name, + "name": projects[project_id]["name"], + "nbModels": projects[project_id]["nbModels"], + "nbSamples": projects[project_id]["nbSamples"], + "nbSelections": projects[project_id]["nbSelections"], + "creationDate": projects[project_id]["creationDate"], + "updateDate": projects[project_id]["updateDate"], + } + ) + + return project_list + + +def get_single_project_from_data_provider(url, data_provider_name, id_project): + project = api.get_project(url, id_project) + + # Check the project columns + project_columns = 
get_project_columns(project) + + # Add selections + selections = get_project_selections(url, id_project) + + # Add models + models = get_models_info(url, id_project) + + # Check nbSamples + if "nbSamples" in project: + nbSamples = project["nbSamples"] + else: + nbSamples = None + + # Check creationDate + if "creationDate" in project: + creationDate = project["creationDate"] + else: + creationDate = None + + # Check updateDate + if "updateDate" in project: + updateDate = project["updateDate"] + else: + updateDate = None + + # Converting views to DebiAI projects + return { + "id": id_project, + "name": project["name"] if "name" in project else id_project, + "dataProvider": data_provider_name, + "columns": project_columns, + "resultStructure": project["expectedResults"], + "nbModels": len(models), + "nbSamples": nbSamples, + "nbSelections": len(selections), + "creationDate": creationDate, + "updateDate": updateDate, + "selections": selections, + "models": models, + } + + +def get_project_columns(project): + project_columns = [] + # Expected project["columns"] example : + # [ + # { "name": "storage", "category": "other" }, + # { "name": "age", "category": "context" }, + # { "name": "path", "category": "input", group: "image" }, + # { "name": "label", "category": "groundtruth", group: "image" }, + # { "name": "type" }, # category is not specified, it will be "other" + # ] + if "columns" in project: + for column in project["columns"]: + col = {"name": column["name"]} + + if "category" in column: + col["category"] = column["category"] + else: + col["category"] = "other" + + if "type" in column: + col["type"] = column["type"] + else: + col["type"] = "auto" + + if "group" in column: + col["group"] = column["group"] + + project_columns.append(col) + + return project_columns + + +def delete_project(url, project_id): + api.delete_project(url, project_id) diff --git a/build/lib/backend/modules/dataProviders/webDataProvider/useCases/selections.py b/build/lib/backend/modules/dataProviders/webDataProvider/useCases/selections.py new file mode 100644 index 000000000..794328d4c --- /dev/null +++ b/build/lib/backend/modules/dataProviders/webDataProvider/useCases/selections.py @@ -0,0 +1,61 @@ +import backend.modules.dataProviders.webDataProvider.http.api as api +from backend.modules.dataProviders.DataProviderException import DataProviderException + + +def get_project_selections(url, project_id): + try: + selections = api.get_selections(url, project_id) + + if selections is None: + print(f"Error: No selections found for project {project_id} on {url}") + raise DataProviderException("No selections found", 404) + + debiai_selections = [] + for selection in selections: + if "id" not in selection or selection["id"] is None: + print(f"Error: No id for selection: {selection}") + raise DataProviderException( + "An id is missing in the given selection", 400 + ) + + selection_to_add = { + "name": selection["name"] if "name" in selection else selection["id"], + "id": selection["id"], + } + + if "nbSamples" in selection: + selection_to_add["nbSamples"] = selection["nbSamples"] + if "creationDate" in selection: + selection_to_add["creationDate"] = selection["creationDate"] + if "updateDate" in selection: + selection_to_add["updateDate"] = selection["updateDate"] + + debiai_selections.append(selection_to_add) + return debiai_selections + + except DataProviderException: + # The route may not be implemented in the data provider + return [] + + +def get_id_list_from_selection(url, cache, project_id, selection_id): + id_list = 
cache.get_selection_id_list(project_id, selection_id)
+
+    if id_list is None:
+        id_list = api.get_selection_id(url, project_id, selection_id)
+        cache.set_selection_id_list(project_id, selection_id, id_list)
+
+    return id_list
+
+
+def create_selection(url, project_id, name, id_list, request_id):
+    data = {"idList": id_list, "name": name}
+
+    if request_id is not None:
+        data["request"] = request_id
+
+    return api.post_selection(url, project_id, data)
+
+
+def delete_selection(url, project_id, selection_id):
+    return api.delete_selection(url, project_id, selection_id)
diff --git a/build/lib/backend/modules/exportMethods/__init__.py b/build/lib/backend/modules/exportMethods/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/build/lib/backend/modules/exportMethods/exportClass.py b/build/lib/backend/modules/exportMethods/exportClass.py
new file mode 100644
index 000000000..f5eb49284
--- /dev/null
+++ b/build/lib/backend/modules/exportMethods/exportClass.py
@@ -0,0 +1,43 @@
+import uuid
+
+#############################################################################
+#
+# Export type and method classes
+#
+# These classes are used to export data with a specific export type
+#
+#############################################################################
+
+
+class ExportType:
+    name = None
+    parameters_definition = []
+    export_method_class = None
+
+    def to_dict(self):
+        return {"name": self.name, "parameters": self.parameters_definition}
+
+
+class ExportMethod:
+    id = None
+    type = None  # ExportType object
+    name = None
+    parameters = []
+
+    deletable = False
+
+    def __init__(self, type, name, parameters):
+        self.id = uuid.uuid4().hex
+        self.type = type
+        self.name = name
+        self.parameters = parameters
+
+    def to_dict(self):
+        return {
+            "id": self.id,
+            "type": self.type.name,
+            "name": self.name,
+            "parameters": self.parameters,
+            "parameterNames": self.type.parameters_definition,
+            "deletable": self.deletable,
+        }
diff --git a/build/lib/backend/modules/exportMethods/exportUtils.py b/build/lib/backend/modules/exportMethods/exportUtils.py
new file mode 100644
index 000000000..d7502bcd6
--- /dev/null
+++ b/build/lib/backend/modules/exportMethods/exportUtils.py
@@ -0,0 +1,200 @@
+from backend.config.init_config import get_config
+import backend.modules.dataProviders.dataProviderManager as data_provider_manager
+from backend.modules.dataProviders.DataProviderException import DataProviderException
+import time
+
+from backend.modules.exportMethods.methods.kafkaUtils import KafkaExportType
+from backend.modules.exportMethods.methods.postUtils import PostExportType
+
+#############################################################################
+#
+# Export utils
+#
+# DebiAI allows to export data or selections to other services with
+# different methods.
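+#
+# Illustrative only (the server and topic values below are assumptions, not
+# defaults): an EXPORT_METHODS_LIST config entry maps a method name to a
+# comma-separated string such as "kafka, localhost:9092, debiai-exports",
+# where the first token is the export type and the remaining tokens are the
+# parameters passed to that type, as parsed by load_export_methods below.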
+# This utils load and store the methods for all the projects +# +############################################################################# + +# The export types are the different types of export methods that we can create +# They are used to create the export methods +export_types = [KafkaExportType(), PostExportType()] + +# The export methods are the different methods created from the types +# They we can used to export data +export_methods = [] + + +# Export types +def get_export_type(typeName): + return [type for type in export_types if type.name == typeName][0] + + +def type_exist(typeName): + return typeName in [type.name for type in export_types] + + +# Export utils +def get_export_methods(): + # Return all the export methods as a list of dictionaries + return [method.to_dict() for method in export_methods] + + +def get_export_method(methodId): + # Check the method id + if not method_exist(methodId): + raise Exception("Export method " + methodId + " not found") + + return [method for method in export_methods if method.id == methodId][0] + + +def method_exist(methodId): + return methodId in [method.id for method in export_methods] + + +def load_export_methods(): + global export_methods + print("================== EXPORT METHODS ==================") + + # Load the export methods from the config file + config = get_config() + + if "EXPORT_METHODS_LIST" in config: + print(" - Loading export methods from config file") + config_export_methods = config["EXPORT_METHODS_LIST"] + + for method in config_export_methods: + print( + " Adding method " + method, "[", config_export_methods[method], "]" + ) + + try: + parameters = config_export_methods[method].split(",") + if len(parameters) == 0: + raise "method " + method + " has no parameters, aborting" + + # Trim parameters + for i in range(len(parameters)): + parameters[i] = "".join(parameters[i].rstrip().lstrip()) + + export_type_name = parameters[0] + export_method = create_export_method( + method, export_type_name, parameters[1:] + ) + + if config["EXPORT_METHODS_CONFIG"]["deletion"]: + # The export method created from the config file are deletable + export_method.deletable = True + + export_methods.append(export_method) + except Exception as e: + print("Error while configuring method " + method + ": " + str(e)) + + if len(export_methods) == 0: + print(" No export method configured") + + +def add_export_method(data): + # Check the method type + if not type_exist(data["type"]): + raise Exception("Method type " + data["type"] + " not found") + + export_method = create_export_method(data["name"], data["type"], data["parameters"]) + + config = get_config() + if config["EXPORT_METHODS_CONFIG"]["deletion"]: + # The export method created from the config file are deletable + export_method.deletable = True + + export_methods.append(export_method) + + return export_method.to_dict() + + +def create_export_method(name, type, parameters): + # Check the method type + if not type_exist(type): + raise Exception( + "Export type '" + + type + + "' isn't supported, only " + + str([type.name for type in export_types]) + + " are supported" + ) + + # Get the export type + export_type = get_export_type(type) + + # Create the method + return export_type.export_method_class(name, parameters) + + +def delete_export_method(method_id): + global export_methods + + # Check the method id + if not method_exist(method_id): + raise Exception("The export method wasn't found") + + # Delete the method + export_methods = [method for method in export_methods if method.id != 
method_id] + return "method " + method_id + " deleted" + + +# Export data +def exportSelection(dataProviderId, projectId, data): + method_id = data["exportMethodId"] + + # Check the method id + if not method_exist(method_id): + raise Exception("method " + method_id + " not found") + + export_method = get_export_method(method_id) + + # Creation of the data selection to export + try: + data_provider = data_provider_manager.get_single_data_provider(dataProviderId) + project = data_provider.get_project(projectId) + except DataProviderException as e: + return e.message, e.status_code + + id_list = [] + + for id in data["sampleHashList"]: + id_list.append({"id": id}) + + data_to_export = { + "origin": "DebiAI", + "type": "selection", + "projectId": projectId, + "data_provider_id": dataProviderId, + "selection_name": data["selectionName"], + "date": time.time(), + "sample_ids": id_list, + } + + # Project name + if "name" in project: + data_to_export["project_name"] = project["name"] + + # Annotation extra value + if "annotationValue" in data and data["annotationValue"] != "": + data_to_export["value"] = data["annotationValue"] + + # Export the data + export_method.export(data_to_export) + + return "data exported" + + +def exportData(method_id, data): + # Check the method id + if not method_exist(method_id): + raise Exception("method " + method_id + " not found") + + export_method = get_export_method(method_id) + + # Export the data + export_method.export(data) + + return "data exported" diff --git a/build/lib/backend/modules/exportMethods/methods/__init__.py b/build/lib/backend/modules/exportMethods/methods/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/build/lib/backend/modules/exportMethods/methods/kafkaUtils.py b/build/lib/backend/modules/exportMethods/methods/kafkaUtils.py new file mode 100644 index 000000000..09c708df1 --- /dev/null +++ b/build/lib/backend/modules/exportMethods/methods/kafkaUtils.py @@ -0,0 +1,71 @@ +from kafka import KafkaProducer +from backend.modules.exportMethods.exportClass import ExportType, ExportMethod +import json + +############################################################################# +# +# Kafka export method +# +# Connect to a kafka server on init and send data to a topic +# +############################################################################# + + +class KafkaExportType(ExportType): + def __init__(self): + super().__init__() + + self.name = "kafka" + # Expected parameters: [server, topic] + self.parameters_definition = ["server", "topic"] + + self.export_method_class = KafkaExportMethod + + +class KafkaExportMethod(ExportMethod): + up = False + + def __init__(self, name, parameters): + super().__init__(KafkaExportType(), name, parameters) + + # Expected parameters: [server, topic] + # Check parameters + if len(parameters) != 2: + raise Exception( + "Kafka export type requires 2 parameters : server and topic" + ) + + # Create producer + self.server = parameters[0] + self.topic = parameters[1] + + # Create Kafka producer + try: + self.producer = KafkaProducer( + bootstrap_servers=self.server, + value_serializer=lambda v: json.dumps(v).encode("utf-8"), + ) + self.up = True + except Exception as e: + print("Kafka producer creation failed : " + str(e)) + print("server : '" + self.server + "'") + raise Exception( + "Kafka producer creation on server '" + + self.server + + "' failed with error : " + + str(e) + ) + + def export(self, data): + print("Kafka export method : Sending data to kafka", self.server, self.topic) + 
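+        # The payload is logged below for traceability. Note that with the
+        # kafka-python client used here, producer.send() (called further down)
+        # is asynchronous and returns a future; printing that future does not
+        # confirm delivery to the broker.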
print(data) + + if not self.up: + raise Exception("Kafka producer is not up") + + try: + print(self.producer.send(self.topic, data)) + print("Kafka export method : Data sent") + except Exception as e: + print("Kafka export method : Error sending data to kafka", e) + raise "Kafka export method : Error sending data to kafka" diff --git a/build/lib/backend/modules/exportMethods/methods/postUtils.py b/build/lib/backend/modules/exportMethods/methods/postUtils.py new file mode 100644 index 000000000..a37d18aff --- /dev/null +++ b/build/lib/backend/modules/exportMethods/methods/postUtils.py @@ -0,0 +1,59 @@ +from backend.modules.exportMethods.exportClass import ExportType, ExportMethod +import requests + +############################################################################# +# +# HTTP - Post export method +# +# Send data through a post request to a server +# +############################################################################# + + +class PostExportType(ExportType): + def __init__(self): + super().__init__() + + self.name = "post" + self.parameters_definition = ["url"] + + self.export_method_class = PostExportMethod + + +class PostExportMethod(ExportMethod): + up = False + + def __init__(self, name, parameters): + super().__init__(PostExportType(), name, parameters) + + # Expected parameters: [url] + # Check parameters + if len(parameters) != 1: + raise Exception("Post export type requires 1 parameter : the url") + + self.url = parameters[0] + + # Check url + if not self.url.startswith("http://") and not self.url.startswith("https://"): + raise Exception( + "Url '" + self.url + "' must start with http:// or https://" + ) + + self.up = True + + def export(self, data): + print("Post export method: Sending data to '" + self.url + "'") + + if not self.up: + raise Exception("Can't send data to '" + self.url + "'") + + try: + # Send data + r = requests.post(self.url, json=data) + r.raise_for_status() + print("Post export method : Data sent") + except Exception as e: + print("Post export method : Error sending post request", e) + raise Exception( + "Post export method : Error sending post request on url" + str(e) + ) diff --git a/build/lib/backend/server.py b/build/lib/backend/server.py new file mode 100644 index 000000000..0c3e2210c --- /dev/null +++ b/build/lib/backend/server.py @@ -0,0 +1,5 @@ +from backend.backend import start_server + + +def run(): + start_server() diff --git a/build/lib/backend/swagger.yaml b/build/lib/backend/swagger.yaml new file mode 100644 index 000000000..a6cb5c5b4 --- /dev/null +++ b/build/lib/backend/swagger.yaml @@ -0,0 +1,1914 @@ +swagger: "2.0" +info: + version: 0.27.1 + title: DebiAI_BACKEND_API + description: DebiAI backend api + contact: + email: debiai@irt-systemx.fr + license: + name: Apache 2.0 + url: https://www.apache.org/licenses/LICENSE-2.0.html +paths: + /version: + get: + summary: Ping to check if the backend is running + operationId: backend.controller.projects.ping + responses: + 200: + description: The server is online + + # Data providers + /data-providers: + get: + summary: Get data providers list and status + tags: [Data Providers] + operationId: backend.controller.dataProviders.get_data_providers + responses: + 200: + description: List of data providers + schema: + type: array + items: + $ref: "#/definitions/dataProvider" + + post: + summary: Add data provider to data providers list + tags: [Data Providers] + operationId: backend.controller.dataProviders.post_data_providers + parameters: + - name: data + in: body + schema: + type: 
object + required: + - name + - type + properties: + name: + type: string + description: The name of the data Provider + type: + type: string + description: Type of the data Provider (Web) + url: + type: string + description: The url of the new data Provider if type is Web + responses: + 204: + description: Data provider added to the list + 400: + description: Bad request on data + + /data-providers/{dataProviderId}: + delete: + summary: Delete data providers from the list + tags: [Data Providers] + operationId: backend.controller.dataProviders.delete_data_providers + parameters: + - name: dataProviderId + in: path + type: string + required: true + responses: + 204: + description: Data provider deleted + 400: + description: The Data provider id must not be null + 404: + description: The data provider doesn't exist + + get: + summary: Get general informations about a data provider, like his version or the max number sample for each type of request + tags: [Data Providers] + operationId: backend.controller.dataProviders.get_data_provider_info + parameters: + - name: dataProviderId + in: path + type: string + required: true + responses: + 200: + description: The info of the data provider + schema: + type: object + required: + - version + properties: + version: + type: string + maxSampleIdByRequest: + type: integer + maxSampleDataByRequest: + type: integer + maxResultByRequest: + type: integer + + canDelete: + type: object + description: Information about what can be deleted by DebiAI + properties: + projects: + type: boolean + default: true + selections: + type: boolean + default: true + models: + type: boolean + default: true + 400: + description: The Data provider id must not be null + 404: + description: The data provider doesn't exist + + # Projects + /projects: + get: + summary: Get the projects overview + tags: [Project] + operationId: backend.controller.projects.get_projects + responses: + 200: + description: List of project overviews + schema: + type: array + items: + $ref: "#/definitions/projectOverview" + + post: + summary: Post a new project + tags: [Project] + operationId: backend.controller.pythonModuleDp.post_project + parameters: + - name: data + in: body + schema: + type: object + required: + - projectName + properties: + projectName: + type: string + description: The project name + blockLevelInfo: + type: array + items: + $ref: "#/definitions/blockLevelInfo" + description: List of the block level info + + responses: + 200: + description: project created, the project ID is returned + schema: + type: object + required: + - projectId + properties: + projectId: + type: string + 400: + description: The project name must not be null + 401: + description: The project name is too long + 402: + description: The project name contain invalid characters + 403: + description: A project with the same name already exist + + /data-providers/{dataProviderId}/projects: + get: + summary: Get the projects overview for a data provider + tags: [Project] + operationId: backend.controller.projects.get_data_providers_project + parameters: + - name: dataProviderId + in: path + type: string + required: true + responses: + 200: + description: List of project overviews + schema: + type: array + items: + $ref: "#/definitions/projectOverview" + + /data-providers/{dataProviderId}/projects/{projectId}: + get: + summary: Get project name, nb of models & nb of selections (overviews of a project) + tags: [Project] + operationId: backend.controller.projects.get_project + parameters: + - name: dataProviderId + 
in: path + type: string + required: true + - name: projectId + in: path + type: string + required: true + responses: + 200: + description: project + schema: + $ref: "#/definitions/project" + + delete: + summary: remove a project from ID + tags: [Project] + operationId: backend.controller.projects.delete_project + parameters: + - name: dataProviderId + in: path + type: string + required: true + - name: projectId + in: path + type: string + required: true + responses: + 200: + description: project deleted + 404: + description: project doesn't exist + + /data-providers/{dataProviderId}/projects/{projectId}/dataIdList: + post: + summary: Get the project data id list + tags: [Project] + operationId: backend.controller.projects.get_data_id_list + parameters: + - name: dataProviderId + in: path + type: string + required: true + - name: projectId + in: path + type: string + required: true + - name: requestParameters + in: body + required: true + schema: + type: object + required: + - analysis + - from + - to + properties: + from: + type: integer + description: The index of the first data to return + x-nullable: true + to: + type: integer + description: The index of the last data to return + x-nullable: true + analysis: + type: object + required: + - id + properties: + id: + type: string + description: Id of the analysis + start: + type: boolean + description: If true, this is the first request of the analysis + end: + type: boolean + description: If true, this is the last request of the analysis + + responses: + 200: + description: project + schema: + $ref: "#/definitions/project" + + # BlockLevels + /data-providers/{dataProviderId}/projects/{projectId}/blocklevels: + post: + summary: add a new data blocks level structure + tags: [Project] + operationId: backend.controller.pythonModuleDp.post_block_levels + parameters: + - name: dataProviderId + in: path + type: string + required: true + - name: projectId + in: path + type: string + required: true + - name: block_levels + in: body + schema: + type: array + items: + type: object + properties: + name: + type: string + groundTruth: + type: array + items: + type: object + properties: + name: + type: string + type: + type: string + inputs: + type: array + items: + type: object + properties: + name: + type: string + type: + type: string + contexts: + type: array + items: + type: object + properties: + name: + type: string + type: + type: string + minItems: 1 + required: true + responses: + 200: + schema: + type: object + description: Block tructure added, the block structure is returned + + /data-providers/{dataProviderId}/projects/{projectId}/resultsStructure: + post: + summary: add a new expected results structure + tags: [Project] + operationId: backend.controller.pythonModuleDp.post_resultsStructure + parameters: + - name: dataProviderId + in: path + type: string + required: true + - name: projectId + in: path + type: string + required: true + - name: resultStructure + in: body + schema: + type: array + items: + type: object + required: + - name + - type + properties: + name: + type: + - number + - string + type: + type: string + description: text, number or bool + default: + type: + - number + - string + - boolean + group: + type: string + description: Optional group name, used to group the results columns in the UI + required: true + responses: + 200: + schema: + type: object + description: Results structure added, the resultStructure is returned + 404: + description: The project does not exist + 403: + description: The result structure already 
exists + + # Models + /data-providers/{dataProviderId}/projects/{projectId}/models: + post: + summary: add a model + tags: [Model] + operationId: backend.controller.models.post_model + parameters: + - name: dataProviderId + in: path + type: string + required: true + - name: projectId + in: path + type: string + required: true + - name: data + in: body + schema: + type: object + required: + - name + properties: + name: + type: string + metadata: + type: object + description: Model metadata - key value list + required: true + responses: + 200: + description: Model added + 409: + description: Warning - Model already exists + 402: + description: Model name contains invalid characters + 404: + description: Project not found + + /data-providers/{dataProviderId}/projects/{projectId}/models/{modelId}: + get: + summary: Get a model results id list + tags: [Model] + operationId: backend.controller.models.get_model_id_list + parameters: + - name: dataProviderId + in: path + type: string + required: true + - name: projectId + in: path + type: string + required: true + - name: modelId + in: path + type: string + required: true + responses: + 200: + description: model id list + schema: + type: array + items: + type: string + 404: + description: model or project doesn't exist + + delete: + summary: remove a model + tags: [Model] + operationId: backend.controller.models.delete_model + parameters: + - name: dataProviderId + in: path + type: string + required: true + - name: projectId + in: path + type: string + required: true + - name: modelId + in: path + type: string + required: true + responses: + 200: + description: model deleted + 404: + description: model or project doesn't exist + + ? /data-providers/{dataProviderId}/projects/{projectId}/models/{modelId}/resultsDict + : post: + summary: Add results to a model + tags: [Model] + operationId: backend.controller.pythonModuleDp.add_results_dict + parameters: + - name: dataProviderId + in: path + type: string + required: true + - name: projectId + in: path + type: string + required: true + - name: modelId + in: path + type: string + required: true + - name: data + description: Tree object with existing block references, the end of the tree needs to include the expected results in the block structure + in: body + required: true + schema: + type: object + required: + - results + properties: + results: + type: object + expected_results_order: + description: order of the given results array, by default it is the project result structure + type: array + + responses: + 200: + description: model results added + 403: + description: Block not found + 404: + description: model or project doesn't exist + + ?
/data-providers/{dataProviderId}/projects/{projectId}/models/{modelId}/getModelResults + : post: + summary: Get the model results from a sample list + tags: [Model] + operationId: backend.controller.models.get_results + parameters: + - name: dataProviderId + in: path + type: string + required: true + - name: projectId + in: path + type: string + required: true + - name: modelId + in: path + type: string + required: true + - name: data + in: body + required: true + schema: + type: object + required: + - sampleIds + properties: + sampleIds: + description: List of sample ID + items: + type: [string, integer, number] + responses: + 200: + description: model results + schema: + type: object + additionalProperties: + type: array + description: List of results ordered the same way as the project expected results + 404: + description: model or project doesn't exist + + # Blocks + /data-providers/{dataProviderId}/projects/{projectId}/blocks: + post: + summary: add a tree to an existing project block tree + tags: [Block] + operationId: backend.controller.pythonModuleDp.post_block_tree + parameters: + - name: dataProviderId + in: path + type: string + required: true + - name: projectId + in: path + type: string + description: project ID + required: true + + - name: data + in: body + schema: + type: object + required: + - blockTree + properties: + blockTree: + $ref: "#/definitions/blockTree" + required: true + + responses: + 200: + description: Block tree added + 403: + description: Invalid parameters + 404: + description: Project not found + + /data-providers/{dataProviderId}/projects/{projectId}/blocksFromSampleIds: + post: + summary: get a project tree form a sample list + tags: [Block] + operationId: backend.controller.data.get_data + parameters: + - name: dataProviderId + in: path + type: string + required: true + - name: projectId + in: path + type: string + description: project ID + required: true + + - name: data + in: body + required: true + schema: + type: object + required: + - sampleIds + properties: + sampleIds: + type: array + items: + type: [string, integer, number] + + analysis: + description: Informations about the analysis to help data-providers with data management + $ref: "#/definitions/analysis" + + responses: + 200: + description: Block tree with sample + schema: + type: object + 404: + description: Project or one of the models not found + + # Selections + /data-providers/{dataProviderId}/projects/{projectId}/selections/: + get: + summary: Get the project selections + tags: [Selection] + operationId: backend.controller.selection.get_selections + parameters: + - name: dataProviderId + in: path + type: string + required: true + - name: projectId + in: path + type: string + required: true + responses: + 200: + description: Project selections + schema: + type: array + items: + $ref: "#/definitions/selection" + + post: + summary: add a selection + tags: [Selection] + operationId: backend.controller.selection.post_selection + parameters: + - name: dataProviderId + in: path + type: string + required: true + - name: projectId + in: path + type: string + required: true + - name: data + in: body + schema: + type: object + required: + - sampleHashList + - selectionName + properties: + sampleHashList: + type: array + items: + type: string + description: List of the selection sample id (hash) + selectionName: + type: string + requestDescription: + type: string + x-nullable: true + requestId: + type: string + description: Id of the request that has created the selection. 
+ x-nullable: true + responses: + 200: + description: selection added + schema: + type: object + + ? /data-providers/{dataProviderId}/projects/{projectId}/selections/{selectionId} + : get: + summary: Get a project selection id list + tags: [Selection] + operationId: backend.controller.selection.get_selection_id_list + parameters: + - name: dataProviderId + in: path + type: string + required: true + - name: projectId + in: path + type: string + required: true + - name: selectionId + in: path + type: string + required: true + responses: + 200: + description: Project selection id list + schema: + type: array + items: + type: string + 404: + description: Selection, project or data provider not found + + delete: + summary: delete a selection + tags: [Selection] + operationId: backend.controller.selection.delete_selection + parameters: + - name: dataProviderId + in: path + type: string + required: true + - name: projectId + in: path + type: string + required: true + - name: selectionId + in: path + type: string + required: true + responses: + 200: + description: selection deleted + + # Analysis layouts + /app/layouts/: + get: + summary: Get all layouts + tags: [Layouts] + operationId: backend.controller.layouts.get_layouts + responses: + 200: + description: Layouts for all projects + schema: + type: array + items: + $ref: "#/definitions/projectLayout" + + post: + summary: Add a layout + tags: [Layouts] + operationId: backend.controller.layouts.post_layout + parameters: + - name: data + in: body + required: true + schema: + type: object + required: + - name + - projectId + - dataProviderId + - layout + properties: + name: + type: string + description: Name of the configuration + maxLength: 100 + minLength: 1 + projectId: + type: string + description: Id of the project linked to the configuration + dataProviderId: + type: string + description: Id of the data provider linked to the project id + description: + type: string + layout: + $ref: "#/definitions/dashboardLayout" + lastLayoutSaved: + type: boolean + description: If true, the layout will be loaded by default + If a layout with lastLayoutSaved=true already exists, it will + be replaced by the new one + default: false + selectedColorColumn: + type: string + description: Column selected to be used as color + x-nullable: true + + responses: + 204: + description: Dashboard layout saved + + /app/layouts/{id}: + delete: + summary: Delete a layout + tags: [Layouts] + operationId: backend.controller.layouts.delete_layout + parameters: + - name: id + in: path + type: string + required: true + description: Id of the layout to delete + minLength: 1 + responses: + 204: + description: Layout deleted + 404: + description: Layout not found + + # Widget configuration + /app/widget-configurations/: + get: + summary: Get all widget configurations overview, + return the number of configurations for each widget + tags: [Widget configurations] + operationId: backend.controller.widgetConfigurations.get_all_configurations + responses: + 200: + description: Widget configurations number for each widget + schema: + type: object + description: Object with widget title as key and number of + configurations as value + additionalProperties: + type: integer + minimum: 0 + + /app/widgets/{widgetKey}/configurations: + get: + summary: Get the widget configurations + tags: [Widget configurations] + operationId: backend.controller.widgetConfigurations.get_widget_configurations + parameters: + - name: widgetKey + in: path + description: Title of the wigdet linked to the 
configuration + type: string + required: true + minLength: 1 + responses: + 200: + description: Widget configurations list + schema: + type: array + items: + type: object + required: + - id + - name + - projectId + - dataProviderId + - configuration + properties: + id: + type: string + name: + type: string + description: + type: string + projectId: + type: string + description: Id of the project linked to the configuration + dataProviderId: + type: string + description: Id of the data provider linked to the project id + creationDate: + type: string + configuration: + type: object + description: Key value list with the configuration, specific to the widget + post: + summary: Add a widget configuration + tags: [Widget configurations] + operationId: backend.controller.widgetConfigurations.post_configuration + parameters: + - name: widgetKey + in: path + required: true + type: string + minLength: 1 + + - name: data + in: body + required: true + schema: + type: object + required: + - name + - projectId + - dataProviderId + - configuration + properties: + name: + type: string + description: Name of the configuration + maxLength: 100 + minLength: 1 + projectId: + type: string + description: Id of the project linked to the configuration + dataProviderId: + type: string + description: Id of the data provider linked to the project id + description: + type: string + configuration: + type: object + description: Key value list with the configuration, specific to the widget + maxProperties: 15 + + responses: + 204: + description: Widget configuration saved + + /app/widgets/{widgetKey}/configurations/{id}: + delete: + summary: Delete a widget configuration + tags: [Widget configurations] + operationId: backend.controller.widgetConfigurations.delete_configuration + parameters: + - name: widgetKey + in: path + type: string + required: true + minLength: 1 + - name: id + in: path + type: string + required: true + description: Id of the configuration to delete + minLength: 1 + responses: + 204: + description: Widget configuration saved + 404: + description: Widget title or configuration not found + + # Data export + /app/exportMethods: + get: + summary: Get the application export methods + tags: [Export] + operationId: backend.controller.exportMethods.get_export_methods + responses: + 200: + description: Export method list + schema: + type: array + items: + type: object + required: + - type + - name + - parameters + - parameterNames + properties: + type: + type: string + description: Export method type + name: + type: string + description: Export method name + minLength: 1 + parameters: + type: array + description: Export method parameters + parametersNames: + type: array + description: Name of the export method parameters according to the export type + + post: + summary: Create an export method for the app + tags: [Export] + operationId: backend.controller.exportMethods.post_export_method + parameters: + - name: data + in: body + schema: + type: object + required: + - type + - name + - parameters + properties: + type: + type: string + description: Export method type + name: + type: string + description: Export method name + minLength: 1 + parameters: + type: array + description: Export method parameters, can be anything as long as the export method type can read it + required: true + responses: + 200: + description: Export method added + 400: + description: Wrong type or parameters + + /app/exportMethods/{exportMethodId}: + delete: + summary: Remove an export method for the app + tags: [Export] + 
operationId: backend.controller.exportMethods.delete_export_method + parameters: + - name: exportMethodId + in: path + type: string + required: true + responses: + 200: + description: Export method removed + 404: + description: Unknown export method + + /app/exportMethods/{exportMethodId}/exportData: + post: + summary: Export data with an export method + tags: [Export] + operationId: backend.controller.exportMethods.exportData + parameters: + - name: exportMethodId + in: path + type: string + required: true + - name: data + in: body + schema: + type: object + responses: + 200: + description: Data exported + + /data-providers/{dataProviderId}/projects/{projectId}/exportSelection: + post: + summary: Export a selected sample id list from an export method + tags: [Export] + operationId: backend.controller.exportMethods.exportSelection + parameters: + - name: dataProviderId + in: path + type: string + required: true + - name: projectId + in: path + type: string + required: true + - name: data + in: body + schema: + type: object + required: + - sampleHashList + - selectionName + - exportMethodId + properties: + sampleHashList: + type: array + items: + type: string + description: List of the selected sample id (hash) + selectionName: + type: string + exportMethodId: + type: string + annotationValue: + type: string + description: Any value set by the user + responses: + 200: + description: Selection exported + + # Algo providers + /app/algo-providers: + get: + summary: Get all Algo providers and their algorithms + tags: [AlgoProviders] + operationId: backend.controller.algoProviders.get_algo_providers + responses: + 200: + description: Algorithms list + schema: + type: array + items: + type: object + $ref: "#/definitions/algoProvider" + + post: + summary: Add an Algo provider + tags: [AlgoProviders] + operationId: backend.controller.algoProviders.post_algo_provider + parameters: + - name: data + in: body + required: true + schema: + type: object + required: + - name + - url + properties: + name: + type: string + description: Name of the Algo provider + maxLength: 100 + minLength: 1 + url: + type: string + description: Url of the Algo provider + + responses: + 204: + description: Algorithm saved + + /app/algo-providers/{name}: + delete: + summary: Delete an Algo provider + tags: [AlgoProviders] + operationId: backend.controller.algoProviders.delete_algo_provider + parameters: + - name: name + in: path + type: string + required: true + description: Name of the Algo provider to delete + minLength: 1 + responses: + 204: + description: Algo provider deleted + 404: + description: Algo provider not found + + /app/algo-providers/{algoProviderName}/algorithms/use/{algoId}: + post: + summary: Use an algorithm of an Algo provider + tags: [AlgoProviders] + operationId: backend.controller.algoProviders.use_algo + parameters: + - name: algoProviderName + in: path + type: string + required: true + description: Name of the Algo provider to use + minLength: 1 + - name: algoId + in: path + type: string + required: true + description: Id of the algorithm to use + minLength: 1 + - name: data + in: body + required: true + schema: + type: object + required: + - inputs + properties: + inputs: + type: array + description: Inputs of the algorithm + items: + type: object + required: + - name + - value + properties: + name: + type: string + description: Name of the input, + must be the same as the one defined in the input list + value: + description: Value of the input, depending on the input type + + responses: + 200: + 
description: Algorithm result + schema: + type: object + required: + - outputs + properties: + outputs: + type: array + description: Outputs of the algorithm + items: + type: object + required: + - name + - value + properties: + name: + type: string + description: Name of the output, + must be the same as the one defined in the output list + value: + description: Value of the output, depending on the output type + + # Statistical operations + /statisticalOperations/pearsonCorrelation: + post: + summary: Calculate pearson correlation between rows + tags: [Statistical operations] + operationId: backend.controller.statisticalOperations.pearsonCorrelation + parameters: + - name: data + in: body + description: Array of rows with the same sizes (discrete & continuous) + schema: + type: array + items: + type: array + items: + type: number + + responses: + 200: + description: pearson correlation matrix + schema: + type: array + items: + type: array + items: + type: array + items: + type: number + + 403: + description: Invalid Input array, not the same size + + /statisticalOperations/spearmanCorrelation: + post: + summary: Calculate spearman correlation between rows + tags: [Statistical operations] + operationId: backend.controller.statisticalOperations.spearmanCorrelation + parameters: + - name: data + in: body + description: Array of rows with the same sizes (discrete & continuous) + schema: + type: array + items: + type: array + items: + type: number + + responses: + 200: + description: spearman correlation matrix + schema: + type: array + items: + type: array + items: + type: array + items: + type: number + + 403: + description: Invalid Input array, not the same size + + /statisticalOperations/continuousMutualInformation: + post: + summary: Calculate mutual informations + tags: [Statistical operations] + operationId: backend.controller.statisticalOperations.mutualInformation + parameters: + - name: data + in: body + description: mutual information matrix + schema: + type: object + required: + - list_continuous + - list_discrete + properties: + k: + type: integer + description: number of neighbors + base: + type: integer + description: takes 2 (unit =bits), 10 (unit= nats) + list_continuous: + type: array + items: + type: array + items: + type: number + description: Array of rows with the same sizes (continuous only) + list_discrete: + type: array + items: + type: array + items: + type: number + description: Array of rows with the same sizes (dicrete only) + normalise: + type: string + default: "max" + description: This parameter is used to normalise the mutual information coefficient, it takes either 'max' or 'min' or 'square root' or 'mean' or 'none' + + responses: + 200: + description: mutual information matrix + schema: + type: array + items: + type: array + items: + type: number + 403: + description: Invalid Input array, not the same size + + /statisticalOperations/higherDimensionMutualInformation: + post: + summary: Calculate the mutual information between variables + tags: [Statistical operations] + operationId: backend.controller.statisticalOperations.higherDimensionMutualInformation + parameters: + - name: data + in: body + schema: + type: object + required: + - X + - k + properties: + k: + type: integer + description: number of neighbors (must be < to len(X)) + base: + type: integer + description: takes 2 (unit =bits), 10 (unit= nats) + default: 2 + X: + type: array + items: + type: array + items: + type: number + description: list of list of the variables, it can take more than 2 
variables + + responses: + 200: + description: mutual information between the variables + schema: + type: number + 403: + description: Invalid Input + + /statisticalOperations/continuousAndHigherDimensionMutualInformation: + post: + summary: Calculate matrix mutual informations and the higher Dimension + tags: [Statistical operations] + operationId: backend.controller.statisticalOperations.mutualAndHigherInformation + parameters: + - name: data + in: body + description: mutual information matrix + schema: + type: object + required: + - list_continuous + - list_discrete + - k + properties: + k: + type: integer + description: number of neighbors + base: + type: integer + description: takes 2 (unit =bits), 10 (unit= nats) + list_continuous: + type: array + items: + type: array + items: + type: number + description: Array of rows with the same sizes (continuous only) + list_discrete: + type: array + items: + type: array + items: + type: number + description: Array of rows with the same sizes (dicrete only) + normalise: + type: string + default: "max" + description: This parameter is used to normalise the mutual information coefficient, it takes either 'max' or 'min' or 'square root' or 'mean' or 'none' + + responses: + 200: + description: mutual information matrix + schema: + type: array + items: + type: array + items: + type: number + 403: + description: Invalid Input array, not the same size + +definitions: + # Projects + projectOverview: + type: object + required: + - id + - dataProviderId + - name + properties: + id: + type: string + description: project ID + + dataProviderId: + type: string + description: project data provider ID + + name: + type: string + description: project name + + nbModels: + type: integer + description: number of Models + + nbSelections: + type: integer + description: number of selections + + creationDate: + type: string + format: date-time + description: creation date + + updateDate: + type: string + format: date-time + description: last update date + + project: + allOf: + - $ref: "#/definitions/projectOverview" + - type: object + required: + - columns + - resultsStructure + properties: + columns: + description: list of the projects columns that will be used to display the data + type: array + items: + $ref: "#/definitions/column" + + resultsStructure: + description: list of the projects columns that will be used to display the model results + type: array + items: + $ref: "#/definitions/column" + + column: + description: column information, for data or model results + type: object + required: + - name + - type + properties: + name: + type: string + description: column name + category: + type: string + description: column category, by default it is 'other' or 'result' for model results + enum: + - other + - context + - input + - groundtruth + type: + type: string + description: column type + enum: + - auto + - text + - number + - boolean + + group: + type: string + description: column group, used to group columns in the UI + + blockLevelInfo: + type: object + required: + - name + properties: + name: + type: string + description: block level name + + # Artefact + artefact: + type: object + required: + - name + properties: + name: + type: string + creationDate: + type: string + format: date-time + updateDate: + type: string + format: date-time + version: + type: string + metadata: + type: object + additionalProperties: + type: string + example: + meteo: soleil + temperature: 50 + + # Models + modelOverview: + type: object + required: + - name + - id + properties: + 
name: + type: string + id: + type: string + nbEvaluatedSamples: + type: integer + updateDate: + type: string + format: date-time + creationDate: + type: string + format: date-time + metadata: + type: object + + model: + allOf: + - $ref: "#/definitions/artefact" + - type: object + properties: + hyperParameters: + type: array + items: + type: string + trainingLogs: + type: array + items: + type: string + + # Selections and requests + selection: + allOf: + - $ref: "#/definitions/artefact" + - type: object + properties: + nbSamples: + type: integer + + request: + allOf: + - $ref: "#/definitions/artefact" + - type: object + required: + - filters + properties: + filters: + type: array + items: + type: object + required: + - type + - columnLabel + properties: + type: + type: string + columnLabel: + type: string + description: Can be of type 'values' of 'intervals', 'values' + filters have a 'values' key that is a list of text or numbers + , 'intervals' filters have an 'interval' key that is a list of + {min, max} intervals + + # DataTypes + dataType: + type: object + properties: + gdtList: + type: array + items: + type: object + description: list of ground thruth (key - value) + + inputList: + type: array + items: + type: object + description: list of inputs (key - value) + + contextList: + type: array + items: + type: object + description: list of context (key - value) + + # block & sample + block: + allOf: + - $ref: "#/definitions/dataType" + - type: object + required: + - name + properties: + name: + type: string + + blockOverview: + type: object + required: + - name + properties: + name: + type: string + description: name of the block + + sample: + allOf: + - $ref: "#/definitions/artefact" + - type: object + + blockTree: + type: array + items: + $ref: "#/definitions/block" + + # Data providers + dataProvider: + type: object + properties: + name: + type: string + url: + type: string + status: + type: boolean + description: True if the data provider is up and running + type: + type: string + + # Analysis + analysis: + type: object + description: Data for the current analysis + required: + - id + properties: + id: + type: string + description: Unique ID generated for the analysis, it will be the same in this analysis requests + start: + type: boolean + description: True if this is the first request of the analysis + end: + type: boolean + description: True if this is the last request of the analysis + + # Layout + projectLayout: + description: Project layout, information about a dashboard layout + type: object + required: + - id + - name + - projectId + - dataProviderId + - layout + properties: + id: + type: string + name: + type: string + description: + type: string + projectId: + type: string + description: Id of the project linked to the layout + dataProviderId: + type: string + description: Id of the data provider linked to the project id + creationDate: + type: string + format: date-time + layout: + $ref: "#/definitions/dashboardLayout" + lastLayoutSaved: + type: boolean + description: True if this is the last layout saved + selectedColorColumn: + type: string + description: Column selected to be used as color + x-nullable: true + + dashboardLayout: + description: Dashboard layout, list of widgets with their + position and configuration + + type: array + items: + type: object + required: + - widgetKey + - x + - y + - width + - height + properties: + widgetKey: + type: string + description: Key of the widget + x: + type: integer + description: x position of the widget + y: + type: integer + 
description: y position of the widget + width: + type: integer + description: width of the widget + height: + type: integer + description: height of the widget + config: + type: object + description: Configuration of the widget + x-nullable: true + localFilters: + type: array + description: Filters applied to the widget + x-nullable: true + items: + type: object + + # Algo providers + algoProvider: + description: Informations about an AlgoProvider + required: + - url + - name + - status + - algorithms + properties: + name: + type: string + description: Name of the AlgoProvider + minLength: 1 + url: + type: string + description: Url of the AlgoProvider + minLength: 1 + status: + type: boolean + description: True if the algo provider is up and running + algorithms: + type: array + description: List of algorithms provided by the AlgoProvider + items: + type: object + description: Informations about an algorithm + required: + - id + - inputs + - outputs + properties: + id: + type: string + description: The id of the algorithm, must be unique, will be used to identify the algorithm + example: "my-algorithm-01" + name: + type: string + description: The name of the algorithm + example: "My algorithm 01" + description: + type: string + description: The description of the algorithm + example: "This algorithm is used to do something" + tags: + type: array + description: The list of tags of the algorithm + items: + type: string + example: ["tag1", "tag2"] + author: + type: string + description: The author of the algorithm + example: "Ada Lovelace" + creationDate: + type: string + description: The creation date of the algorithm, ISO 8601 format, YYYY-MM-DD + example: "2023-01-01" + format: date + x-nullable: true + updateDate: + type: string + description: The last update date of the algorithm, ISO 8601 format, YYYY-MM-DD + example: "2023-03-20" + format: date + x-nullable: true + version: + type: string + description: The version of the algorithm + example: "0.1.0" + inputs: + type: array + description: The list of inputs of the algorithm + items: + type: object + $ref: "#/definitions/algoInputOutput" + outputs: + type: array + description: The list of inputs of the algorithm + items: + type: object + $ref: "#/definitions/algoInputOutput" + + algoInputOutput: + type: object + description: Informations about an input or an output of an algorithm + required: + - name + - type + properties: + name: + type: string + description: The name of the input or output + example: "Input_A" + type: + type: string + enum: + - string + - number + - boolean + - array + + # Next is useless for outputs + availableValues: + type: array + description: The list of available values for this input + example: ["my value", "my other value"] + default: + description: The default value for this input + example: "my value" + min: + type: number + description: The minimum value for number inputs + example: 0 + max: + type: number + description: The maximum value for number inputs + example: 10 + arrayType: + type: string + description: For array inputs, specify type of the array + enum: + - string + - number + - boolean + lengthMin: + type: number + description: The minimum length of the array for array inputs + example: 0 + lengthMax: + type: number + description: The maximum length of the array for array inputs + example: 10 diff --git a/build/lib/backend/tests/__init__.py b/build/lib/backend/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/build/lib/backend/tests/test_algo_providers.py 
b/build/lib/backend/tests/test_algo_providers.py new file mode 100644 index 000000000..b13addc5b --- /dev/null +++ b/build/lib/backend/tests/test_algo_providers.py @@ -0,0 +1,95 @@ +import requests +import ujson as json + +appUrl = "http://localhost:3000/" +test_algo_provider_name = "test_create_algo_provider" + +algo_provider_list = [] + + +def test_get_algorithms(): + global algo_provider_list + url = appUrl + "app/algo-providers" + resp = requests.get(url=url, headers={}) + assert resp.status_code == 200 + print(resp.text) + algo_providers = json.loads(resp.text) + assert type(algo_providers) is list + + for algo_provider in algo_providers: + assert type(algo_provider) is dict + assert "name" in algo_provider + assert "url" in algo_provider + assert "status" in algo_provider + assert "algorithms" in algo_provider + + assert type(algo_provider["name"]) is str + assert type(algo_provider["url"]) is str + assert type(algo_provider["status"]) is bool + assert type(algo_provider["algorithms"]) is list + + for algo in algo_provider["algorithms"]: + assert type(algo) is dict + assert "id" in algo + assert "inputs" in algo + assert "outputs" in algo + + assert type(algo["id"]) is str + assert type(algo["inputs"]) is list + assert type(algo["outputs"]) is list + + for input in algo["inputs"]: + assert type(input) is dict + assert "name" in input + assert "type" in input + + assert type(input["name"]) is str + assert type(input["type"]) is str + + for output in algo["outputs"]: + assert type(output) is dict + assert "name" in output + assert "type" in output + + assert type(output["name"]) is str + assert type(output["type"]) is str + + algo_provider_list = algo_providers + + +def test_add_algo_provider(): + url = appUrl + "app/algo-providers" + data = {"name": test_algo_provider_name, "url": "http://localhost:4000"} + resp = requests.post(url=url, headers={}, json=data) + assert resp.status_code == 204 + + # Test that it exists + resp = requests.get(url=url, headers={}) + assert resp.status_code == 200 + print(resp.text) + algo_providers = json.loads(resp.text) + assert type(algo_providers) is list + assert len(algo_providers) == len(algo_provider_list) + 1 + assert any( + algo_provider["name"] == test_algo_provider_name + for algo_provider in algo_providers + ) + + +def test_delete_algo_provider(): + url = appUrl + "app/algo-providers/" + test_algo_provider_name + resp = requests.delete(url=url, headers={}) + assert resp.status_code == 204 + + # Test that it was removed from the list + url = appUrl + "app/algo-providers" + resp = requests.get(url=url, headers={}) + assert resp.status_code == 200 + print(resp.text) + algo_providers = json.loads(resp.text) + assert type(algo_providers) is list + assert len(algo_providers) == len(algo_provider_list) + assert not any( + algo_provider["name"] == test_algo_provider_name + for algo_provider in algo_providers + ) diff --git a/build/lib/backend/tests/test_data_providers.py b/build/lib/backend/tests/test_data_providers.py new file mode 100644 index 000000000..41563f4dd --- /dev/null +++ b/build/lib/backend/tests/test_data_providers.py @@ -0,0 +1,96 @@ +import requests +import ujson as json + +appUrl = "http://localhost:3000/" +test_data_provider_name = "test_create_data_provider" + +data_providers = [] + + +def test_get_data_providers(): + global data_providers + url = appUrl + "data-providers" + resp = requests.get(url=url, headers={}) + assert resp.status_code == 200 + print(resp.text) + assert type(json.loads(resp.text)) is list + + for dp in 
json.loads(resp.text): + assert type(dp) is dict + assert "name" in dp + assert "type" in dp + + data_providers = json.loads(resp.text) + + +def test_get_data_provider(): + for dp in data_providers: + url = appUrl + "data-providers/" + dp["name"] + resp = requests.get(url=url, headers={}) + assert resp.status_code == 200 + + +def test_add_data_provider(): + url = appUrl + "data-providers" + data = {"name": test_data_provider_name, "type": "I DONT EXIST"} + resp = requests.post(url=url, headers={}, json=data) + assert resp.status_code == 400 + + +def test_add_web_data_provider(): + # Test no url + url = appUrl + "data-providers" + data = {"name": test_data_provider_name, "type": "Web"} + resp = requests.post(url=url, headers={}, json=data) + assert resp.status_code == 400 + + # Test bad url + data = {"name": test_data_provider_name, "type": "Web", "url": ""} + resp = requests.post(url=url, headers={}, json=data) + assert resp.status_code == 400 + data = {"name": test_data_provider_name, "type": "Web", "url": "I DONT EXIST"} + resp = requests.post(url=url, headers={}, json=data) + assert resp.status_code == 400 + + # Test good url + data = { + "name": test_data_provider_name, + "type": "Web", + "url": "http://localhost:4000", + } + resp = requests.post(url=url, headers={}, json=data) + assert resp.status_code == 204 + + # Test that it exists + url = appUrl + "data-providers/" + test_data_provider_name + resp = requests.get(url=url, headers={}) + assert resp.status_code == 200 + + # Test that it was added to the list + url = appUrl + "data-providers" + resp = requests.get(url=url, headers={}) + assert resp.status_code == 200 + assert type(json.loads(resp.text)) is list + assert len(json.loads(resp.text)) == len(data_providers) + 1 + assert any(dp["name"] == test_data_provider_name for dp in json.loads(resp.text)) + + +def test_delete_data_provider(): + url = appUrl + "data-providers/" + test_data_provider_name + resp = requests.delete(url=url, headers={}) + assert resp.status_code == 204 + + # Test that it was removed from the list + url = appUrl + "data-providers" + resp = requests.get(url=url, headers={}) + assert resp.status_code == 200 + assert type(json.loads(resp.text)) is list + assert len(json.loads(resp.text)) == len(data_providers) + assert not any( + dp["name"] == test_data_provider_name for dp in json.loads(resp.text) + ) + + # Test that it no longer exists + url = appUrl + "data-providers/" + test_data_provider_name + resp = requests.get(url=url, headers={}) + assert resp.status_code == 404 diff --git a/build/lib/backend/tests/test_layouts.py b/build/lib/backend/tests/test_layouts.py new file mode 100644 index 000000000..a34b0e5a5 --- /dev/null +++ b/build/lib/backend/tests/test_layouts.py @@ -0,0 +1,131 @@ +import requests +import ujson as json + +appUrl = "http://localhost:3000/" +testLayoutId = None + + +def delete_layout(id): + url = appUrl + "app/layouts/" + id + resp = requests.request("DELETE", url, headers={}, data={}) + assert resp.status_code == 204 + + +def test_get_layouts(): + url = appUrl + "app/layouts" + resp = requests.request("GET", url, headers={}, data={}) + layouts = json.loads(resp.text) + print(layouts) + assert resp.status_code == 200 + assert type(layouts) is list + + # Remove all layouts + for layout in layouts: + assert type(layout) is dict + assert "id" in layout + delete_layout(layout["id"]) + + +def test_add_layout(): + global testLayoutId + url = appUrl + "app/layouts/" + data = { + "name": "testName", + "description": "testDescription", + 
"projectId": "testProjectId", + "dataProviderId": "testDataProviderId", + "layout": [ + { + "x": 0, + "y": 0, + "width": 1, + "height": 1, + "widgetKey": "testWidgetKey", + "config": {}, + "name": "testName", + } + ], + "selectedColorColumn": "TestColorColumn", + } + resp = requests.request("POST", url, headers={}, json=data) + assert resp.status_code == 204 + + # Check if the layout was added + resp = requests.request("GET", url, headers={}, data={}) + layouts = json.loads(resp.text) + assert resp.status_code == 200 + assert type(layouts) is list + assert len(layouts) == 1 + assert layouts[0]["name"] == data["name"] + assert layouts[0]["description"] == data["description"] + assert layouts[0]["projectId"] == data["projectId"] + assert layouts[0]["dataProviderId"] == data["dataProviderId"] + assert type(layouts[0]["layout"]) is list + assert len(layouts[0]["layout"]) == 1 + assert layouts[0]["layout"][0]["x"] == data["layout"][0]["x"] + assert layouts[0]["layout"][0]["y"] == data["layout"][0]["y"] + assert layouts[0]["selectedColorColumn"] == data["selectedColorColumn"] + + assert "id" in layouts[0] + assert type(layouts[0]["id"]) is str + assert len(layouts[0]["id"]) > 0 + testLayoutId = layouts[0]["id"] + + +def test_delete_layout(): + # Remove the layout + delete_layout(testLayoutId) + + # Check if the layout was removed + url = appUrl + "app/layouts/" + resp = requests.request("GET", url, headers={}, data={}) + layouts = json.loads(resp.text) + assert resp.status_code == 200 + assert type(layouts) is list + assert len(layouts) == 0 + + +def test_last_layout_saved(): + # if lastLayoutSaved is true, the previous layout with lastLayoutSaved = true + # should be deleted + + # Add the first layout + url = appUrl + "app/layouts/" + data = { + "name": "testName", + "description": "testDescription", + "projectId": "testProjectId", + "dataProviderId": "testDataProviderId", + "lastLayoutSaved": True, + "layout": [], + } + resp = requests.request("POST", url, headers={}, json=data) + assert resp.status_code == 204 + + # Add the second layout + data = { + "name": "testName2", + "description": "testDescription", + "projectId": "testProjectId", + "dataProviderId": "testDataProviderId", + "lastLayoutSaved": True, + "layout": [], + } + resp = requests.request("POST", url, headers={}, json=data) + assert resp.status_code == 204 + + # Check if the first layout was removed + url = appUrl + "app/layouts/" + resp = requests.request("GET", url, headers={}, data={}) + layouts = json.loads(resp.text) + assert resp.status_code == 200 + assert type(layouts) is list + assert len(layouts) == 1 + assert layouts[0]["lastLayoutSaved"] is True + assert layouts[0]["name"] == data["name"] + assert layouts[0]["description"] == data["description"] + assert layouts[0]["projectId"] == data["projectId"] + assert layouts[0]["dataProviderId"] == data["dataProviderId"] + + # Remove the layout + delete_layout(layouts[0]["id"]) diff --git a/build/lib/backend/tests/test_pythonModuleDataProvider.py b/build/lib/backend/tests/test_pythonModuleDataProvider.py new file mode 100644 index 000000000..b81567895 --- /dev/null +++ b/build/lib/backend/tests/test_pythonModuleDataProvider.py @@ -0,0 +1,137 @@ +import requests +import ujson as json + +PYTHON_DATA_PROVIDER_ID = "Python module Data Provider" +appUrl = "http://localhost:3000/" +test_project_name = "test_create_project" +test_project_id = None + +# ============== PROJECTS ================= + + +def test_get_projects(): + url = appUrl + "projects" + resp = requests.get(url=url, 
headers={}) + assert resp.status_code == 200 + assert type(json.loads(resp.text)) is list + + +def test_get_bad_project(): + projectId = "I_DO_NOT_EXIST" + url = ( + appUrl + "data-providers/" + PYTHON_DATA_PROVIDER_ID + "/projects/" + projectId + ) + resp = requests.request("GET", url, headers={}, data={}) + assert resp.status_code == 404 + + resp = requests.request("DELETE", url, headers={}, data={}) + assert resp.status_code == 404 + + +def test_create_project_noName(): + # create + url = appUrl + "projects" + resp = requests.post(url=url, headers={}, json={}) + assert resp.status_code == 400 + + +def test_create_project(): + global test_project_id + # delete if exists + projectId = test_project_name + url = ( + appUrl + "data-providers/" + PYTHON_DATA_PROVIDER_ID + "/projects/" + projectId + ) + resp = requests.request("DELETE", url, headers={}, data={}) + assert resp.status_code == 200 or resp.status_code == 404 + + # create + url = appUrl + "projects" + resp = requests.post(url=url, headers={}, json={"projectName": test_project_name}) + assert resp.status_code == 200 + + # Get Id + data = json.loads(resp.text) + test_project_id = data["id"] + assert test_project_id is not None + assert len(test_project_id) > 0 + assert type(test_project_id) is str + + # Test can't create same project + resp = requests.post(url=url, headers={}, json={"projectName": test_project_name}) + assert resp.status_code == 400 + assert "already exists" in resp.text + + +def test_get_project(): + # Find back + url = ( + appUrl + + "data-providers/" + + PYTHON_DATA_PROVIDER_ID + + "/projects/" + + test_project_id + ) + resp = requests.request("GET", url, headers={}, json={}) + assert resp.status_code == 200 + proj = json.loads(resp.text) + assert type(proj) is dict + assert type(proj["columns"]) is list + assert proj["models"] == [] + assert len(proj["name"]) > 0 + assert proj["name"] == test_project_name + + +def test_remove_project(): + # Project exists back + url = ( + appUrl + + "data-providers/" + + PYTHON_DATA_PROVIDER_ID + + "/projects/" + + test_project_id + ) + resp = requests.request("GET", url, headers={}, json={}) + assert resp.status_code == 200 + + # remove + url = ( + appUrl + + "data-providers/" + + PYTHON_DATA_PROVIDER_ID + + "/projects/" + + test_project_id + ) + resp = requests.request("DELETE", url, headers={}, data={}) + assert resp.status_code == 200 + + # Dont Find back + url = ( + appUrl + + "data-providers/" + + PYTHON_DATA_PROVIDER_ID + + "/projects/" + + test_project_id + ) + resp = requests.request("GET", url, headers={}, json={}) + assert resp.status_code == 404 + + # Cant remove again + url = ( + appUrl + + "data-providers/" + + PYTHON_DATA_PROVIDER_ID + + "/projects/" + + test_project_id + ) + resp = requests.request("DELETE", url, headers={}, data={}) + assert resp.status_code == 404 + + +def test_project_nameTooLong(): + testProjectName = "a" * 256 + url = appUrl + "projects" + payload = {"projectName": testProjectName, "blockLevelInfo": []} + headers = {"content-type": "application/json"} + resp = requests.post(url=url, headers=headers, json=payload) + assert resp.status_code == 400 diff --git a/build/lib/backend/tests/test_widget_configurations.py b/build/lib/backend/tests/test_widget_configurations.py new file mode 100644 index 000000000..34c2a7613 --- /dev/null +++ b/build/lib/backend/tests/test_widget_configurations.py @@ -0,0 +1,85 @@ +import requests +import ujson as json + +appUrl = "http://localhost:3000/" +testWidgetKey = "testWidgetKey" +testConfigurationId = None + + 
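+# Helper used by the tests in this module to remove a widget configuration
+# through the REST API. It assumes the backend is running locally on port
+# 3000 (see appUrl above) and that, as described in swagger.yaml,
+# DELETE /app/widgets/{widgetKey}/configurations/{id} answers with a
+# 204 status code on success.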
+def delete_configuration(id): + url = appUrl + "app/widgets/" + testWidgetKey + "/configurations/" + id + resp = requests.request("DELETE", url, headers={}, data={}) + assert resp.status_code == 204 + + +def test_get_configurations(): + url = appUrl + "app/widgets/" + testWidgetKey + "/configurations" + resp = requests.request("GET", url, headers={}, data={}) + configurations = json.loads(resp.text) + print(configurations) + assert resp.status_code == 200 + assert type(configurations) is list + + # Remove all configurations + for conf in configurations: + assert type(conf) is dict + assert "id" in conf + delete_configuration(conf["id"]) + + +def test_add_configuration(): + global testConfigurationId + url = appUrl + "app/widgets/" + testWidgetKey + "/configurations" + data = { + "name": "testName", + "description": "testDescription", + "projectId": "testProjectId", + "dataProviderId": "testDataProviderId", + "configuration": {"testKey": "testValue"}, + } + resp = requests.request("POST", url, headers={}, json=data) + assert resp.status_code == 204 + + # Check if the configuration was added + resp = requests.request("GET", url, headers={}, data={}) + configurations = json.loads(resp.text) + assert resp.status_code == 200 + assert type(configurations) is list + assert len(configurations) == 1 + assert configurations[0]["name"] == data["name"] + assert configurations[0]["description"] == data["description"] + assert configurations[0]["projectId"] == data["projectId"] + assert configurations[0]["dataProviderId"] == data["dataProviderId"] + assert type(configurations[0]["configuration"]) is dict + assert ( + configurations[0]["configuration"]["testKey"] + == data["configuration"]["testKey"] + ) + + assert "id" in configurations[0] + assert type(configurations[0]["id"]) is str + assert len(configurations[0]["id"]) > 0 + testConfigurationId = configurations[0]["id"] + + +def test_get_configurations_overview(): + url = appUrl + "app/widget-configurations" + resp = requests.request("GET", url, headers={}, data={}) + configurations = json.loads(resp.text) + assert resp.status_code == 200 + assert type(configurations) is dict + assert testWidgetKey in configurations + assert configurations[testWidgetKey] == 1 + + +def test_delete_configuration(): + # Remove the configuration + delete_configuration(testConfigurationId) + + # Check if the configuration was removed + url = appUrl + "app/widgets/" + testWidgetKey + "/configurations" + resp = requests.request("GET", url, headers={}, data={}) + configurations = json.loads(resp.text) + assert resp.status_code == 200 + assert type(configurations) is list + assert len(configurations) == 0 diff --git a/build/lib/backend/utils/__init__.py b/build/lib/backend/utils/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/build/lib/backend/utils/layouts/__init__.py b/build/lib/backend/utils/layouts/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/build/lib/backend/utils/layouts/layouts.py b/build/lib/backend/utils/layouts/layouts.py new file mode 100644 index 000000000..0fdc654e3 --- /dev/null +++ b/build/lib/backend/utils/layouts/layouts.py @@ -0,0 +1,149 @@ +import os +import json +import backend.utils.utils as utils +import uuid + + +LAYOUTS_PATH = "data/layouts.json" + + +# Layouts file structure +# [ +# { +# "id": "" +# "name": "" +# "description": "", +# "projectId": "" +# "dataProviderId": "" +# "creationDate": 0 +# "layout": [ +# # Widget position +# { +# "widgetKey": "parallelCoordinate", +# "x": 0, +# "y": 0, +# 
"width": 0, +# "height": 0, +# "config": {}, # Widget config (optional) +# "name": "", # Name given to the widget (optional) +# "localFilters" : [{}], +# }, +# ], +# "selectedColorColumn": "col", # (optional) +# }, +# ... +# ] + + +def setup_layouts(): + # Create the folder if it does not exist + if not os.path.exists("data"): + os.mkdir("data") + + # Create the file if it does not exist + if not os.path.exists(LAYOUTS_PATH): + with open(LAYOUTS_PATH, "w") as json_file: + json.dump([], json_file) + + +def get_layouts(): + # Return the layouts list + try: + with open(LAYOUTS_PATH) as json_file: + return json.load(json_file) + + except FileNotFoundError: + setup_layouts() + return [] + + except json.decoder.JSONDecodeError as e: + print("Error while reading the layouts file") + print(e) + print("The file will be reset") + _save_layouts([]) + return [] + + +def add_layout(data): + # project_id, data_provider_id, conf_description, conf_name, conf + # Add a new widget layout + # Generate id + id = str(uuid.uuid1()) + + layout_to_add = [] + + for widget in data["layout"]: + widget_position = { + "x": widget["x"], + "y": widget["y"], + "width": widget["width"], + "height": widget["height"], + "widgetKey": widget["widgetKey"], + } + + keys = ["config", "name", "localFilters"] + + for key in keys: + if key in widget: + widget_position[key] = widget[key] + + layout_to_add.append(widget_position) + + file_to_add = { + "id": id, + "name": data["name"], + "description": data["description"], + "projectId": data["projectId"], + "dataProviderId": data["dataProviderId"], + "creationDate": utils.timeNow(), + "layout": layout_to_add, + "lastLayoutSaved": False, + } + + if "selectedColorColumn" in data: + file_to_add["selectedColorColumn"] = data["selectedColorColumn"] + + layouts = get_layouts() + + # Check if their is already a "last saved" layout + if "lastLayoutSaved" in data and data["lastLayoutSaved"]: + file_to_add["lastLayoutSaved"] = True + + for layout in layouts: + if ( + layout["projectId"] == data["projectId"] + and layout["dataProviderId"] == data["dataProviderId"] + and "lastLayoutSaved" in layout + and layout["lastLayoutSaved"] + ): + # Remove the "last saved" layout + layouts.remove(layout) + + # Save layout + layouts.append(file_to_add) + _save_layouts(layouts) + + +def delete_layout(id): + # Delete the widget layout by its name + layouts = get_layouts() + + for layout in layouts: + if layout["id"] == id: + layouts.remove(layout) + + _save_layouts(layouts) + + +def _save_layouts(layouts, retry=False): + # Update the json file + try: + with open(LAYOUTS_PATH, "w") as json_file: + json.dump(layouts, json_file) + except FileNotFoundError: + if not retry: + setup_layouts() + _save_layouts(layouts, True) + else: + print("Error while saving the layouts file") + print("The file will not be saved") diff --git a/build/lib/backend/utils/utils.py b/build/lib/backend/utils/utils.py new file mode 100644 index 000000000..a131b89de --- /dev/null +++ b/build/lib/backend/utils/utils.py @@ -0,0 +1,46 @@ +import time +import yaml +from yaml.loader import SafeLoader +from urllib.parse import urlparse + + +def get_app_version(): + # Read the version from the API YAML file + try: + with open("swagger.yaml") as f: + data = yaml.load(f, Loader=SafeLoader) + return data["info"]["version"] + except Exception as e: + print(e) + return "?.?.?" 
+ + +# Date +def timeNow(): + return time.time() * 1000 + + +# Url +def is_url_valid(url): + try: + result = urlparse(url) + return all([result.scheme, result.netloc]) + except Exception: + return False + + +# Name +def is_valid_name(name): + # /, &, | are not allowed in data-providers & algo-providers names + if ( + "/" in name + or "&" in name + or "|" in name + or len(name) == 0 + or len(name) > 50 + or name[0] == " " + or name[-1] == " " + ): + return False + + return True diff --git a/build/lib/backend/utils/widgetConfigurations/__init__.py b/build/lib/backend/utils/widgetConfigurations/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/build/lib/backend/utils/widgetConfigurations/widgetConfigurations.py b/build/lib/backend/utils/widgetConfigurations/widgetConfigurations.py new file mode 100644 index 000000000..8fddf1d59 --- /dev/null +++ b/build/lib/backend/utils/widgetConfigurations/widgetConfigurations.py @@ -0,0 +1,123 @@ +import os +import json +import backend.utils.utils as utils +import uuid + +CONF_PATH = "data/widgetConfigurations.json" + +# Configuration file structure +# { +# "widgetKey": [ +# { +# "id": "" +# "name": "" +# "description": "", +# "projectId": "" +# "dataProviderId": "" +# "creationDate": 0 +# +# "configuration": {}, +# }, +# ... +# ] +# } + + +def setup_widget_configurations(): + # Create the folder if it does not exist + if not os.path.exists("data"): + os.mkdir("data") + + # Create the file if it does not exist + if not os.path.exists(CONF_PATH): + _save_configurations({}) + + +def get_configurations_overview(): + # Return the number of configurations for each widget + all_configurations = _get_all_configurations() + + configurations_overview = {} + for widget_key in all_configurations: + configurations_overview[widget_key] = len(all_configurations[widget_key]) + + return configurations_overview + + +def get_configurations(widget_key): + # Return the configurations list of the widget + all_configurations = _get_all_configurations() + + if widget_key in all_configurations: + return all_configurations[widget_key] + else: + return [] + + +def add_configuration(widget_key, data): + # project_id, data_provider_id, conf_description, conf_name, conf + # Add a new widget configuration + configurations = _get_all_configurations() + + if widget_key not in configurations: + configurations[widget_key] = [] + + # Generate id + id = str(uuid.uuid1()) + + configuration_to_add = { + "id": id, + "name": data["name"], + "description": data["description"], + "projectId": data["projectId"], + "dataProviderId": data["dataProviderId"], + "creationDate": utils.timeNow(), + "configuration": data["configuration"], + } + + # Save configuration + configurations[widget_key].append(configuration_to_add) + _save_configurations(configurations) + + +def delete_configuration(widget_key, id): + # Delete the widget configuration by its name + configurations = _get_all_configurations() + + if widget_key in configurations: + for configuration in configurations[widget_key]: + if configuration["id"] == id: + configurations[widget_key].remove(configuration) + + _save_configurations(configurations) + + +def _get_all_configurations(): + # Return the configurations list of all widgets + try: + with open(CONF_PATH) as json_file: + return json.load(json_file) + + except FileNotFoundError: + setup_widget_configurations() + return {} + except json.decoder.JSONDecodeError as e: + print("Error while reading the widget configurations file") + print(e) + print("The file will be 
reset") + _save_configurations({}) + return {} + + +def _save_configurations(conf, retry=False): + # Update the json file + try: + with open(CONF_PATH, "w") as json_file: + json.dump(conf, json_file) + except FileNotFoundError: + if not retry: + setup_widget_configurations() + _save_configurations(conf, True) + else: + print("Error while saving the widget configurations file") + print("The file will not be saved") diff --git a/build/lib/backend/websrv.py b/build/lib/backend/websrv.py new file mode 100644 index 000000000..22db667d4 --- /dev/null +++ b/build/lib/backend/websrv.py @@ -0,0 +1,78 @@ +import connexion +import os +import requests +from termcolor import colored +from flask_cors import CORS +from flask import send_from_directory, request, Response +from init import init +from backend.utils.utils import get_app_version +from backend.config.init_config import DEBUG_COLOR + +DEV_FRONTEND_URL = "http://localhost:8080/" +PORT = 3000 + + +app = connexion.App(__name__) +app.add_api("swagger.yaml", strict_validation=True) +CORS(app.app) + + +def send_frontend(path): + if path == "/": + path = "index.html" + + # If production, use the index.html from the dist folder + if os.getenv("FLASK_ENV") == "production": + return send_from_directory("dist", path) + + # In development, redirect to the DEV_FRONTEND_URL + else: + if request.method == "GET": + try: + resp = requests.get(f"{DEV_FRONTEND_URL}{path}") + excluded_headers = [ + "content-encoding", + "content-length", + "transfer-encoding", + "connection", + ] + headers = [ + (name, value) + for (name, value) in resp.raw.headers.items() + if name.lower() not in excluded_headers + ] + response = Response(resp.content, resp.status_code, headers) + return response + except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): + return ( + "You are in a development environment and the DebAI frontend" + + "is not available at the url : " + + DEV_FRONTEND_URL, + 503, + ) + else: + print("Unexpected request method") + + +# For serving the dashboard +@app.route("/") +def send_index(): + return send_frontend("/") + + +# For serving the dashboard assets +@app.route("/") +def send_supporting_elements(path): + return send_frontend(path) + + +if __name__ == "__main__": + # Run DebiAI init + print("================= DebiAI " + get_app_version() + " ====================") + init() + print("======================== RUN =======================") + print( + " DebiAI is available at " + + colored("http://localhost:" + str(PORT), DEBUG_COLOR) + ) + app.run(port=PORT, debug=True) diff --git a/build_and_run.sh b/build_and_run.sh new file mode 100755 index 000000000..f6813be73 --- /dev/null +++ b/build_and_run.sh @@ -0,0 +1,17 @@ +#!/bin/bash + +# Nettoyage des anciennes constructions +echo "Nettoyage des anciennes constructions..." +rm -rf build dist backend.egg-info + +# Construction du package +echo "Construction du package..." +python3 setup.py sdist bdist_wheel + +# Installation du package localement +echo "Installation du package localement..." +pip install . + +# Démarrage du serveur +echo "Démarrage du serveur..." 
+debiai-start diff --git a/cspell.json b/cspell.json index d6e1abc35..8babf04d1 100644 --- a/cspell.json +++ b/cspell.json @@ -54,7 +54,8 @@ "Valeo", "vuejs", "Vuex", - "websrv" + "websrv", + "werkzeug" ], "flagWords": [], "ignorePaths": [ diff --git a/data/layouts.json b/data/layouts.json new file mode 100644 index 000000000..0637a088a --- /dev/null +++ b/data/layouts.json @@ -0,0 +1 @@ +[] \ No newline at end of file diff --git a/data/widgetConfigurations.json b/data/widgetConfigurations.json new file mode 100644 index 000000000..9e26dfeeb --- /dev/null +++ b/data/widgetConfigurations.json @@ -0,0 +1 @@ +{} \ No newline at end of file diff --git a/setup.py b/setup.py new file mode 100644 index 000000000..afa830e7a --- /dev/null +++ b/setup.py @@ -0,0 +1,41 @@ +from setuptools import setup, find_packages + +setup( + name="backend", + version="0.1.0", + packages=find_packages(include=["backend", "backend.*"]), + include_package_data=True, + install_requires=[ + "Flask==2.0.3", + "flask_cors==3.0.8", + "connexion==2.6.0", + "requests==2.25.1", + "swagger-ui-bundle==0.0.5", + "pandas==1.5.1", + "scipy==1.9.3", + "ujson==5.8.0", + "sklearn==0.0", + "kafka-python==2.0.2", + "openapi_spec_validator==0.2.8", + "PyYAML==6.0", + "cacheout==0.14.1", + "termcolor==2.3.0", + "werkzeug==2.2.2", + ], + entry_points={ + "console_scripts": [ + "debiai-start=backend.server:run", + ], + }, + author="Fady Bekkar", + author_email="fady.bekkar@irt-systemx.fr", + description="Python module that allows users to have a standalone version " + "DebiAI.", + long_description=open("README.md").read(), + long_description_content_type="text/markdown", + url="https://github.com/debiai/DebiAI", + classifiers=[ + "Programming Language :: Python :: 3", + ], + python_requires=">=3.6", +) From f95c388cb19d5b2878f34d82fe30b73054eaae84 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Wed, 26 Jun 2024 10:05:02 +0200 Subject: [PATCH 02/97] removed unwanted data files from git repo --- .gitignore | 9 ++ backend.egg-info/PKG-INFO | 98 ------------------- backend.egg-info/SOURCES.txt | 85 ---------------- backend.egg-info/dependency_links.txt | 1 - backend.egg-info/entry_points.txt | 2 - backend.egg-info/requires.txt | 15 --- backend.egg-info/top_level.txt | 1 - backend/backend.py | 22 +++++ backend/config/init_config.py | 2 +- .../algorithms/classificationErrorMetric.py | 6 +- .../algorithms/regressionErrorMetric.py | 7 +- .../integratedAlgoProvider.py | 2 +- backend/utils/utils.py | 2 +- build_and_run.sh | 1 - data/layouts.json | 1 - data/widgetConfigurations.json | 1 - setup.py | 4 +- 17 files changed, 47 insertions(+), 212 deletions(-) delete mode 100644 backend.egg-info/PKG-INFO delete mode 100644 backend.egg-info/SOURCES.txt delete mode 100644 backend.egg-info/dependency_links.txt delete mode 100644 backend.egg-info/entry_points.txt delete mode 100644 backend.egg-info/requires.txt delete mode 100644 backend.egg-info/top_level.txt delete mode 100644 data/layouts.json delete mode 100644 data/widgetConfigurations.json diff --git a/.gitignore b/.gitignore index 839413e91..591d993da 100644 --- a/.gitignore +++ b/.gitignore @@ -9,3 +9,12 @@ __pycache__/ # Front frontend/node_modules/ + +# Ignore the notebookDebiai.ipynb file +notebookDebiai.ipynb + +# Ignore the test_imports.py file +test_imports.py + +# Ignore the tree_project.md file +tree_project.md diff --git a/backend.egg-info/PKG-INFO b/backend.egg-info/PKG-INFO deleted file mode 100644 index dc70b2be1..000000000 --- a/backend.egg-info/PKG-INFO +++ /dev/null @@ -1,98 +0,0 @@ 
-Metadata-Version: 2.1 -Name: backend -Version: 0.1.0 -Summary: Python module that allows users to have a standalone version DebiAI. -Home-page: https://github.com/debiai/DebiAI -Author: Fady Bekkar -Author-email: fady.bekkar@irt-systemx.fr -Classifier: Programming Language :: Python :: 3 -Requires-Python: >=3.6 -Description-Content-Type: text/markdown -License-File: LICENSE - -
- - -[![Online documentation](https://img.shields.io/static/v1?label=&message=Online documentation&color=0077de)](https://debiai.irt-systemx.fr/) -
-[![License](https://img.shields.io/badge/License-Apache_2.0-blue.svg)](https://opensource.org/licenses/Apache-2.0) -![cd](https://github.com/debiai/debiai/actions/workflows/docker-push.yml/badge.svg) -
-![Activity](https://img.shields.io/github/commit-activity/m/debiai/debiai) -![Last commit](https://img.shields.io/github/last-commit/debiai/debiai) -
-[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) -[![Code style: flake8](https://img.shields.io/badge/code%20style-flake8-1c4a6c.svg)](https://flake8.pycqa.org/en/latest/) -[![code style: prettier](https://img.shields.io/badge/code_style-prettier-ff69b4.svg?style=flat-square)](https://github.com/prettier/prettier) - -
- -## Why DebiAI ? - -DebiAI is an open-source web application that aims to facilitate the process of developing Machine Learning models, especially in the stage of the project data analysis and the model performance comparison. - -DebiAI provides data scientists with features to: - -- Identify biases and errors in your input, results, contextual or ground truth project data -- Make a comparison of the performance of your ML according to their contextual results -- Select and create sets of data graphically for further analysis or (re-)training purposes -- Quickly create and share statistical visualizations of your project data for your team or client - -## Documentation - -The full documentation is available on the [DebiAI website](https://debiai.irt-systemx.fr/). - -## Dashboard - -DebiAI has a Web Graphical User Interface with a complete data visualization toolkit offering many statistical analysis tools: - -

- -

- -The dashboard is highly customizable and can be used for large and small projects. Learn more about the [widgets and how to use them](https://debiai.irt-systemx.fr/dashboard/widgets/). - -## Data - -DebiAI is designed to be used for any kind projects and data, it is particularly useful for projects that involve many contextual data. - -DebiAI provide two main ways to import your data: - -- A [DebiAI Python module](https://debiai.irt-systemx.fr/dataInsertion/pythonModule/) is provided to insert, directly from your Python workflow, the data and model results that you want to study. -- You can also create a [Data Provider](https://debiai.irt-systemx.fr/dataInsertion/dataProviders/), a Web API that will allow DebiAI to reach your data and model results from any programming language and any data sources without duplication. - Check out the [DebiAI Data Provider NodeJs template](https://github.com/debiai/data-provider-nodejs-template) for an example of a Data Provider. - -## Installation - -DebiAI is available as a Docker image. To install it, you can follow the [installation guide](https://debiai.irt-systemx.fr/introduction/gettingStarted/installation). - -## Use cases - -As part of the [Confiance.ai](https://www.confiance.ai/) program, we (the [IRT SystemX](https://www.irt-systemx.fr/)) are using and developing DebiAI for a wide range of use cases. - -One of them is the [Valeo - WoodScape](https://woodscape.valeo.com/) dataset: - -### Valeo - WoodScape - -The Valeo - WoodScape dataset is an annotated image dataset taken from 4 fisheye cameras. DebiAI is used to analyze the dataset for biases and outliers in the data. - -

- -

- -Withing the [Confiance.ai](https://www.confiance.ai/) program, DebiAI has been able to import the project data, detect biases, find annotations errors and export them to the project's image annotation tool. - ---- - -

- DebiAI is developed by - - - - And is integrated in - - - -

- ---- diff --git a/backend.egg-info/SOURCES.txt b/backend.egg-info/SOURCES.txt deleted file mode 100644 index abd98b8ae..000000000 --- a/backend.egg-info/SOURCES.txt +++ /dev/null @@ -1,85 +0,0 @@ -LICENSE -MANIFEST.in -README.md -setup.py -backend/__init__.py -backend/backend.py -backend/init.py -backend/server.py -backend/swagger.yaml -backend/websrv.py -backend.egg-info/PKG-INFO -backend.egg-info/SOURCES.txt -backend.egg-info/dependency_links.txt -backend.egg-info/entry_points.txt -backend.egg-info/requires.txt -backend.egg-info/top_level.txt -backend/config/__init__.py -backend/config/init_config.py -backend/controller/__init__.py -backend/controller/algoProviders.py -backend/controller/data.py -backend/controller/dataProviders.py -backend/controller/exportMethods.py -backend/controller/layouts.py -backend/controller/models.py -backend/controller/projects.py -backend/controller/pythonModuleDp.py -backend/controller/selection.py -backend/controller/statisticalOperations.py -backend/controller/widgetConfigurations.py -backend/modules/__init__.py -backend/modules/algoProviders/AlgoProvider.py -backend/modules/algoProviders/AlgoProviderException.py -backend/modules/algoProviders/__init__.py -backend/modules/algoProviders/algoProvidersManager.py -backend/modules/algoProviders/integratedAlgoProvider/__init__.py -backend/modules/algoProviders/integratedAlgoProvider/integratedAlgoProvider.py -backend/modules/algoProviders/integratedAlgoProvider/utils.py -backend/modules/algoProviders/integratedAlgoProvider/algorithms/__init__.py -backend/modules/algoProviders/integratedAlgoProvider/algorithms/classificationErrorMetric.py -backend/modules/algoProviders/integratedAlgoProvider/algorithms/regressionErrorMetric.py -backend/modules/dataProviders/DataProvider.py -backend/modules/dataProviders/DataProviderException.py -backend/modules/dataProviders/__init__.py -backend/modules/dataProviders/dataProviderManager.py -backend/modules/dataProviders/pythonDataProvider/PythonDataProvider.py -backend/modules/dataProviders/pythonDataProvider/__init__.py -backend/modules/dataProviders/pythonDataProvider/dataUtils/__init__.py -backend/modules/dataProviders/pythonDataProvider/dataUtils/hash.py -backend/modules/dataProviders/pythonDataProvider/dataUtils/models.py -backend/modules/dataProviders/pythonDataProvider/dataUtils/projects.py -backend/modules/dataProviders/pythonDataProvider/dataUtils/pythonModuleUtils.py -backend/modules/dataProviders/pythonDataProvider/dataUtils/samples.py -backend/modules/dataProviders/pythonDataProvider/dataUtils/selections.py -backend/modules/dataProviders/pythonDataProvider/dataUtils/tags.py -backend/modules/dataProviders/pythonDataProvider/dataUtils/tree.py -backend/modules/dataProviders/webDataProvider/WebDataProvider.py -backend/modules/dataProviders/webDataProvider/__init__.py -backend/modules/dataProviders/webDataProvider/cache/__init__.py -backend/modules/dataProviders/webDataProvider/cache/cache.py -backend/modules/dataProviders/webDataProvider/http/__init__.py -backend/modules/dataProviders/webDataProvider/http/api.py -backend/modules/dataProviders/webDataProvider/useCases/__init__.py -backend/modules/dataProviders/webDataProvider/useCases/data.py -backend/modules/dataProviders/webDataProvider/useCases/models.py -backend/modules/dataProviders/webDataProvider/useCases/projects.py -backend/modules/dataProviders/webDataProvider/useCases/selections.py -backend/modules/exportMethods/__init__.py -backend/modules/exportMethods/exportClass.py 
-backend/modules/exportMethods/exportUtils.py -backend/modules/exportMethods/methods/__init__.py -backend/modules/exportMethods/methods/kafkaUtils.py -backend/modules/exportMethods/methods/postUtils.py -backend/tests/__init__.py -backend/tests/test_algo_providers.py -backend/tests/test_data_providers.py -backend/tests/test_layouts.py -backend/tests/test_pythonModuleDataProvider.py -backend/tests/test_widget_configurations.py -backend/utils/__init__.py -backend/utils/utils.py -backend/utils/layouts/__init__.py -backend/utils/layouts/layouts.py -backend/utils/widgetConfigurations/__init__.py -backend/utils/widgetConfigurations/widgetConfigurations.py \ No newline at end of file diff --git a/backend.egg-info/dependency_links.txt b/backend.egg-info/dependency_links.txt deleted file mode 100644 index 8b1378917..000000000 --- a/backend.egg-info/dependency_links.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/backend.egg-info/entry_points.txt b/backend.egg-info/entry_points.txt deleted file mode 100644 index 5639d1fa0..000000000 --- a/backend.egg-info/entry_points.txt +++ /dev/null @@ -1,2 +0,0 @@ -[console_scripts] -debiai-start = backend.server:run diff --git a/backend.egg-info/requires.txt b/backend.egg-info/requires.txt deleted file mode 100644 index 33fff5e2a..000000000 --- a/backend.egg-info/requires.txt +++ /dev/null @@ -1,15 +0,0 @@ -Flask==2.0.3 -flask_cors==3.0.8 -connexion==2.6.0 -requests==2.25.1 -swagger-ui-bundle==0.0.5 -pandas==1.5.1 -scipy==1.9.3 -ujson==5.8.0 -sklearn==0.0 -kafka-python==2.0.2 -openapi_spec_validator==0.2.8 -PyYAML==6.0 -cacheout==0.14.1 -termcolor==2.3.0 -werkzeug==2.2.2 diff --git a/backend.egg-info/top_level.txt b/backend.egg-info/top_level.txt deleted file mode 100644 index e34d8c321..000000000 --- a/backend.egg-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -backend diff --git a/backend/backend.py b/backend/backend.py index 7429936da..2a0ed6337 100644 --- a/backend/backend.py +++ b/backend/backend.py @@ -1,7 +1,10 @@ import connexion import os import requests +import webbrowser +import psutil from termcolor import colored +from threading import Timer from flask_cors import CORS from flask import send_from_directory, request, Response from backend.init import init @@ -67,6 +70,24 @@ def send_supporting_elements(path): return app +def is_browser_open(): + """Check if a browser process is running.""" + browser_keywords = ["chrome", "firefox", "safari", "edge", "opera"] + for proc in psutil.process_iter(["pid", "name"]): + for keyword in browser_keywords: + if keyword in proc.info["name"].lower(): + return True + return False + + +def open_browser(): + url = f"http://localhost:{PORT}" + if is_browser_open(): + webbrowser.open_new_tab(url) + else: + webbrowser.open(url) + + def start_server(): # Run DebiAI init print("================= DebiAI " + get_app_version() + " ====================") @@ -77,4 +98,5 @@ def start_server(): + colored("http://localhost:" + str(PORT), DEBUG_COLOR) ) app = create_app() + Timer(1, open_browser).start() app.run(port=PORT, debug=True) diff --git a/backend/config/init_config.py b/backend/config/init_config.py index f8663f715..bbb7e5959 100644 --- a/backend/config/init_config.py +++ b/backend/config/init_config.py @@ -3,7 +3,7 @@ import os -config_path = "config/config.ini" +config_path = "backend/config/config.ini" config_parser = ConfigParser() DEBUG_COLOR = "light_blue" diff --git a/backend/modules/algoProviders/integratedAlgoProvider/algorithms/classificationErrorMetric.py 
b/backend/modules/algoProviders/integratedAlgoProvider/algorithms/classificationErrorMetric.py index df545d8da..db6e675ec 100644 --- a/backend/modules/algoProviders/integratedAlgoProvider/algorithms/classificationErrorMetric.py +++ b/backend/modules/algoProviders/integratedAlgoProvider/algorithms/classificationErrorMetric.py @@ -1,4 +1,8 @@ -from ..utils import get_input_from_inputs +from backend.modules.algoProviders.integratedAlgoProvider.utils import ( + get_input_from_inputs, +) + +# from ..utils import get_input_from_inputs # This algorithm is a simple classification metric calculator # It takes a list of values corresponding to the ground truth diff --git a/backend/modules/algoProviders/integratedAlgoProvider/algorithms/regressionErrorMetric.py b/backend/modules/algoProviders/integratedAlgoProvider/algorithms/regressionErrorMetric.py index 30315775e..035ad8a59 100644 --- a/backend/modules/algoProviders/integratedAlgoProvider/algorithms/regressionErrorMetric.py +++ b/backend/modules/algoProviders/integratedAlgoProvider/algorithms/regressionErrorMetric.py @@ -1,4 +1,9 @@ -from ..utils import get_input_from_inputs +from backend.modules.algoProviders.integratedAlgoProvider.utils import ( + get_input_from_inputs, +) + +# from ..utils import get_input_from_inputs + # This algorithm is a simple regression metric calculator # It takes a list of numbers corresponding to an error diff --git a/backend/modules/algoProviders/integratedAlgoProvider/integratedAlgoProvider.py b/backend/modules/algoProviders/integratedAlgoProvider/integratedAlgoProvider.py index 4b27ea380..9386f3444 100644 --- a/backend/modules/algoProviders/integratedAlgoProvider/integratedAlgoProvider.py +++ b/backend/modules/algoProviders/integratedAlgoProvider/integratedAlgoProvider.py @@ -29,7 +29,7 @@ def _get_algorithm_python(algorithm_name): # Import the algorithm algorithm_python = __import__( - "modules.algoProviders.integratedAlgoProvider.algorithms." + algorithm_file, + "backend.modules.algoProviders.integratedAlgoProvider.algorithms." + algorithm_file, fromlist=["*"], ) diff --git a/backend/utils/utils.py b/backend/utils/utils.py index a131b89de..2849502f3 100644 --- a/backend/utils/utils.py +++ b/backend/utils/utils.py @@ -7,7 +7,7 @@ def get_app_version(): # Read the version from the API YAML file try: - with open("swagger.yaml") as f: + with open("backend/swagger.yaml") as f: data = yaml.load(f, Loader=SafeLoader) return data["info"]["version"] except Exception as e: diff --git a/build_and_run.sh b/build_and_run.sh index f6813be73..28e920178 100755 --- a/build_and_run.sh +++ b/build_and_run.sh @@ -13,5 +13,4 @@ echo "Installation du package localement..." pip install . # Démarrage du serveur -echo "Démarrage du serveur..." 
debiai-start diff --git a/data/layouts.json b/data/layouts.json deleted file mode 100644 index 0637a088a..000000000 --- a/data/layouts.json +++ /dev/null @@ -1 +0,0 @@ -[] \ No newline at end of file diff --git a/data/widgetConfigurations.json b/data/widgetConfigurations.json deleted file mode 100644 index 9e26dfeeb..000000000 --- a/data/widgetConfigurations.json +++ /dev/null @@ -1 +0,0 @@ -{} \ No newline at end of file diff --git a/setup.py b/setup.py index afa830e7a..8a6aa463c 100644 --- a/setup.py +++ b/setup.py @@ -29,8 +29,8 @@ }, author="Fady Bekkar", author_email="fady.bekkar@irt-systemx.fr", - description="Python module that allows users to have a standalone version " - "DebiAI.", + description="Python module that allows users to have a standalone DebiAI" + "version.", long_description=open("README.md").read(), long_description_content_type="text/markdown", url="https://github.com/debiai/DebiAI", From 8bd3e7c8fba885067f62b7fe3ef99bd5410acf08 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Wed, 26 Jun 2024 10:06:05 +0200 Subject: [PATCH 03/97] updated .gitignore --- .gitignore | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/.gitignore b/.gitignore index 591d993da..7d1d2c6b6 100644 --- a/.gitignore +++ b/.gitignore @@ -7,9 +7,15 @@ backend/data .coverage __pycache__/ +# Backend.egg-info +backend.egg-info/ + # Front frontend/node_modules/ +# Data +data/ + # Ignore the notebookDebiai.ipynb file notebookDebiai.ipynb @@ -18,3 +24,6 @@ test_imports.py # Ignore the tree_project.md file tree_project.md + +# Ignore the build and run file +build_and_run.sh From 4e44b646a9d1a40302c79ea42685985f9ecb741d Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Wed, 26 Jun 2024 10:15:49 +0200 Subject: [PATCH 04/97] formated necessary files --- backend/controller/dataProviders.py | 4 +++- backend/modules/algoProviders/algoProvidersManager.py | 7 ++++++- .../dataProviders/pythonDataProvider/dataUtils/projects.py | 5 ++++- .../dataProviders/pythonDataProvider/dataUtils/tags.py | 5 ++++- .../dataProviders/webDataProvider/useCases/projects.py | 4 +++- 5 files changed, 20 insertions(+), 5 deletions(-) diff --git a/backend/controller/dataProviders.py b/backend/controller/dataProviders.py index 503a0fa21..ae3cbaeff 100644 --- a/backend/controller/dataProviders.py +++ b/backend/controller/dataProviders.py @@ -2,7 +2,9 @@ # Imports ############################################################################# from backend.config.init_config import get_config -from backend.modules.dataProviders.webDataProvider.WebDataProvider import WebDataProvider +from backend.modules.dataProviders.webDataProvider.WebDataProvider import ( + WebDataProvider, +) from backend.utils.utils import is_url_valid import backend.modules.dataProviders.dataProviderManager as data_provider_manager from backend.modules.dataProviders.DataProviderException import DataProviderException diff --git a/backend/modules/algoProviders/algoProvidersManager.py b/backend/modules/algoProviders/algoProvidersManager.py index 17d2bf79d..d05b06bf1 100644 --- a/backend/modules/algoProviders/algoProvidersManager.py +++ b/backend/modules/algoProviders/algoProvidersManager.py @@ -1,6 +1,11 @@ from termcolor import colored -from backend.config.init_config import get_config, DEBUG_COLOR, ERROR_COLOR, SUCCESS_COLOR +from backend.config.init_config import ( + get_config, + DEBUG_COLOR, + ERROR_COLOR, + SUCCESS_COLOR, +) from backend.modules.algoProviders.AlgoProviderException import AlgoProviderException from backend.modules.algoProviders.AlgoProvider import 
AlgoProvider from backend.modules.algoProviders.integratedAlgoProvider.integratedAlgoProvider import ( diff --git a/backend/modules/dataProviders/pythonDataProvider/dataUtils/projects.py b/backend/modules/dataProviders/pythonDataProvider/dataUtils/projects.py index dcecd3f8a..d484cb515 100644 --- a/backend/modules/dataProviders/pythonDataProvider/dataUtils/projects.py +++ b/backend/modules/dataProviders/pythonDataProvider/dataUtils/projects.py @@ -2,7 +2,10 @@ import shutil import ujson as json -from backend.modules.dataProviders.pythonDataProvider.dataUtils import pythonModuleUtils, hash +from backend.modules.dataProviders.pythonDataProvider.dataUtils import ( + pythonModuleUtils, + hash, +) DATA_PATH = pythonModuleUtils.DATA_PATH diff --git a/backend/modules/dataProviders/pythonDataProvider/dataUtils/tags.py b/backend/modules/dataProviders/pythonDataProvider/dataUtils/tags.py index 13f6ce476..425313aba 100644 --- a/backend/modules/dataProviders/pythonDataProvider/dataUtils/tags.py +++ b/backend/modules/dataProviders/pythonDataProvider/dataUtils/tags.py @@ -1,5 +1,8 @@ import os -from backend.modules.dataProviders.pythonDataProvider.dataUtils import pythonModuleUtils, hash +from backend.modules.dataProviders.pythonDataProvider.dataUtils import ( + pythonModuleUtils, + hash, +) DATA_PATH = pythonModuleUtils.DATA_PATH diff --git a/backend/modules/dataProviders/webDataProvider/useCases/projects.py b/backend/modules/dataProviders/webDataProvider/useCases/projects.py index c3dad454b..03f5dcae2 100644 --- a/backend/modules/dataProviders/webDataProvider/useCases/projects.py +++ b/backend/modules/dataProviders/webDataProvider/useCases/projects.py @@ -1,6 +1,8 @@ import backend.modules.dataProviders.webDataProvider.http.api as api -from backend.modules.dataProviders.webDataProvider.useCases.models import get_models_info +from backend.modules.dataProviders.webDataProvider.useCases.models import ( + get_models_info, +) from backend.modules.dataProviders.webDataProvider.useCases.selections import ( get_project_selections, ) From 174631f268ae748d5397814da3eef713a09de464 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Wed, 26 Jun 2024 10:19:35 +0200 Subject: [PATCH 05/97] formatting and cspell --- backend/__init__.py | 2 +- .../integratedAlgoProvider/integratedAlgoProvider.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/backend/__init__.py b/backend/__init__.py index 4a22300fc..ab263b94f 100644 --- a/backend/__init__.py +++ b/backend/__init__.py @@ -1 +1 @@ -from backend.backend import send_frontend, create_app, start_server \ No newline at end of file +from backend.backend import send_frontend, create_app, start_server diff --git a/backend/modules/algoProviders/integratedAlgoProvider/integratedAlgoProvider.py b/backend/modules/algoProviders/integratedAlgoProvider/integratedAlgoProvider.py index 9386f3444..0fbc713ab 100644 --- a/backend/modules/algoProviders/integratedAlgoProvider/integratedAlgoProvider.py +++ b/backend/modules/algoProviders/integratedAlgoProvider/integratedAlgoProvider.py @@ -29,7 +29,8 @@ def _get_algorithm_python(algorithm_name): # Import the algorithm algorithm_python = __import__( - "backend.modules.algoProviders.integratedAlgoProvider.algorithms." + algorithm_file, + "backend.modules.algoProviders.integratedAlgoProvider.algorithms." 
+ + algorithm_file, fromlist=["*"], ) From 8741db05783c530cd90bb7359ee05e8283980956 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Wed, 26 Jun 2024 10:29:21 +0200 Subject: [PATCH 06/97] ignore flake8 checks for init files --- backend/setup.cfg | 2 ++ 1 file changed, 2 insertions(+) diff --git a/backend/setup.cfg b/backend/setup.cfg index 7df595195..049324401 100644 --- a/backend/setup.cfg +++ b/backend/setup.cfg @@ -8,3 +8,5 @@ show-source = True # Set the line max length to Black's default max-line-length = 88 + +per-file-ignores = __init__.py:F401 From b1c344ee3f38609c8d56bf9d74db4f13cba9063d Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Wed, 26 Jun 2024 10:42:52 +0200 Subject: [PATCH 07/97] adjusted files for flake8 checks --- .gitignore | 3 +++ backend/modules/algoProviders/algoProvidersManager.py | 4 ++-- .../modules/dataProviders/webDataProvider/WebDataProvider.py | 2 +- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/.gitignore b/.gitignore index 7d1d2c6b6..2cedd5e05 100644 --- a/.gitignore +++ b/.gitignore @@ -13,6 +13,9 @@ backend.egg-info/ # Front frontend/node_modules/ +# Build +build/ + # Data data/ diff --git a/backend/modules/algoProviders/algoProvidersManager.py b/backend/modules/algoProviders/algoProvidersManager.py index d05b06bf1..81993407f 100644 --- a/backend/modules/algoProviders/algoProvidersManager.py +++ b/backend/modules/algoProviders/algoProvidersManager.py @@ -6,9 +6,9 @@ ERROR_COLOR, SUCCESS_COLOR, ) -from backend.modules.algoProviders.AlgoProviderException import AlgoProviderException +from backend.modules.algoProviders.AlgoProviderException import AlgoProviderException # noqa from backend.modules.algoProviders.AlgoProvider import AlgoProvider -from backend.modules.algoProviders.integratedAlgoProvider.integratedAlgoProvider import ( +from backend.modules.algoProviders.integratedAlgoProvider.integratedAlgoProvider import ( # noqa IntegratedAlgoProvider, ) diff --git a/backend/modules/dataProviders/webDataProvider/WebDataProvider.py b/backend/modules/dataProviders/webDataProvider/WebDataProvider.py index 3ff8afb3a..bc30585d8 100644 --- a/backend/modules/dataProviders/webDataProvider/WebDataProvider.py +++ b/backend/modules/dataProviders/webDataProvider/WebDataProvider.py @@ -14,7 +14,7 @@ get_model_result_id, delete_model, ) -import backend.modules.dataProviders.webDataProvider.useCases.selections as useCaseSelections +import backend.modules.dataProviders.webDataProvider.useCases.selections as useCaseSelections # noqa from backend.modules.dataProviders.webDataProvider.http.api import get_info, get_status from backend.modules.dataProviders.webDataProvider.cache.cache import Cache From 12d53885b4b0d3462275f718f481e24504810955 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Wed, 26 Jun 2024 10:52:35 +0200 Subject: [PATCH 08/97] cpsell and format --- backend/modules/algoProviders/algoProvidersManager.py | 6 ++++-- .../dataProviders/webDataProvider/WebDataProvider.py | 2 +- build_and_run.sh | 4 ---- 3 files changed, 5 insertions(+), 7 deletions(-) diff --git a/backend/modules/algoProviders/algoProvidersManager.py b/backend/modules/algoProviders/algoProvidersManager.py index 81993407f..282023606 100644 --- a/backend/modules/algoProviders/algoProvidersManager.py +++ b/backend/modules/algoProviders/algoProvidersManager.py @@ -6,9 +6,11 @@ ERROR_COLOR, SUCCESS_COLOR, ) -from backend.modules.algoProviders.AlgoProviderException import AlgoProviderException # noqa +from backend.modules.algoProviders.AlgoProviderException import ( + AlgoProviderException, +) 
# noqa from backend.modules.algoProviders.AlgoProvider import AlgoProvider -from backend.modules.algoProviders.integratedAlgoProvider.integratedAlgoProvider import ( # noqa +from backend.modules.algoProviders.integratedAlgoProvider.integratedAlgoProvider import ( # noqa IntegratedAlgoProvider, ) diff --git a/backend/modules/dataProviders/webDataProvider/WebDataProvider.py b/backend/modules/dataProviders/webDataProvider/WebDataProvider.py index bc30585d8..0f2d10678 100644 --- a/backend/modules/dataProviders/webDataProvider/WebDataProvider.py +++ b/backend/modules/dataProviders/webDataProvider/WebDataProvider.py @@ -14,7 +14,7 @@ get_model_result_id, delete_model, ) -import backend.modules.dataProviders.webDataProvider.useCases.selections as useCaseSelections # noqa +import backend.modules.dataProviders.webDataProvider.useCases.selections as useCaseSelections # noqa from backend.modules.dataProviders.webDataProvider.http.api import get_info, get_status from backend.modules.dataProviders.webDataProvider.cache.cache import Cache diff --git a/build_and_run.sh b/build_and_run.sh index 28e920178..8455fbda5 100755 --- a/build_and_run.sh +++ b/build_and_run.sh @@ -1,16 +1,12 @@ #!/bin/bash -# Nettoyage des anciennes constructions echo "Nettoyage des anciennes constructions..." rm -rf build dist backend.egg-info -# Construction du package echo "Construction du package..." python3 setup.py sdist bdist_wheel -# Installation du package localement echo "Installation du package localement..." pip install . -# Démarrage du serveur debiai-start From e2b08b076036d7066a4ece91c796324818cd5321 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Wed, 26 Jun 2024 11:00:44 +0200 Subject: [PATCH 09/97] cspell checks and ignore files added --- build_and_run.sh | 3 --- cspell.json | 1 + 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/build_and_run.sh b/build_and_run.sh index 8455fbda5..0c97d5b48 100755 --- a/build_and_run.sh +++ b/build_and_run.sh @@ -1,12 +1,9 @@ #!/bin/bash -echo "Nettoyage des anciennes constructions..." rm -rf build dist backend.egg-info -echo "Construction du package..." python3 setup.py sdist bdist_wheel -echo "Installation du package localement..." pip install . 
debiai-start diff --git a/cspell.json b/cspell.json index 8babf04d1..be7970b5b 100644 --- a/cspell.json +++ b/cspell.json @@ -69,6 +69,7 @@ "__pycache__/", "frontend/dist", "frontend/node_modules", + "./setup.py", "TODO" ] } From b5d42d36bafc36755c6afcf13800d9ae247118b1 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Wed, 26 Jun 2024 11:07:14 +0200 Subject: [PATCH 10/97] added sdist and bdist --- cspell.json | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/cspell.json b/cspell.json index be7970b5b..79eca028d 100644 --- a/cspell.json +++ b/cspell.json @@ -55,7 +55,9 @@ "vuejs", "Vuex", "websrv", - "werkzeug" + "werkzeug", + "sdist", + "bdist" ], "flagWords": [], "ignorePaths": [ From 4e57dc089fda3cc4537cb396c1ed4eff2ba9afa5 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Wed, 26 Jun 2024 11:13:16 +0200 Subject: [PATCH 11/97] upgraded version in front and backend --- backend/swagger.yaml | 2 +- frontend/package.json | 2 +- .../widgets/WidgetTemplateFull/WidgetTemplateFull.vue | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/backend/swagger.yaml b/backend/swagger.yaml index a6cb5c5b4..97ffdb6d9 100644 --- a/backend/swagger.yaml +++ b/backend/swagger.yaml @@ -1,6 +1,6 @@ swagger: "2.0" info: - version: 0.27.1 + version: 0.27.2 title: DebiAI_BACKEND_API description: DebiAI backend api contact: diff --git a/frontend/package.json b/frontend/package.json index 9aade6fc9..458db02be 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -1,6 +1,6 @@ { "name": "debiai_frontend", - "version": "0.27.1", + "version": "0.27.2", "description": "Frontend for Debiai, made with Vuejs", "license": "Apache-2.0", "scripts": { diff --git a/frontend/src/components/debiai/dataAnalysis/widgets/WidgetTemplateFull/WidgetTemplateFull.vue b/frontend/src/components/debiai/dataAnalysis/widgets/WidgetTemplateFull/WidgetTemplateFull.vue index 526d3f872..c55c191b2 100644 --- a/frontend/src/components/debiai/dataAnalysis/widgets/WidgetTemplateFull/WidgetTemplateFull.vue +++ b/frontend/src/components/debiai/dataAnalysis/widgets/WidgetTemplateFull/WidgetTemplateFull.vue @@ -420,7 +420,7 @@ export default { // that can watched and used to redraw a plot (see created()) }, redrawRequired: function () { - // The colored colum has changed + // The colored column has changed // We cat tell the parent widget that an update is required this.$parent.colorWarning = true; // A redraw btn will be displayed, pressing it will send redraw From 07939a6cba31cbfe10bfe9fbf501af88b8b1d709 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Fri, 28 Jun 2024 15:11:10 +0200 Subject: [PATCH 12/97] recursive for dist dir into backend package --- MANIFEST.in | 2 ++ 1 file changed, 2 insertions(+) diff --git a/MANIFEST.in b/MANIFEST.in index 32eda311e..4a5a96589 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1 +1,3 @@ include backend/swagger.yaml +recursive-include backend/dist * + From 4f40926476b713120b36a5e246a0e5fc0e2e4fc9 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Fri, 28 Jun 2024 15:14:13 +0200 Subject: [PATCH 13/97] refactor backend script for imports --- backend/__init__.py | 2 +- backend/backend.py | 102 -------------------------------------------- backend/server.py | 4 +- 3 files changed, 3 insertions(+), 105 deletions(-) delete mode 100644 backend/backend.py diff --git a/backend/__init__.py b/backend/__init__.py index ab263b94f..afd319a5f 100644 --- a/backend/__init__.py +++ b/backend/__init__.py @@ -1 +1 @@ -from backend.backend import send_frontend, create_app, start_server +from 
backend.websrv import send_frontend, create_app, start_server diff --git a/backend/backend.py b/backend/backend.py deleted file mode 100644 index 2a0ed6337..000000000 --- a/backend/backend.py +++ /dev/null @@ -1,102 +0,0 @@ -import connexion -import os -import requests -import webbrowser -import psutil -from termcolor import colored -from threading import Timer -from flask_cors import CORS -from flask import send_from_directory, request, Response -from backend.init import init -from backend.utils.utils import get_app_version -from backend.config.init_config import DEBUG_COLOR - -DEV_FRONTEND_URL = "http://localhost:8080/" -PORT = 3000 - - -def send_frontend(path): - if path == "/": - path = "index.html" - - # If production, use the index.html from the dist folder - if os.getenv("FLASK_ENV") == "production": - return send_from_directory("dist", path) - - # In development, redirect to the DEV_FRONTEND_URL - else: - if request.method == "GET": - try: - resp = requests.get(f"{DEV_FRONTEND_URL}{path}") - excluded_headers = [ - "content-encoding", - "content-length", - "transfer-encoding", - "connection", - ] - headers = [ - (name, value) - for (name, value) in resp.raw.headers.items() - if name.lower() not in excluded_headers - ] - response = Response(resp.content, resp.status_code, headers) - return response - except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): - return ( - "You are in a development environment and the DebAI frontend" - + "is not available at the url : " - + DEV_FRONTEND_URL, - 503, - ) - else: - print("Unexpected request method") - - -def create_app(): - app = connexion.App(__name__) - app.add_api("swagger.yaml", strict_validation=True) - CORS(app.app) - - # For serving the dashboard - @app.route("/") - def send_index(): - return send_frontend("/") - - # For serving the dashboard assets - @app.route("/") - def send_supporting_elements(path): - return send_frontend(path) - - return app - - -def is_browser_open(): - """Check if a browser process is running.""" - browser_keywords = ["chrome", "firefox", "safari", "edge", "opera"] - for proc in psutil.process_iter(["pid", "name"]): - for keyword in browser_keywords: - if keyword in proc.info["name"].lower(): - return True - return False - - -def open_browser(): - url = f"http://localhost:{PORT}" - if is_browser_open(): - webbrowser.open_new_tab(url) - else: - webbrowser.open(url) - - -def start_server(): - # Run DebiAI init - print("================= DebiAI " + get_app_version() + " ====================") - init() - print("======================== RUN =======================") - print( - " DebiAI is available at " - + colored("http://localhost:" + str(PORT), DEBUG_COLOR) - ) - app = create_app() - Timer(1, open_browser).start() - app.run(port=PORT, debug=True) diff --git a/backend/server.py b/backend/server.py index 0c3e2210c..10b6beaeb 100644 --- a/backend/server.py +++ b/backend/server.py @@ -1,5 +1,5 @@ -from backend.backend import start_server +from backend.websrv import start_server def run(): - start_server() + start_server(reloader=False) From 93e4779f4c3393536ab2798d3bf43e19aebf2e6c Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Fri, 28 Jun 2024 15:23:19 +0200 Subject: [PATCH 14/97] conditionnal reloader + debug mode depending on flask env var --- backend/websrv.py | 60 ++++++++++++++++++++++++++++++++++++----------- 1 file changed, 46 insertions(+), 14 deletions(-) diff --git a/backend/websrv.py b/backend/websrv.py index 22db667d4..15e7c4e70 100644 --- a/backend/websrv.py +++ b/backend/websrv.py 
@@ -1,17 +1,18 @@ import connexion import os import requests +import webbrowser +import psutil from termcolor import colored +from threading import Timer from flask_cors import CORS from flask import send_from_directory, request, Response -from init import init +from backend.init import init from backend.utils.utils import get_app_version from backend.config.init_config import DEBUG_COLOR DEV_FRONTEND_URL = "http://localhost:8080/" PORT = 3000 - - app = connexion.App(__name__) app.add_api("swagger.yaml", strict_validation=True) CORS(app.app) @@ -22,7 +23,10 @@ def send_frontend(path): path = "index.html" # If production, use the index.html from the dist folder - if os.getenv("FLASK_ENV") == "production": + # if os.getenv("FLASK_ENV") == "production": + env = os.getenv("FLASK_ENV", "production") + debug_mode = env == "production" + if debug_mode: return send_from_directory("dist", path) # In development, redirect to the DEV_FRONTEND_URL @@ -54,19 +58,45 @@ def send_frontend(path): print("Unexpected request method") -# For serving the dashboard -@app.route("/") -def send_index(): - return send_frontend("/") +def create_app(): + # For serving the dashboard + @app.route("/") + def send_index(): + return send_frontend("/") + + # For serving the dashboard assets + @app.route("/") + def send_supporting_elements(path): + return send_frontend(path) + + @app.route("/verify") + def index(): + base_dir = os.path.dirname(os.path.abspath(__file__)) + dist_dir = os.path.join(base_dir, "dist") + return f"Chemin du dossier dist : {dist_dir}" + return app -# For serving the dashboard assets -@app.route("/") -def send_supporting_elements(path): - return send_frontend(path) + +def is_browser_open(): + """Check if a browser process is running.""" + browser_keywords = ["chrome", "firefox", "safari", "edge", "opera"] + for proc in psutil.process_iter(["pid", "name"]): + for keyword in browser_keywords: + if keyword in proc.info["name"].lower(): + return True + return False + + +def open_browser(): + url = f"http://localhost:{PORT}" + if is_browser_open(): + webbrowser.open_new_tab(url) + else: + webbrowser.open(url) -if __name__ == "__main__": +def start_server(reloader=True): # Run DebiAI init print("================= DebiAI " + get_app_version() + " ====================") init() @@ -75,4 +105,6 @@ def send_supporting_elements(path): " DebiAI is available at " + colored("http://localhost:" + str(PORT), DEBUG_COLOR) ) - app.run(port=PORT, debug=True) + app = create_app() + Timer(1, open_browser).start() + app.run(port=PORT, debug=True, use_reloader=reloader) From a9f8d5eccb8b12faa2afc8ab9250de780d892320 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Fri, 28 Jun 2024 15:43:14 +0200 Subject: [PATCH 15/97] removed verify function and comments --- backend/websrv.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/backend/websrv.py b/backend/websrv.py index 15e7c4e70..df51dd280 100644 --- a/backend/websrv.py +++ b/backend/websrv.py @@ -23,7 +23,6 @@ def send_frontend(path): path = "index.html" # If production, use the index.html from the dist folder - # if os.getenv("FLASK_ENV") == "production": env = os.getenv("FLASK_ENV", "production") debug_mode = env == "production" if debug_mode: @@ -69,12 +68,6 @@ def send_index(): def send_supporting_elements(path): return send_frontend(path) - @app.route("/verify") - def index(): - base_dir = os.path.dirname(os.path.abspath(__file__)) - dist_dir = os.path.join(base_dir, "dist") - return f"Chemin du dossier dist : {dist_dir}" - return app From 
4d305ef7482cfb0fef23cd9fcf253fcf0c77c556 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Fri, 28 Jun 2024 15:43:55 +0200 Subject: [PATCH 16/97] deleted build files wrongly pushed before --- build/lib/backend/__init__.py | 1 - build/lib/backend/backend.py | 80 - build/lib/backend/config/__init__.py | 0 build/lib/backend/config/init_config.py | 240 --- build/lib/backend/controller/__init__.py | 0 build/lib/backend/controller/algoProviders.py | 70 - build/lib/backend/controller/data.py | 38 - build/lib/backend/controller/dataProviders.py | 88 - build/lib/backend/controller/exportMethods.py | 51 - build/lib/backend/controller/layouts.py | 23 - build/lib/backend/controller/models.py | 56 - build/lib/backend/controller/projects.py | 98 - .../lib/backend/controller/pythonModuleDp.py | 66 - build/lib/backend/controller/selection.py | 49 - .../controller/statisticalOperations.py | 646 ------ .../controller/widgetConfigurations.py | 28 - build/lib/backend/init.py | 45 - build/lib/backend/modules/__init__.py | 0 .../modules/algoProviders/AlgoProvider.py | 109 - .../algoProviders/AlgoProviderException.py | 15 - .../backend/modules/algoProviders/__init__.py | 0 .../algoProviders/algoProvidersManager.py | 114 - .../integratedAlgoProvider/__init__.py | 0 .../algorithms/__init__.py | 0 .../algorithms/classificationErrorMetric.py | 99 - .../algorithms/regressionErrorMetric.py | 149 -- .../integratedAlgoProvider.py | 103 - .../integratedAlgoProvider/utils.py | 59 - .../modules/dataProviders/DataProvider.py | 77 - .../dataProviders/DataProviderException.py | 15 - .../backend/modules/dataProviders/__init__.py | 0 .../dataProviders/dataProviderManager.py | 127 -- .../pythonDataProvider/PythonDataProvider.py | 250 --- .../pythonDataProvider/__init__.py | 0 .../pythonDataProvider/dataUtils/__init__.py | 0 .../pythonDataProvider/dataUtils/hash.py | 60 - .../pythonDataProvider/dataUtils/models.py | 281 --- .../pythonDataProvider/dataUtils/projects.py | 256 --- .../dataUtils/pythonModuleUtils.py | 121 -- .../pythonDataProvider/dataUtils/samples.py | 130 -- .../dataUtils/selections.py | 117 - .../pythonDataProvider/dataUtils/tags.py | 109 - .../pythonDataProvider/dataUtils/tree.py | 332 --- .../webDataProvider/WebDataProvider.py | 106 - .../dataProviders/webDataProvider/__init__.py | 0 .../webDataProvider/cache/__init__.py | 0 .../webDataProvider/cache/cache.py | 90 - .../webDataProvider/http/__init__.py | 0 .../dataProviders/webDataProvider/http/api.py | 222 -- .../webDataProvider/useCases/__init__.py | 0 .../webDataProvider/useCases/data.py | 23 - .../webDataProvider/useCases/models.py | 58 - .../webDataProvider/useCases/projects.py | 131 -- .../webDataProvider/useCases/selections.py | 61 - .../backend/modules/exportMethods/__init__.py | 0 .../modules/exportMethods/exportClass.py | 43 - .../modules/exportMethods/exportUtils.py | 200 -- .../modules/exportMethods/methods/__init__.py | 0 .../exportMethods/methods/kafkaUtils.py | 71 - .../exportMethods/methods/postUtils.py | 59 - build/lib/backend/server.py | 5 - build/lib/backend/swagger.yaml | 1914 ----------------- build/lib/backend/tests/__init__.py | 0 .../lib/backend/tests/test_algo_providers.py | 95 - .../lib/backend/tests/test_data_providers.py | 96 - build/lib/backend/tests/test_layouts.py | 131 -- .../tests/test_pythonModuleDataProvider.py | 137 -- .../tests/test_widget_configurations.py | 85 - build/lib/backend/utils/__init__.py | 0 build/lib/backend/utils/layouts/__init__.py | 0 build/lib/backend/utils/layouts/layouts.py | 149 -- 
build/lib/backend/utils/utils.py | 46 - .../utils/widgetConfigurations/__init__.py | 0 .../widgetConfigurations.py | 123 -- build/lib/backend/websrv.py | 78 - 75 files changed, 8025 deletions(-) delete mode 100644 build/lib/backend/__init__.py delete mode 100644 build/lib/backend/backend.py delete mode 100644 build/lib/backend/config/__init__.py delete mode 100644 build/lib/backend/config/init_config.py delete mode 100644 build/lib/backend/controller/__init__.py delete mode 100644 build/lib/backend/controller/algoProviders.py delete mode 100644 build/lib/backend/controller/data.py delete mode 100644 build/lib/backend/controller/dataProviders.py delete mode 100644 build/lib/backend/controller/exportMethods.py delete mode 100644 build/lib/backend/controller/layouts.py delete mode 100644 build/lib/backend/controller/models.py delete mode 100644 build/lib/backend/controller/projects.py delete mode 100644 build/lib/backend/controller/pythonModuleDp.py delete mode 100644 build/lib/backend/controller/selection.py delete mode 100644 build/lib/backend/controller/statisticalOperations.py delete mode 100644 build/lib/backend/controller/widgetConfigurations.py delete mode 100644 build/lib/backend/init.py delete mode 100644 build/lib/backend/modules/__init__.py delete mode 100644 build/lib/backend/modules/algoProviders/AlgoProvider.py delete mode 100644 build/lib/backend/modules/algoProviders/AlgoProviderException.py delete mode 100644 build/lib/backend/modules/algoProviders/__init__.py delete mode 100644 build/lib/backend/modules/algoProviders/algoProvidersManager.py delete mode 100644 build/lib/backend/modules/algoProviders/integratedAlgoProvider/__init__.py delete mode 100644 build/lib/backend/modules/algoProviders/integratedAlgoProvider/algorithms/__init__.py delete mode 100644 build/lib/backend/modules/algoProviders/integratedAlgoProvider/algorithms/classificationErrorMetric.py delete mode 100644 build/lib/backend/modules/algoProviders/integratedAlgoProvider/algorithms/regressionErrorMetric.py delete mode 100644 build/lib/backend/modules/algoProviders/integratedAlgoProvider/integratedAlgoProvider.py delete mode 100644 build/lib/backend/modules/algoProviders/integratedAlgoProvider/utils.py delete mode 100644 build/lib/backend/modules/dataProviders/DataProvider.py delete mode 100644 build/lib/backend/modules/dataProviders/DataProviderException.py delete mode 100644 build/lib/backend/modules/dataProviders/__init__.py delete mode 100644 build/lib/backend/modules/dataProviders/dataProviderManager.py delete mode 100644 build/lib/backend/modules/dataProviders/pythonDataProvider/PythonDataProvider.py delete mode 100644 build/lib/backend/modules/dataProviders/pythonDataProvider/__init__.py delete mode 100644 build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/__init__.py delete mode 100644 build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/hash.py delete mode 100644 build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/models.py delete mode 100644 build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/projects.py delete mode 100644 build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/pythonModuleUtils.py delete mode 100644 build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/samples.py delete mode 100644 build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/selections.py delete mode 100644 build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/tags.py delete mode 100644 
build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/tree.py delete mode 100644 build/lib/backend/modules/dataProviders/webDataProvider/WebDataProvider.py delete mode 100644 build/lib/backend/modules/dataProviders/webDataProvider/__init__.py delete mode 100644 build/lib/backend/modules/dataProviders/webDataProvider/cache/__init__.py delete mode 100644 build/lib/backend/modules/dataProviders/webDataProvider/cache/cache.py delete mode 100644 build/lib/backend/modules/dataProviders/webDataProvider/http/__init__.py delete mode 100644 build/lib/backend/modules/dataProviders/webDataProvider/http/api.py delete mode 100644 build/lib/backend/modules/dataProviders/webDataProvider/useCases/__init__.py delete mode 100644 build/lib/backend/modules/dataProviders/webDataProvider/useCases/data.py delete mode 100644 build/lib/backend/modules/dataProviders/webDataProvider/useCases/models.py delete mode 100644 build/lib/backend/modules/dataProviders/webDataProvider/useCases/projects.py delete mode 100644 build/lib/backend/modules/dataProviders/webDataProvider/useCases/selections.py delete mode 100644 build/lib/backend/modules/exportMethods/__init__.py delete mode 100644 build/lib/backend/modules/exportMethods/exportClass.py delete mode 100644 build/lib/backend/modules/exportMethods/exportUtils.py delete mode 100644 build/lib/backend/modules/exportMethods/methods/__init__.py delete mode 100644 build/lib/backend/modules/exportMethods/methods/kafkaUtils.py delete mode 100644 build/lib/backend/modules/exportMethods/methods/postUtils.py delete mode 100644 build/lib/backend/server.py delete mode 100644 build/lib/backend/swagger.yaml delete mode 100644 build/lib/backend/tests/__init__.py delete mode 100644 build/lib/backend/tests/test_algo_providers.py delete mode 100644 build/lib/backend/tests/test_data_providers.py delete mode 100644 build/lib/backend/tests/test_layouts.py delete mode 100644 build/lib/backend/tests/test_pythonModuleDataProvider.py delete mode 100644 build/lib/backend/tests/test_widget_configurations.py delete mode 100644 build/lib/backend/utils/__init__.py delete mode 100644 build/lib/backend/utils/layouts/__init__.py delete mode 100644 build/lib/backend/utils/layouts/layouts.py delete mode 100644 build/lib/backend/utils/utils.py delete mode 100644 build/lib/backend/utils/widgetConfigurations/__init__.py delete mode 100644 build/lib/backend/utils/widgetConfigurations/widgetConfigurations.py delete mode 100644 build/lib/backend/websrv.py diff --git a/build/lib/backend/__init__.py b/build/lib/backend/__init__.py deleted file mode 100644 index 4a22300fc..000000000 --- a/build/lib/backend/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from backend.backend import send_frontend, create_app, start_server \ No newline at end of file diff --git a/build/lib/backend/backend.py b/build/lib/backend/backend.py deleted file mode 100644 index 7429936da..000000000 --- a/build/lib/backend/backend.py +++ /dev/null @@ -1,80 +0,0 @@ -import connexion -import os -import requests -from termcolor import colored -from flask_cors import CORS -from flask import send_from_directory, request, Response -from backend.init import init -from backend.utils.utils import get_app_version -from backend.config.init_config import DEBUG_COLOR - -DEV_FRONTEND_URL = "http://localhost:8080/" -PORT = 3000 - - -def send_frontend(path): - if path == "/": - path = "index.html" - - # If production, use the index.html from the dist folder - if os.getenv("FLASK_ENV") == "production": - return send_from_directory("dist", path) - - # In 
development, redirect to the DEV_FRONTEND_URL - else: - if request.method == "GET": - try: - resp = requests.get(f"{DEV_FRONTEND_URL}{path}") - excluded_headers = [ - "content-encoding", - "content-length", - "transfer-encoding", - "connection", - ] - headers = [ - (name, value) - for (name, value) in resp.raw.headers.items() - if name.lower() not in excluded_headers - ] - response = Response(resp.content, resp.status_code, headers) - return response - except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): - return ( - "You are in a development environment and the DebAI frontend" - + "is not available at the url : " - + DEV_FRONTEND_URL, - 503, - ) - else: - print("Unexpected request method") - - -def create_app(): - app = connexion.App(__name__) - app.add_api("swagger.yaml", strict_validation=True) - CORS(app.app) - - # For serving the dashboard - @app.route("/") - def send_index(): - return send_frontend("/") - - # For serving the dashboard assets - @app.route("/") - def send_supporting_elements(path): - return send_frontend(path) - - return app - - -def start_server(): - # Run DebiAI init - print("================= DebiAI " + get_app_version() + " ====================") - init() - print("======================== RUN =======================") - print( - " DebiAI is available at " - + colored("http://localhost:" + str(PORT), DEBUG_COLOR) - ) - app = create_app() - app.run(port=PORT, debug=True) diff --git a/build/lib/backend/config/__init__.py b/build/lib/backend/config/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/build/lib/backend/config/init_config.py b/build/lib/backend/config/init_config.py deleted file mode 100644 index f8663f715..000000000 --- a/build/lib/backend/config/init_config.py +++ /dev/null @@ -1,240 +0,0 @@ -from configparser import ConfigParser -from termcolor import colored - -import os - -config_path = "config/config.ini" -config_parser = ConfigParser() - -DEBUG_COLOR = "light_blue" -DEBUG_SECONDARY_COLOR = "blue" -ERROR_COLOR = "light_red" -SUCCESS_COLOR = "green" - -# Default config -config = { - "DATA_PROVIDERS_CONFIG": { - "creation": True, - "deletion": True, - }, - "INTEGRATED_DATA_PROVIDER": { - "enabled": True, - "allow_create_projects": True, - "allow_delete_projects": True, - "allow_insert_data": True, - "allow_create_selections": True, - "allow_delete_selections": True, - "allow_create_models": True, - "allow_delete_models": True, - "allow_insert_results": True, - }, - "WEB_DATA_PROVIDERS_CONFIG": { - "cache": True, - "cache_duration": 120, - }, - "WEB_DATA_PROVIDERS": { - # "name": "url" - }, - "ALGO_PROVIDERS_CONFIG": { - "enable_integrated": True, - "creation": True, - "deletion": True, - }, - "ALGO_PROVIDERS": { - # "name": "url" - }, - "EXPORT_METHODS_CONFIG": { - "creation": True, - "deletion": True, - }, - "EXPORT_METHODS": { - # "name": "type, param1, param2, ..." 
- }, -} - -# Env vars mapping -ENV_VAR_MAPPING = { - "DATA_PROVIDERS_CONFIG": { - "creation": "DEBIAI_DATA_PROVIDERS_ALLOW_CREATION", - "deletion": "DEBIAI_DATA_PROVIDERS_ALLOW_DELETION", - }, - "INTEGRATED_DATA_PROVIDER": { - "enabled": "DEBIAI_INTEGRATED_DATA_PROVIDER_ENABLED", - "allow_create_projects": "DEBIAI_INTEGRATED_DP_ALLOW_CREATE_PROJECTS", - "allow_delete_projects": "DEBIAI_INTEGRATED_DP_ALLOW_DELETE_PROJECTS", - "allow_insert_data": "DEBIAI_INTEGRATED_DP_ALLOW_INSERT_DATA", - "allow_create_selections": "DEBIAI_INTEGRATED_DP_ALLOW_CREATE_SELECTIONS", - "allow_delete_selections": "DEBIAI_INTEGRATED_DP_ALLOW_DELETE_SELECTIONS", - "allow_create_models": "DEBIAI_INTEGRATED_DP_ALLOW_CREATE_MODELS", - "allow_delete_models": "DEBIAI_INTEGRATED_DP_ALLOW_DELETE_MODELS", - "allow_insert_results": "DEBIAI_INTEGRATED_DP_ALLOW_INSERT_RESULTS", - }, - "WEB_DATA_PROVIDERS_CONFIG": { - "cache": "DEBIAI_WEB_DATA_PROVIDERS_CACHE_ENABLED", - "cache_duration": "DEBIAI_WEB_DATA_PROVIDERS_CACHE_DURATION", - }, - "ALGO_PROVIDERS_CONFIG": { - "enable_integrated": "DEBIAI_ALGO_PROVIDERS_ENABLE_INTEGRATED", - "creation": "DEBIAI_ALGO_PROVIDERS_ALLOW_CREATION", - "deletion": "DEBIAI_ALGO_PROVIDERS_ALLOW_DELETION", - }, - "EXPORT_METHODS_CONFIG": { - "creation": "DEBIAI_EXPORT_METHODS_ALLOW_CREATION", - "deletion": "DEBIAI_EXPORT_METHODS_ALLOW_DELETION", - }, -} - -# List of list based config sections + their env var mapping -LIST_CONFIG_SECTIONS = { - "WEB_DATA_PROVIDERS": "DEBIAI_WEB_DATA_PROVIDER_", - "ALGO_PROVIDERS": "DEBIAI_ALGO_PROVIDER_", - "EXPORT_METHODS": "DEBIAI_EXPORT_METHOD_", -} - -changes_made = False - - -def get_config_value(section, key, config_parser): - # Return the value of the key in the section of the config_parser - # Or return the ENV_VAR if it exists - - value = None - ENV_VAR = ENV_VAR_MAPPING[section][key] - - # Get the value from the config file - if section in config_parser and key in config_parser[section]: - value = str.lower(config_parser[section][key]) - - # Get the value from the environment variables - if ENV_VAR in os.environ: - value = str.lower(os.environ[ENV_VAR]) - - if value is None: - print( - " - Missing " - + colored(section, DEBUG_SECONDARY_COLOR) - + " / " - + colored(key, DEBUG_SECONDARY_COLOR) - + " in config or in " - + colored(ENV_VAR, DEBUG_SECONDARY_COLOR) - + " env var, using default" - ) - return None - - return value - - -def get_config_values(section, config_parser): - # Return a dict of the values of the section of the config_parser - # Or return the ENV_VAR if it exists - - values = {} - ENV_VAR = LIST_CONFIG_SECTIONS[section] - - # Get the value from the config file - if section in config_parser: - for key in config_parser[section]: - values[key] = str.lower(config_parser[section][key]) - - # Get the value from the environment variables - # iterate over the keys of the env var - for key in os.environ.keys(): - if key.startswith(ENV_VAR): - # Get the key name without the env var prefix - key_name = key[len(ENV_VAR) :] # noqa - values[key_name] = str.lower(os.environ[key]) - - return values - - -def set_config_value(section, key, value): - global config, changes_made - - if section in config and key in config[section]: - if config[section][key] != value: - # The default value is different from the one in the config file - config[section][key] = value - changes_made = True - - print( - " - Overriding " - + colored(section, DEBUG_COLOR) - + " / " - + colored(key, DEBUG_COLOR) - + " with value " - + colored(str(value), DEBUG_COLOR) - ) - - -def 
init_config(): - global config - - print("===================== CONFIG =======================") - - # Read the config file - config_parser.read(config_path) - - for section in config.keys(): - # Deal with boolean, integer and string values - for key in config[section].keys(): - # Get the value from the config file or the environment variables - value = get_config_value(section, key, config_parser) - - if value is None: - continue - - # Deal with booleans - if type(config[section][key]) is bool: - if value == "false": - set_config_value(section, key, False) - elif value == "true": - set_config_value(section, key, True) - else: - print( - colored(" [ERROR]", ERROR_COLOR) - + " Invalid boolean value for " - + colored(key, DEBUG_COLOR) - + ", using default value" - ) - continue - - # Deal with integers - elif type(config[section][key]) is int: - try: - set_config_value(section, key, int(value)) - except ValueError: - print( - colored(" [ERROR]", ERROR_COLOR) - + " Invalid integer value for " - + colored(key, DEBUG_COLOR) - + ", using default value" - ) - continue - - # Deal with strings - elif type(config[section][key]) is str: - set_config_value(section, key, str(value)) - - # Deal with list based config elements - if section in LIST_CONFIG_SECTIONS: - elements = get_config_values(section, config_parser) - - for element_name in elements: - print( - " - Adding " - + section.lower().replace("_", "-")[0:-1] - + " " - + colored(element_name, DEBUG_COLOR) - + " (" - + colored(elements[element_name], DEBUG_COLOR) - + ")" - ) - - config[section][element_name] = elements[element_name] - - if not changes_made: - print(" Default config used") - - -def get_config(): - return config diff --git a/build/lib/backend/controller/__init__.py b/build/lib/backend/controller/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/build/lib/backend/controller/algoProviders.py b/build/lib/backend/controller/algoProviders.py deleted file mode 100644 index 3116cf164..000000000 --- a/build/lib/backend/controller/algoProviders.py +++ /dev/null @@ -1,70 +0,0 @@ -############################################################################# -# Imports -############################################################################# -from backend.config.init_config import get_config -from backend.utils.utils import is_url_valid, is_valid_name -import backend.modules.algoProviders.algoProvidersManager as algo_provider_manager -from backend.modules.algoProviders.AlgoProviderException import AlgoProviderException -from backend.modules.algoProviders.AlgoProvider import AlgoProvider - -############################################################################# -# Algo providers Management -############################################################################# - - -def get_algo_providers(): - algorithms = algo_provider_manager.get_algo_providers_json() - return algorithms, 200 - - -def post_algo_provider(data): - # Check if we are allowed to add AlgoProviders from the config file - config = get_config() - creation_allowed = config["ALGO_PROVIDERS_CONFIG"]["creation"] - if not creation_allowed: - return "AlgoProvider creation is not allowed", 403 - - # Check if algoProviders already exists - if algo_provider_manager.algo_provider_exists(data["name"]): - return "AlgoProvider '" + data["name"] + "' already exists", 400 - - # Check if algoProviders name is valid - if not is_valid_name(data["name"]): - return "Invalid algoProviders name", 400 - - # Add the algoProvider - # Check if url is valid - if not 
is_url_valid(data["url"]): - return "Invalid url", 400 - - algo_provider_manager.add(AlgoProvider(data["url"], data["name"])) - - return None, 204 - - -def use_algo(algoProviderName, algoId, data): - # Check if algoProviders exists - if not algo_provider_manager.algo_provider_exists(algoProviderName): - return "AlgoProvider " + algoProviderName + " does not exists", 404 - - try: - # Use algoProviders - algo_provider = algo_provider_manager.get_single_algo_provider(algoProviderName) - return algo_provider.use_algorithm(algoId, data), 200 - except AlgoProviderException as e: - return e.message, e.status_code - - -def delete_algo_provider(name): - # Check if we are allowed to add AlgoProviders from the config file - config = get_config() - deletion_allowed = config["ALGO_PROVIDERS_CONFIG"]["deletion"] - if not deletion_allowed: - return "AlgoProvider deletion is not allowed", 403 - - # Delete the algoProvider - try: - algo_provider_manager.delete(name) - return None, 204 - except AlgoProviderException as e: - return e.message, e.status_code diff --git a/build/lib/backend/controller/data.py b/build/lib/backend/controller/data.py deleted file mode 100644 index 420cb5d6e..000000000 --- a/build/lib/backend/controller/data.py +++ /dev/null @@ -1,38 +0,0 @@ -############################################################################# -# Imports -############################################################################# -import backend.modules.dataProviders.dataProviderManager as data_provider_manager -from backend.modules.dataProviders.DataProviderException import DataProviderException - -############################################################################# -# Data Management -############################################################################# - - -def get_data(dataProviderId, projectId, data): - # return a project data from a list of ids - sampleIds = data["sampleIds"] - analysis = data["analysis"] - - try: - # Find the data provider - data_provider = data_provider_manager.get_single_data_provider(dataProviderId) - - # Ask for the data - samples = data_provider.get_samples(projectId, analysis, sampleIds) - - if samples is not None: - return { - "data": samples, - "dataMap": True, - }, 200 - - return ( - "Can't find samples for project " - + projectId - + " on data provider : " - + dataProviderId, - 404, - ) - except DataProviderException as e: - return e.message, e.status_code diff --git a/build/lib/backend/controller/dataProviders.py b/build/lib/backend/controller/dataProviders.py deleted file mode 100644 index 503a0fa21..000000000 --- a/build/lib/backend/controller/dataProviders.py +++ /dev/null @@ -1,88 +0,0 @@ -############################################################################# -# Imports -############################################################################# -from backend.config.init_config import get_config -from backend.modules.dataProviders.webDataProvider.WebDataProvider import WebDataProvider -from backend.utils.utils import is_url_valid -import backend.modules.dataProviders.dataProviderManager as data_provider_manager -from backend.modules.dataProviders.DataProviderException import DataProviderException - -############################################################################# -# Data Providers Management -############################################################################# - - -def get_data_providers(): - data_provider_list = data_provider_manager.get_data_provider_list() - providers_formatted = [] - for data_provider in 
data_provider_list: - data = {} - if data_provider.type != "Python module Data Provider": - data["url"] = data_provider.url - data["status"] = data_provider.is_alive() - - data["name"] = data_provider.name - data["type"] = data_provider.type - - providers_formatted.append(data) - - return providers_formatted, 200 - - -def get_data_provider_info(dataProviderId): - try: - data_provider = data_provider_manager.get_single_data_provider(dataProviderId) - info = data_provider.get_info() - - return info, 200 - except DataProviderException as e: - return e.message, e.status_code - - -def post_data_providers(data): - # Check if we are allowed to add data providers from the config file - config = get_config() - creation_allowed = config["DATA_PROVIDERS_CONFIG"]["creation"] - if not creation_allowed: - return "Data provider creation is not allowed", 403 - - # Check if data provider already exists - if data_provider_manager.data_provider_exists(data["name"]): - return "Data provider already exists", 400 - - # Check if data provider name is valid - if not data_provider_manager.is_valid_name(data["name"]): - return "Invalid data provider name", 400 - - try: - # Add data provider - if data["type"].lower() == "web": - # Check if url is valid - if "url" not in data: - return "A url must be provided", 400 - - if not is_url_valid(data["url"]): - return "Invalid url", 400 - - data_provider_manager.add(WebDataProvider(data["url"], data["name"])) - else: - return "Invalid data provider type, valid types are: Web", 400 - - return None, 204 - except DataProviderException as e: - return e.message, e.status_code - - -def delete_data_providers(dataProviderId): - # Check if we are allowed to add data providers from the config file - config = get_config() - deletion_allowed = config["DATA_PROVIDERS_CONFIG"]["deletion"] - if not deletion_allowed: - return "Data provider deletion is not allowed", 403 - - # Delete data provider - try: - data_provider_manager.delete(dataProviderId) - return None, 204 - except DataProviderException as e: - return e.message, e.status_code diff --git a/build/lib/backend/controller/exportMethods.py b/build/lib/backend/controller/exportMethods.py deleted file mode 100644 index 227c40767..000000000 --- a/build/lib/backend/controller/exportMethods.py +++ /dev/null @@ -1,51 +0,0 @@ -from backend.config.init_config import get_config -import backend.modules.exportMethods.exportUtils as exportUtils - -############################################################################# -# Export API Management -############################################################################# - - -def get_export_methods(): - # ParametersCheck - return exportUtils.get_export_methods(), 200 - - -def post_export_method(data): - # Check if the creation of export methods is allowed - config = get_config() - creation_allowed = config["EXPORT_METHODS_CONFIG"]["creation"] - if not creation_allowed: - return "Export method creation is not allowed", 403 - - try: - return exportUtils.add_export_method(data), 200 - except Exception as e: - return str(e), 400 - - -def delete_export_method(exportMethodId): - # Check if the deletion of export methods is allowed - config = get_config() - deletion_allowed = config["EXPORT_METHODS_CONFIG"]["deletion"] - if not deletion_allowed: - return "Export method deletion is not allowed", 403 - - try: - return exportUtils.delete_export_method(exportMethodId), 200 - except Exception as e: - return str(e), 400 - - -def exportSelection(dataProviderId, projectId, data): - try: - return 
exportUtils.exportSelection(dataProviderId, projectId, data), 200 - except Exception as e: - return str(e), 400 - - -def exportData(exportMethodId, data): - try: - return exportUtils.exportData(exportMethodId, data), 200 - except Exception as e: - return str(e), 400 diff --git a/build/lib/backend/controller/layouts.py b/build/lib/backend/controller/layouts.py deleted file mode 100644 index da3962a15..000000000 --- a/build/lib/backend/controller/layouts.py +++ /dev/null @@ -1,23 +0,0 @@ -############################################################################# -# Imports -############################################################################# -import backend.utils.layouts.layouts as layoutsUtils - -############################################################################# -# Analysis dashboard layout Management -############################################################################# - - -def get_layouts(): - layouts_overview = layoutsUtils.get_layouts() - return layouts_overview, 200 - - -def post_layout(data): - layoutsUtils.add_layout(data) - return None, 204 - - -def delete_layout(id): - layoutsUtils.delete_layout(id) - return None, 204 diff --git a/build/lib/backend/controller/models.py b/build/lib/backend/controller/models.py deleted file mode 100644 index 4137e33ea..000000000 --- a/build/lib/backend/controller/models.py +++ /dev/null @@ -1,56 +0,0 @@ -############################################################################# -# Imports -############################################################################# - -import backend.modules.dataProviders.dataProviderManager as data_provider_manager -from backend.modules.dataProviders.DataProviderException import DataProviderException - -############################################################################# -# MODELS Management -############################################################################# - - -def get_model_id_list(dataProviderId, projectId, modelId): - """ - Get the list of models for a project - """ - try: - data_provider = data_provider_manager.get_single_data_provider(dataProviderId) - return list(data_provider.get_model_results_id_list(projectId, modelId)), 200 - except DataProviderException as e: - return e.message, e.status_code - - -def get_results(dataProviderId, projectId, modelId, data): - """ - Get the model results from a sample list - """ - try: - data_provider = data_provider_manager.get_single_data_provider(dataProviderId) - return ( - data_provider.get_model_results(projectId, modelId, data["sampleIds"]), - 200, - ) - except DataProviderException as e: - return e.message, e.status_code - - -def post_model(dataProviderId, projectId, data): - # Create a new model - try: - data_provider = data_provider_manager.get_single_data_provider(dataProviderId) - data_provider.create_model(projectId, data) - return "model created", 200 - except DataProviderException as e: - return e.message, e.status_code - - -def delete_model(dataProviderId, projectId, modelId): - """ - Delete a model - """ - try: - data_provider = data_provider_manager.get_single_data_provider(dataProviderId) - return data_provider.delete_model(projectId, modelId), 200 - except DataProviderException as e: - return e.message, e.status_code diff --git a/build/lib/backend/controller/projects.py b/build/lib/backend/controller/projects.py deleted file mode 100644 index fdfd26ee3..000000000 --- a/build/lib/backend/controller/projects.py +++ /dev/null @@ -1,98 +0,0 @@ 
-############################################################################# -# Imports -############################################################################# -from backend.modules.dataProviders.DataProviderException import DataProviderException -import backend.modules.dataProviders.dataProviderManager as data_provider_manager - -############################################################################# -# PROJECTS Management -############################################################################# - - -def ping(): - return "Online", 200 - - -def get_data_providers_project(dataProviderId): - # Return a list of project overviews for a specific data provider - data_provider = data_provider_manager.get_single_data_provider(dataProviderId) - - if data_provider is None: - return "Data provider not found", 404 - - try: - projects = data_provider.get_projects() - - if projects is not None: - # Adding data provider id to projects - for project in projects: - project["dataProviderId"] = data_provider.name - - except DataProviderException as e: - print("Warning get DP projects : " + e.message) - - return projects, 200 - - -def get_projects(): - # Return a list of project overviews from all the data providers - data_providers_list = data_provider_manager.get_data_provider_list() - projectOverviews = [] - for data_provider in data_providers_list: - try: - projects = data_provider.get_projects() - - if projects is not None: - # Adding data provider id to projects - for project in projects: - project["dataProviderId"] = data_provider.name - - projectOverviews.extend(projects) - - except DataProviderException as e: - print("Warning get DP projects : " + e.message) - - return projectOverviews, 200 - - -def get_project(dataProviderId, projectId): - # return the info about datasets, models, selections & tags - try: - data_provider = data_provider_manager.get_single_data_provider(dataProviderId) - - project = data_provider.get_project(projectId) - - # Adding data provider id to project - project["dataProviderId"] = dataProviderId - - return project, 200 - except DataProviderException as e: - return e.message, e.status_code - - -def get_data_id_list(dataProviderId, projectId, requestParameters): - # return the list of data ids - try: - data_provider = data_provider_manager.get_single_data_provider(dataProviderId) - - data_id_list = data_provider.get_id_list( - projectId, - requestParameters["analysis"], - requestParameters["from"], - requestParameters["to"], - ) - - return data_id_list, 200 - except DataProviderException as e: - return e.message, e.status_code - - -def delete_project(dataProviderId, projectId): - # Delete a project - try: - data_provider = data_provider_manager.get_single_data_provider(dataProviderId) - - data_provider.delete_project(projectId) - return "Project deleted", 200 - except DataProviderException as e: - return e.message, e.status_code diff --git a/build/lib/backend/controller/pythonModuleDp.py b/build/lib/backend/controller/pythonModuleDp.py deleted file mode 100644 index 432b57944..000000000 --- a/build/lib/backend/controller/pythonModuleDp.py +++ /dev/null @@ -1,66 +0,0 @@ -from backend.modules.dataProviders.DataProviderException import DataProviderException -import backend.modules.dataProviders.dataProviderManager as data_provider_manager - - -# Project -def post_project(data): - # Ask a data provider to create a project - dataProviderId = "Python module Data Provider" - projectName = data["projectName"] - - # Check project name - if len(projectName) > 100: - 
return "Project name too long", 400 - - # Create project - try: - data_provider = data_provider_manager.get_single_data_provider(dataProviderId) - - project = data_provider.create_project(projectName) - - # Adding data provider id to project - project["dataProviderId"] = dataProviderId - - return project, 200 - except DataProviderException as e: - return e.message, e.status_code - - -# Block level -def post_block_levels(dataProviderId, projectId, block_levels): - try: - data_provider = data_provider_manager.get_single_data_provider(dataProviderId) - data_provider.update_block_structure(projectId, block_levels) - return block_levels, 200 - except DataProviderException as e: - return e.message, e.status_code - - -# Expected_results -def post_resultsStructure(dataProviderId, projectId, resultStructure): - # Add the expected results structure - try: - data_provider = data_provider_manager.get_single_data_provider(dataProviderId) - data_provider.update_results_structure(projectId, resultStructure) - return resultStructure, 200 - except DataProviderException as e: - return e.message, e.status_code - - -def post_block_tree(dataProviderId, projectId, data): - # Add data to a project from a tree - try: - data_provider = data_provider_manager.get_single_data_provider(dataProviderId) - return data_provider.add_block_tree(projectId, data), 200 - except DataProviderException as e: - return e.message, e.status_code - - -# Add model results -def add_results_dict(dataProviderId, projectId, modelId, data): - try: - data_provider = data_provider_manager.get_single_data_provider(dataProviderId) - data_provider.add_results_dict(projectId, modelId, data) - return "Results added", 200 - except DataProviderException as e: - return e.message, e.status_code diff --git a/build/lib/backend/controller/selection.py b/build/lib/backend/controller/selection.py deleted file mode 100644 index 2e5ddc0e1..000000000 --- a/build/lib/backend/controller/selection.py +++ /dev/null @@ -1,49 +0,0 @@ -############################################################################# -# Imports -############################################################################# - -import backend.modules.dataProviders.dataProviderManager as data_provider_manager -from backend.modules.dataProviders.DataProviderException import DataProviderException - -############################################################################# -# Selections Management -############################################################################# - - -def get_selections(dataProviderId, projectId): - try: - data_provider = data_provider_manager.get_single_data_provider(dataProviderId) - return data_provider.get_selections(projectId), 200 - except DataProviderException as e: - return e.message, e.status_code - - -def get_selection_id_list(dataProviderId, projectId, selectionId): - try: - data_provider = data_provider_manager.get_single_data_provider(dataProviderId) - return data_provider.get_selection_id_list(projectId, selectionId), 200 - except DataProviderException as e: - return e.message, e.status_code - - -def post_selection(dataProviderId, projectId, data): - try: - data_provider = data_provider_manager.get_single_data_provider(dataProviderId) - data_provider.create_selection( - projectId, - data["selectionName"], - data["sampleHashList"], - data["requestId"] if "requestId" in data else None, - ) - return "Selection added", 200 - except DataProviderException as e: - return e.message, e.status_code - - -def delete_selection(dataProviderId, projectId, 
selectionId): - try: - data_provider = data_provider_manager.get_single_data_provider(dataProviderId) - data_provider.delete_selection(projectId, selectionId) - return "Selection deleted", 200 - except DataProviderException as e: - return e.message, e.status_code diff --git a/build/lib/backend/controller/statisticalOperations.py b/build/lib/backend/controller/statisticalOperations.py deleted file mode 100644 index dcc3482d4..000000000 --- a/build/lib/backend/controller/statisticalOperations.py +++ /dev/null @@ -1,646 +0,0 @@ -import pandas as pd -import numpy as np -import numpy.random as nr - -from scipy.stats.stats import pearsonr, spearmanr -from scipy.special import digamma -import scipy.spatial as ss -from scipy.spatial.ckdtree import cKDTree -from sklearn.neighbors import NearestNeighbors -from math import log, fabs, sqrt - - -#  === Correlation matrix === -def pearsonCorrelation(data): - """Computes the Pearson's coefficient for every pair of variables provided - - Parameters - ---------- - data: list of lists where each list contains the observations of a single variable - : Array of rows with the same sizes (discrete & continuous) - - Return - ------ - result: correlation matrix along with the p-value of the significance test of the coefficients - significance level legend: 3(***) -> p-value<0.01 -> The coefficient is significant at 99% - 2(**) -> p-value<0.05 ->The coefficient is significant at 95% - 1(*) -> p-value<0.1 -> The coefficient is significant at 90% - - """ - - for i in range(len(data) - 1): - assert len(data[i]) == len( - data[i + 1] - ), "The provided samples should have the same length" - # transform the list of samples to dataframe - df = pd.DataFrame(data).transpose() - rho = df.corr() # calculate the correlation matrix - pval = df.corr(method=lambda x, y: pearsonr(x, y)[1]) - np.eye( - *rho.shape - ) # calculate the p-value - # return the number of * - p = pval.applymap(lambda x: (len([i for t in [0.01, 0.05, 0.1] if x <= t]))) - ret = rho.values.tolist() - for i in range(rho.shape[0]): - for j in range(rho.shape[1]): - ret[i][j] = [float(rho[i][j]), float(p[i][j])] - return ( - ret, - 200, - ) # pearson correlation matrix with the significance of the coefficient - - -def spearmanCorrelation(data): - """ - Computes the Spearman's coefficient for every pair of variables provided - - Parameters - ---------- - data: list of lists where each list contains the observations of a single variable - - Return - ------ - result: correlation matrix along with the p-value of the significance test of the coefficients - significance level legend: 3(***) -> p-value<0.01 -> The coefficient is significant at 99% - 2(**) -> p-value<0.05 ->The coefficient is significant at 95% - 1(*) -> p-value<0.1 -> The coefficient is significant at 90% - - """ - for i in range(len(data) - 1): - assert len(data[i]) == len( - data[i + 1] - ), "The provided samples should have the same length" - # transform the list of samples to dataframe - df = pd.DataFrame(data).transpose() - rho = df.corr(method="spearman") # calculate the correlation matrix - pval = df.corr(method=lambda x, y: spearmanr(x, y)[1]) - np.eye( - *rho.shape - ) # calculate the p-value - # return the number of significant level - p = pval.applymap(lambda x: (len([i for t in [0.01, 0.05, 0.1] if x <= t]))) - result = rho.values.tolist() - for i in range(rho.shape[0]): - for j in range(rho.shape[1]): - result[i][j] = [float(rho[i][j]), float(p[i][j])] - return result, 200 - - -# === Mutual Information === -def entropy_discrete(x, 
base=2): - """ - Computes the entropy of a discrete random variable - - Parameters: - ----------- - - x: List or array of one variable. - base: The base in which the entropy value is represented, i.e 2 for bits, 10 for nats - - Return: - ------- - - Output: A float: The value of the entropy - """ - _, count = np.unique(x, return_counts=True, axis=0) - probability = count.astype(float) / len(x) - # Removing the elements which have 0 probability/weight to avoid log(0) - probability = probability[probability > 0.0] - return np.sum(-1 * probability * np.log(probability)) / np.log(base) - - -def entropy_discrete_xy(x, y, base=2): - """ - Computes the entropy of the joint distribution of two discrete random variables - - Parameters: - ----------- - x,y : Two random variables samples of the same length - base: The base in which the entropy value is represented, i.e 2 for bits, 10 for nats - - Returns: - -------- - - Output: The value of the entropy: a float - - """ - assert len(x) == len(y), "The two provided samples should be of the same length" - xy = np.c_[x, y] - # construction of point : - # Example : - # (x,y) - # [[1. 2.] - # [2. 4.] - # [3. 5.]] - return entropy_discrete(xy, base) - - -def discrete_mutual_information(x, y, base=2): - """ - Computes the mutual information of two discrete random variables: x,y - - Parameters: - ----------- - - x,y: Two random variable samples of the same length - base: The base in which the entropy value is represented, i.e 2 for bits, 10 for nats - - Returns: - ------- - - Output: The value of the mutual information - """ - assert len(x) == len(y), "The two provided samples should be of the same length" - return ( - entropy_discrete(x, base) - + entropy_discrete(y, base) - - entropy_discrete_xy(x, y, base) - ) - - -def continuous_mutual_information(x, y, k=1, base=2): - """ - Computes the mutual information between two continuous random variables - - Parameters: - ----------- - x,y: Data: lists or numpy arrays - k: the number of neighbors to consider - base: The base in which the entropy value is represented, i.e 2 for bits, 10 for nats - - Returns: - -------- - - Output: the mutual information - """ - x, y = np.asarray(x), np.asarray(y) - x, y = x.reshape(x.shape[0], -1), y.reshape(y.shape[0], -1) - x = x + 1e-10 * np.random.random_sample(x.shape) - y = y + 1e-10 * np.random.random_sample(y.shape) - xy = np.c_[x, y] - x_tree = cKDTree(x) - y_tree = cKDTree(y) - xy_tree = cKDTree(xy) - # query with k=k+1 to return the nearest neighbor, not counting the data point itself - dist, _ = xy_tree.query(xy, k=k + 1, p=np.inf) - epsilon = dist[:, -1] - - # for each point, count the number of neighbors - # whose distance in the x-subspace is strictly < epsilon - # repeat for the y subspace - n = len(x) - nx = np.empty(n, dtype=np.int) - ny = np.empty(n, dtype=np.int) - for ii in range(n): - if epsilon[ii] <= 1e-10: - nx[ii] = len(x_tree.query_ball_point(x_tree.data[ii], r=1e-9, p=np.inf)) - 1 - ny[ii] = len(y_tree.query_ball_point(y_tree.data[ii], r=1e-9, p=np.inf)) - 1 - else: - nx[ii] = ( - len( - x_tree.query_ball_point( - x_tree.data[ii], r=epsilon[ii] - 1e-9, p=np.inf - ) - ) - - 1 - ) - ny[ii] = ( - len( - y_tree.query_ball_point( - y_tree.data[ii], r=epsilon[ii] - 1e-9, p=np.inf - ) - ) - - 1 - ) - - mi = ( - digamma(k) - np.mean(digamma(nx + 1) + digamma(ny + 1)) + digamma(n) - ) / np.log( - base - ) # version (1) in krakow scientific paper - - return mi - - -def mixed_mutual_information(c, d, n_neighbors, base=10): - """ - Compute mutual information between 
continuous and discrete variables. - - Parameters - ---------- - c : ndarray, shape (n_samples,) - Samples of a continuous random variable. - d : ndarray, shape (n_samples,) - Samples of a discrete random variable. - n_neighbors : int - Number of nearest neighbors to search for each point - base: The base in which the entropy value is represented, i.e 2 for bits, 10 for nats - - - Returns: - -------- - Output: The mutual information value - """ - n_samples = c.shape[0] - c = c.reshape((-1, 1)) - - radius = np.empty(n_samples) - label_counts = np.empty(n_samples) - k_all = np.empty(n_samples) - nn = NearestNeighbors() - for label in np.unique(d): - mask = d == label - count = np.sum(mask) - if count > 1: - k = min(n_neighbors, count - 1) - nn.set_params(n_neighbors=k) - nn.fit(c[mask]) - r = nn.kneighbors()[0] - # print(r) - radius[mask] = np.nextafter(r[:, -1], 0) - # print(radius) - k_all[mask] = k - label_counts[mask] = count - - # Ignore points with unique labels. - mask = label_counts > 1 - n_samples = np.sum(mask) - label_counts = label_counts[mask] - k_all = k_all[mask] - c = c[mask] - radius = radius[mask] - - nn.set_params(algorithm="kd_tree") - nn.fit(c) - ind = nn.radius_neighbors(radius=radius, return_distance=False) - m_all = np.array([i.size for i in ind]) - - mi = ( - digamma(n_samples) - + np.mean(digamma(k_all)) - - np.mean(digamma(label_counts)) - - np.mean(digamma(m_all + 1)) - ) - - return mi / log(base) - - -def normalise_function(normalise, mutual_information, entropy_X, entropy_Y): - """ - normalize the mutual information coefficient - Parameters: - ----------- - normalize: the choice of normalize function : takes either 'max' or 'min' or 'square root' or 'mean' or 'none' - mutual_information: mutual information coefficient - entropy_X: the entropy of the first variable - entropy_Y: the entropy of the second variable - Returns: - ------- - Output: - The value of the normalized mutual information coefficient - """ - if normalise == "none": - ratio = 1 - elif normalise == "max": - ratio = max(entropy_X, entropy_Y) - elif normalise == "min": - ratio = min(entropy_X, entropy_Y) - elif normalise == "square root": - ratio = sqrt(np.abs(entropy_X * entropy_Y)) - elif normalise == "mean": - ratio = (entropy_X + entropy_Y) / 2 - else: - raise NotImplementedError( - "Variable 'normalise' takes only 'max' or 'min' or 'square root' or 'mean' or 'none'" - ) - - return mutual_information / ratio - - -def continuous_iterate_function(list_continuous, k=3, base=3, normalise="none"): - """ - Parameters: - ----------- - list_continuous: list of list of the continuous variables - k: the number of neighbors to consider - base: The base in which the entropy value is represented, i.e 2 for bits, 10 for nats - normalise: the choice of normalize function : takes either 'max' or 'min' or 'square root' or 'mean' or 'none' - - Returns: - ------- - Output: - an array of the mutual information between the continuous variables - """ - continuous = np.eye(len(list_continuous), len(list_continuous)).tolist() - for i in range(len(continuous)): - for j in range(i, len(continuous)): - continuous[i][j] = continuous[j][i] = normalise_function( - normalise, - continuous_mutual_information( - list_continuous[i], list_continuous[j], k=k, base=base - ), - continuous_mutual_information( - list_continuous[i], list_continuous[i], k=k, base=base - ), - continuous_mutual_information( - list_continuous[j], list_continuous[j], k=k, base=base - ), - ) - return continuous - - -def 
discrete_iterate_function(list_discrete, base=10, normalise="none"): - """ - Parameters: - ----------- - list_discrete: list of list of the discrete variables - k: the number of neighbors to consider - base: The base in which the entropy value is represented, i.e 2 for bits, 10 for nats - normalise: the choice of normalize function : takes either 'max' or 'min' or 'square root' or 'mean' or 'none' - - Returns: - ------- - Output: - an array of the mutual information between the discrete variables - """ - discrete = np.eye(len(list_discrete), len(list_discrete)).tolist() - for i in range(len(discrete)): - for j in range(i, len(discrete)): - discrete[i][j] = discrete[j][i] = normalise_function( - normalise, - discrete_mutual_information( - list_discrete[i], list_discrete[j], base=base - ), - entropy_discrete(list_discrete[i], base=base), - entropy_discrete(list_discrete[j], base=base), - ) - return discrete - - -def mixed_iterate_function( - list_continuous, list_discrete, base=10, k=3, normalise="none" -): - """ - Parameters: - ----------- - list_continuous: list of list of the continuous variables - list_discrete: list of list of the discrete variables - k: the number of neighbors to consider - base: The base in which the entropy value is represented, i.e 2 for bits, 10 for nats - normalise: the choice of normalize function : takes either 'max' or 'min' or 'square root' or 'mean' or 'none' - - Returns: - ------- - Output: - an array of the mutual information between the all variables: the continuous and the discrete ones - """ - mixed = np.eye(len(list_continuous), len(list_discrete)).tolist() - for i in range(len(list_continuous)): - for j in range(len(list_discrete)): - mixed[i][j] = normalise_function( - normalise, - mixed_mutual_information( - np.array(list_continuous[i]), - np.array(list_discrete[j]), - n_neighbors=k, - base=base, - ), - continuous_mutual_information( - list_continuous[i], list_continuous[i], k=k, base=base - ), - entropy_discrete(list_discrete[j], base=base), - ) - mixed = pd.DataFrame(mixed) - continuous = continuous_iterate_function( - list_continuous, k=k, base=base, normalise=normalise - ) - continuous = pd.DataFrame(continuous) - discrete = discrete_iterate_function(list_discrete, base=base, normalise=normalise) - discrete = pd.DataFrame(discrete) - part1 = np.concatenate((continuous, mixed), axis=1) - part2 = np.concatenate((mixed.T, discrete), axis=1) - result = np.concatenate((part1, part2), axis=0) - return result.tolist() - - -# @utils.traceLogLight -def mutualInformation(data): - """ - the global matrix of mutual information - Parameters: - ----------- - list_continuous: list of list of the continuous variables, if there is no - continuous variables, please send an empty list of list [[]] - list_discrete: list of list of the discrete variables,if there is no discrete - variables, please send an empty list of list [[]] - k: the number of neighbors to consider - base: The base in which the entropy value is represented, i.e 2 for bits, 10 for nats - normalise: the choice of normalize function : takes either 'max' or 'min' - or 'square root' or 'mean' or 'none' - - Returns: - ------- - Output: - an array of the mutual information between the variables: the continuous - ones only, the discrete ones only or the continuous and the discrete ones - """ - k = data["k"] - list_continuous = data["list_continuous"] - list_discrete = data["list_discrete"] - - if k >= len(list_continuous) + len(list_discrete): - return "k must be lower than the number of variables", 403 
- - if "base" in data: - base = data["base"] - else: - base = 2 - - if "normalise" in data: - normalise = data["normalise"] - acceptedNormalise = ["max", "min", "square root", "mean", "none"] - - if normalise not in acceptedNormalise: - return ( - "normalise need to be either 'max' or 'min' or 'square root' or 'mean' or 'none'", - 403, - ) - else: - normalise = "max" - - # Calculate mutual information between features - if list_continuous != [[]] and list_discrete == [[]]: - return continuous_iterate_function( - list_continuous, k=k, base=base, normalise=normalise - ) - elif list_continuous == [[]] and list_discrete != [[]]: - return discrete_iterate_function(list_discrete, base=base, normalise=normalise) - elif list_continuous != [[]] and list_discrete != [[]]: - return mixed_iterate_function( - list_continuous, list_discrete, base=base, k=k, normalise=normalise - ) - else: - return "The lists are empty", 403 - - -# === Mutual Information higher dimension === -def averageDigamma(points, dvec): - """ - This part finds number of neighbors in some radius in the marginal space - - Parameters: - ---------- - - points: the data observed - dvec: A distance vector between points - - Returns: - -------- - Output: expectation value of - - """ - - N = len(points) - tree = ss.cKDTree(points) - avg = 0.0 - for i in range(N): - dist = dvec[i] - # subtlety, we don't include the boundary point, - # but we are implicitly adding 1 to kraskov definition because center point is included - num_points = len(tree.query_ball_point(points[i], dist - 1e-15, p=float("inf"))) - avg += digamma(num_points) / N - return avg - - -# @utils.traceLogLight -def higherDimensionMutualInformation(data): - """ - This function calculates the mutual information between several continuous - variables (3, 4 variables). It takes as input a list of lists [[variable1], - [variable2], [variable3], ...], the number K, and the base, either 2 or 10 - (the unit of information is respectively bits or nats). - - It returns the mutual information between the different variables. - - Regarding its representation in the tool, we can simply create a small window - where we select the variables, K, and the base, and display only the result. - Alternatively, in your 3D plot window, we can add the result of the mutual - information of the 3 variables below the graph. - - The mutual information estimator by Kraskov et al. - ith row of X represents ith dimension of the data, e.g. 
X = [[1.0,3.0,3.0],[0.1,1.2,5.4]], - if X has two dimensions and we have three samples - Parameters: - ---------- - X: list of list of the variables : it could take more than 2 variables - k: the number of neighbors to consider - base: The base in which the entropy value is represented, i.e 2 for bits, 10 for nats - - Returns: - -------- - Output: the mutual information between the variables - """ - - X = data["X"] - k = data["k"] - if k >= len(X): - return "k must be < to len(X)", 403 - - if "base" in data: - base = data["base"] - else: - base = 2 - - # adding small noise to X, e.g., x<-X+noise - x = [] - for i in range(len(X)): - tem = [] - for j in range(len(X[i])): - tem.append([X[i][j] + 1e-10 * nr.rand(1)[0]]) - x.append(tem) - - points = [] - for j in range(len(x[0])): - tem = [] - for i in range(len(x)): - tem.append(x[i][j][0]) - points.append(tem) - tree = ss.cKDTree(points) - dvec = [] - for i in range(len(x)): - dvec.append([]) - for point in points: - # Find k-nearest neighbors in joint space, p=inf means max norm - knn = tree.query(point, k + 1, p=float("inf")) - points_knn = [] - for i in range(len(x)): - dvec[i].append(float("-inf")) - points_knn.append([]) - for j in range(k + 1): - for i in range(len(x)): - points_knn[i].append(points[knn[1][j]][i]) - - # Find distances to k-nearest neighbors in each marginal space - for i in range(k + 1): - for j in range(len(x)): - if dvec[j][-1] < fabs(points_knn[j][i] - points_knn[j][0]): - dvec[j][-1] = fabs(points_knn[j][i] - points_knn[j][0]) - - ret = 0.0 - for i in range(len(x)): - ret -= averageDigamma(x[i], dvec[i]) - ret += ( - digamma(k) - - (float(len(x)) - 1.0) / float(k) - + (float(len(x)) - 1.0) * digamma(len(x[0])) - ) - if base == 2: - mul = 1 / log(2) # scaling factor from nats to bits - ret *= mul - return ret - - pass - - -# === Mutual Information matrix & Mutual Information higher dimension === -def mutualAndHigherInformation(data): - """ - calculate the mutual information estimator by Kraskov et al. 
- and the global matrix of mutual information - """ - - k = data["k"] - list_continuous = data["list_continuous"] - list_discrete = data["list_discrete"] - columns = list_continuous + list_discrete - if "base" in data: - base = data["base"] - else: - base = 2 - - if "normalise" in data: - normalise = data["normalise"] - else: - normalise = "max" - - if k >= len(columns): - return "k need to be < len(X)", 403 - - # higherDimensionMutualInformation - print("higherDimensionMutualInformation") - hdmi = higherDimensionMutualInformation( - {"k": k, "base": base, "X": list_continuous + list_discrete} - ) - - # mutualInformation - print("mutualInformation") - mi = mutualInformation( - { - "k": k, - "base": base, - "list_continuous": list_continuous, - "list_discrete": list_discrete, - "normalise": normalise, - } - ) - - return {"higherDimensionMutualInformation": hdmi, "mutualInformation": mi}, 200 diff --git a/build/lib/backend/controller/widgetConfigurations.py b/build/lib/backend/controller/widgetConfigurations.py deleted file mode 100644 index 90b3e3764..000000000 --- a/build/lib/backend/controller/widgetConfigurations.py +++ /dev/null @@ -1,28 +0,0 @@ -############################################################################# -# Imports -############################################################################# -import backend.utils.widgetConfigurations.widgetConfigurations as widgetConfUtils - -############################################################################# -# Widget configuration Management -############################################################################# - - -def get_all_configurations(): - configurations_overview = widgetConfUtils.get_configurations_overview() - return configurations_overview, 200 - - -def get_widget_configurations(widgetKey): - configurations = widgetConfUtils.get_configurations(widgetKey) - return configurations, 200 - - -def post_configuration(widgetKey, data): - widgetConfUtils.add_configuration(widgetKey, data) - return None, 204 - - -def delete_configuration(widgetKey, id): - widgetConfUtils.delete_configuration(widgetKey, id) - return None, 204 diff --git a/build/lib/backend/init.py b/build/lib/backend/init.py deleted file mode 100644 index d0f645742..000000000 --- a/build/lib/backend/init.py +++ /dev/null @@ -1,45 +0,0 @@ -# import backend.modules.dataProviders.dataProviderManager as dataProviderManager -# import backend.modules.exportMethods.exportUtils as exportUtils -# import backend.modules.algoProviders.algoProvidersManager as algoProvidersManager -# import backend.utils.widgetConfigurations.widgetConfigurations as widgetConfUtils -# import backend.utils.layouts.layouts as layoutsUtils -# import config.init_config as config - -from backend.modules.dataProviders import ( - dataProviderManager, -) -from backend.modules.exportMethods import ( - exportUtils, -) -from backend.modules.algoProviders import ( - algoProvidersManager, -) -from backend.utils.widgetConfigurations import ( - widgetConfigurations as widgetConfUtils, -) -from backend.utils.layouts import ( - layouts as layoutsUtils, -) -from backend.config import ( - init_config as config, -) - - -def init(): - # Init config file - config.init_config() - - # Init data providers - dataProviderManager.setup_data_providers() - - # Init AlgoProviders - algoProvidersManager.setup_algo_providers() - - # Init export methods - exportUtils.load_export_methods() - - # Init widget configurations - widgetConfUtils.setup_widget_configurations() - - # Init layouts - 
layoutsUtils.setup_layouts() diff --git a/build/lib/backend/modules/__init__.py b/build/lib/backend/modules/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/build/lib/backend/modules/algoProviders/AlgoProvider.py b/build/lib/backend/modules/algoProviders/AlgoProvider.py deleted file mode 100644 index d446c655a..000000000 --- a/build/lib/backend/modules/algoProviders/AlgoProvider.py +++ /dev/null @@ -1,109 +0,0 @@ -# Class for AlgoProvider -import requests -import json -from backend.modules.algoProviders.AlgoProviderException import AlgoProviderException - - -class AlgoProvider: - def __init__(self, url, name): - self.url = url - self.name = name - self.alive = False - - def is_alive(self): - # Try to load algorithms - self.alive = True if self.get_algorithms() is not None else False - return self.alive - - def get_algorithms(self): - try: - r = requests.get(self.url + "/algorithms") - return get_http_response(r) - except ( - requests.exceptions.ConnectionError, - requests.exceptions.Timeout, - requests.exceptions.HTTPError, - ): - return None - - except Exception as e: - print("Error in get_algorithms") - print(e) - return None - - def to_json(self): - algorithms = None - if self.is_alive(): - algorithms = self.get_algorithms() - - return { - "name": self.name, - "url": self.url, - "status": self.alive, - "algorithms": algorithms, - } - - def use_algorithm(self, algorithm_id, data): - try: - print("Using algoProvider: " + self.url) - print("Using algorithm: " + algorithm_id) - r = requests.post( - self.url + "/algorithms/" + algorithm_id + "/run", json=data - ) - if r.raise_for_status() is None: - return get_valid_response(r) - except ( - requests.exceptions.ConnectionError, - requests.exceptions.Timeout, - ) as e: - print("The algoProvider is not reachable") - print(e) - raise AlgoProviderException("AlgoProvider not reachable", 500) - except requests.exceptions.HTTPError as e: - print("The algoProvider returned an error") - print(e) - print(e.response.text) - print(e.response.json()) - - if "detail" in e.response.json(): - raise AlgoProviderException(e.response.json()["detail"], 400) - - if e.response.status_code == 500: - raise AlgoProviderException( - "AlgoProvider internal server error: " + str(e), 500 - ) - elif e.response.status_code == 400: - raise AlgoProviderException(e.response.text, 400) - - elif e.response.status_code == 404: - raise AlgoProviderException( - "The algoProvider may not have this algorithm, " + e.response.text, - 404, - ) - else: - raise AlgoProviderException(str(e), 400) - - -# ==== Utils ==== -def get_http_response(response): - try: - if response.raise_for_status() is None: - return get_valid_response(response) - except requests.exceptions.HTTPError: - return get_error_response(response) - - -def get_valid_response(response): - if response.status_code == 204: - return True - try: - return response.json() - except json.decoder.JSONDecodeError: - return - - -def get_error_response(response): - if response.status_code == 500: - raise AlgoProviderException("AlgoProvider unexpected Error", 500) - - raise AlgoProviderException(response.text, response.status_code) diff --git a/build/lib/backend/modules/algoProviders/AlgoProviderException.py b/build/lib/backend/modules/algoProviders/AlgoProviderException.py deleted file mode 100644 index 8e3ebc24b..000000000 --- a/build/lib/backend/modules/algoProviders/AlgoProviderException.py +++ /dev/null @@ -1,15 +0,0 @@ -# Description: Exception class for AlgoProvider -class 
AlgoProviderException(Exception): - message = "AlgoProvider error" - status_code = 500 - - def __init__(self, message=None, status_code=None): - super(AlgoProviderException, self).__init__(message) - - if message is not None: - self.message = message - if status_code is not None: - self.status_code = status_code - - def __str__(self): - return self.message diff --git a/build/lib/backend/modules/algoProviders/__init__.py b/build/lib/backend/modules/algoProviders/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/build/lib/backend/modules/algoProviders/algoProvidersManager.py b/build/lib/backend/modules/algoProviders/algoProvidersManager.py deleted file mode 100644 index 17d2bf79d..000000000 --- a/build/lib/backend/modules/algoProviders/algoProvidersManager.py +++ /dev/null @@ -1,114 +0,0 @@ -from termcolor import colored - -from backend.config.init_config import get_config, DEBUG_COLOR, ERROR_COLOR, SUCCESS_COLOR -from backend.modules.algoProviders.AlgoProviderException import AlgoProviderException -from backend.modules.algoProviders.AlgoProvider import AlgoProvider -from backend.modules.algoProviders.integratedAlgoProvider.integratedAlgoProvider import ( - IntegratedAlgoProvider, -) - -algo_providers = [] - - -def setup_algo_providers(): - print("================== ALGO PROVIDERS ==================") - config = get_config() - config_algo_providers = config["ALGO_PROVIDERS"] - - keys = list(config_algo_providers.keys()) - values = list(config_algo_providers.values()) - - # Add AlgoProviders from config file - print(" - Loading Algo providers from config file") - for i in range(len(config_algo_providers)): - name = keys[i] - url = values[i] - - # Remove trailing slash - if url[-1] == "/": - url = url[:-1] - - print( - " - Adding AlgoProvider " - + colored(name, DEBUG_COLOR) - + " (" - + colored(url, DEBUG_COLOR) - + ")" - ) - try: - algo_provider = AlgoProvider(url, name) - algo_providers.append(algo_provider) - - if algo_provider.is_alive(): - print(colored(" [SUCCESS]", SUCCESS_COLOR) + " AlgoProvider ready") - else: - raise AlgoProviderException() - - except AlgoProviderException: - print( - colored(" [ERROR]", ERROR_COLOR) - + " AlgoProvider " - + colored(name, ERROR_COLOR) - + " is not accessible" - ) - - # Add the integrated algo provider - enable_integrated = config["ALGO_PROVIDERS_CONFIG"]["enable_integrated"] - if enable_integrated: - print(" - Adding integrated AlgoProviders") - algo_provider = IntegratedAlgoProvider() - nb_algos = len(algo_provider.get_algorithms()) - algo_providers.append(algo_provider) - - if nb_algos > 0: - print( - colored(" [SUCCESS]", SUCCESS_COLOR) - + " Integrated AlgoProvider ready with " - + str(nb_algos) - + " algorithms" - ) - else: - print(" No algorithms found") - - if len(algo_providers) == 0: - print("No Algo providers") - - -def get_algo_providers(): - return algo_providers - - -def get_algo_providers_json(): - algo_providers_json = [] - for algo_provider in algo_providers: - algo_providers_json.append(algo_provider.to_json()) - - return algo_providers_json - - -def algo_provider_exists(name): - for d in algo_providers: - if d.name == name: - return True - return False - - -def add(algo_provider): - algo_providers.append(algo_provider) - return - - -def get_single_algo_provider(name): - # Return the algo provider with the given name - for d in algo_providers: - if d.name == name: - return d - - raise AlgoProviderException("Algo provider not found", 404) - - -def delete(name): - for d in algo_providers: - if d.name == 
name: - algo_providers.remove(d) - return diff --git a/build/lib/backend/modules/algoProviders/integratedAlgoProvider/__init__.py b/build/lib/backend/modules/algoProviders/integratedAlgoProvider/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/build/lib/backend/modules/algoProviders/integratedAlgoProvider/algorithms/__init__.py b/build/lib/backend/modules/algoProviders/integratedAlgoProvider/algorithms/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/build/lib/backend/modules/algoProviders/integratedAlgoProvider/algorithms/classificationErrorMetric.py b/build/lib/backend/modules/algoProviders/integratedAlgoProvider/algorithms/classificationErrorMetric.py deleted file mode 100644 index df545d8da..000000000 --- a/build/lib/backend/modules/algoProviders/integratedAlgoProvider/algorithms/classificationErrorMetric.py +++ /dev/null @@ -1,99 +0,0 @@ -from ..utils import get_input_from_inputs - -# This algorithm is a simple classification metric calculator -# It takes a list of values corresponding to the ground truth -# and a list of values corresponding to the predictions - -# It returns a list of True/False values corresponding to -# whether the prediction is correct or not - -# It also returns the accuracy percentage and the number of correct predictions - -# Technical details (must respect the algo-api format): -algorithm_description = { - "name": "Classification Metric", - "description": """Calculates the classification error according \ -to the ground truth and the predictions""", - "author": "DebiAI", - "version": "1.0.0", - "creationDate": "2023-10-30", - "tags": ["metrics", "classification"], - "inputs": [ - { - "name": "Ground truth", - "description": "List of ground truth values", - "type": "array", - "arrayType": "text", - }, - { - "name": "Predictions", - "description": "List of predictions, must have the same \ -length as the ground truth list", - "type": "array", - "arrayType": "text", - }, - ], - "outputs": [ - { - "name": "Binary error", - "description": "Classification metric of the input list, \ -False if GDT == PRED, True otherwise", - "type": "array", - "arrayType": "boolean", - }, - { - "name": "Binary success", - "description": "Classification metric of the input list, \ -True if GDT == PRED, False otherwise", - "type": "array", - "arrayType": "boolean", - }, - { - "name": "Accuracy", - "description": "Percentage of correct predictions", - "type": "number", - }, - { - "name": "Number of correct predictions", - "type": "number", - }, - ], -} - - -def get_algorithm_details(): - return algorithm_description - - -def use_algorithm(inputs): - # Get inputs - gdt = get_input_from_inputs(inputs, "Ground truth", "array") - predictions = get_input_from_inputs(inputs, "Predictions", "array") - - # Check inputs - if len(gdt) != len(predictions): - raise TypeError("Ground truth and predictions must have the same length") - - # Calculate classification metric - binary_error = [None] * len(gdt) - binary_success = [None] * len(gdt) - nb_correct_predictions = 0 - for i in range(len(gdt)): - if gdt[i] == predictions[i]: - nb_correct_predictions += 1 - binary_error[i] = False - binary_success[i] = True - else: - binary_error[i] = True - binary_success[i] = False - - # Calculate accuracy - accuracy = nb_correct_predictions / len(binary_success) - - # Return outputs - return [ - {"name": "Binary error", "value": binary_error}, - {"name": "Binary success", "value": binary_success}, - {"name": "Accuracy", "value": accuracy}, - {"name": 
"Number of correct predictions", "value": nb_correct_predictions}, - ] diff --git a/build/lib/backend/modules/algoProviders/integratedAlgoProvider/algorithms/regressionErrorMetric.py b/build/lib/backend/modules/algoProviders/integratedAlgoProvider/algorithms/regressionErrorMetric.py deleted file mode 100644 index 30315775e..000000000 --- a/build/lib/backend/modules/algoProviders/integratedAlgoProvider/algorithms/regressionErrorMetric.py +++ /dev/null @@ -1,149 +0,0 @@ -from ..utils import get_input_from_inputs - -# This algorithm is a simple regression metric calculator -# It takes a list of numbers corresponding to an error -# and a ceil corresponding to the maximum acceptable error - -# It returns a list of True/False values corresponding to -# whether the error is acceptable or not - -# It also returns the percentage of acceptable errors - -# Technical details (must respect the algo-api format): -algorithm_description = { - "name": "Regression Metric", - "description": """Calculates the regression error according to the ground truth, \ -the predictions and a ceil value""", - "author": "DebiAI", - "version": "1.0.0", - "creationDate": "2023-05-23", - "tags": ["metrics", "regression"], - "inputs": [ - { - "name": "Ground truth", - "description": "List of ground truth values", - "type": "array", - "arrayType": "number", - }, - { - "name": "Predictions", - "description": "List of predictions, must have the same length as the \ -ground truth list", - "type": "array", - "arrayType": "number", - }, - { - "name": "Ceil", - "description": "Maximum acceptable error, depends on the use case, >= 0", - "type": "number", - "default": 5, - "availableValues": [0.1, 5, 100, 10000], - "min": 0, - }, - ], - "outputs": [ - { - "name": "Error", - "description": "Difference between the ground truth and the predictions", - "type": "array", - "arrayType": "number", - }, - { - "name": "Absolute error", - "description": "Absolute value of the error", - "type": "array", - "arrayType": "number", - }, - { - "name": "Binary error", - "description": "True if Absolute error > ceil, False otherwise", - "type": "array", - "arrayType": "boolean", - }, - { - "name": "Error percentage", - "type": "number", - }, - { - "name": "Binary success", - "description": "True if Absolute error <= ceil, False otherwise", - "type": "array", - "arrayType": "boolean", - }, - { - "name": "Success percentage", - "type": "number", - }, - ], -} - - -def get_algorithm_details(): - return algorithm_description - - -def use_algorithm(inputs): - # Get inputs - gdt = get_input_from_inputs(inputs, "Ground truth", "array", "number") - predictions = get_input_from_inputs(inputs, "Predictions", "array", "number") - ceil = get_input_from_inputs(inputs, "Ceil", "number") - - # Check inputs - if ceil < 0: - raise TypeError("Ceil must be positive") - - if len(gdt) != len(predictions): - raise TypeError("Ground truth and predictions must have the same length") - - # Calculate regression metric - nb_values = len(gdt) - error = [None] * nb_values - absolute_error = [None] * nb_values - binary_error = [None] * nb_values - binary_success = [None] * nb_values - - for i in range(nb_values): - error_value = gdt[i] - predictions[i] - error[i] = error_value - absolute_error[i] = abs(error_value) - - if abs(error_value) > ceil: - binary_error[i] = True - binary_success[i] = False - else: - binary_error[i] = False - binary_success[i] = True - - # Calculate percentages - error_percentage = binary_error.count(True) / nb_values - error_percentage = 
round(error_percentage * 100, 2) - success_percentage = binary_success.count(True) / nb_values - success_percentage = round(success_percentage * 100, 2) - - # Return outputs - return [ - { - "name": "Error", - "value": error, - }, - { - "name": "Absolute error", - "value": absolute_error, - }, - { - "name": "Binary error", - "value": binary_error, - }, - { - "name": "Error percentage", - "value": error_percentage, - }, - { - "name": "Binary success", - "value": binary_success, - }, - { - "name": "Success percentage", - "value": success_percentage, - }, - ] diff --git a/build/lib/backend/modules/algoProviders/integratedAlgoProvider/integratedAlgoProvider.py b/build/lib/backend/modules/algoProviders/integratedAlgoProvider/integratedAlgoProvider.py deleted file mode 100644 index 4b27ea380..000000000 --- a/build/lib/backend/modules/algoProviders/integratedAlgoProvider/integratedAlgoProvider.py +++ /dev/null @@ -1,103 +0,0 @@ -import os -from termcolor import colored - -from backend.config.init_config import DEBUG_COLOR -from backend.modules.algoProviders.AlgoProvider import AlgoProvider -from backend.modules.algoProviders.AlgoProviderException import AlgoProviderException - - -def _get_algorithm_python(algorithm_name): - """Get the python file of the algorithm - - Args: - algorithm_name (str): Name of the algorithm - - Returns: - module: Python module of the algorithm - """ - - # Get the algorithm file - algorithm_file = None - for file in os.listdir(os.path.dirname(__file__) + "/algorithms"): - if file.endswith(".py") and file[:-3] == algorithm_name: - algorithm_file = file[:-3] - break - - # Check if the file exists - if algorithm_file is None: - raise ValueError("Algorithm " + algorithm_name + " does not exists") - - # Import the algorithm - algorithm_python = __import__( - "modules.algoProviders.integratedAlgoProvider.algorithms." 
+ algorithm_file, - fromlist=["*"], - ) - - return (algorithm_name, algorithm_python) - - -class IntegratedAlgoProvider(AlgoProvider): - # Integrated AlgoProvider - # Used to expose the algorithms that are integrated - # directly in DebiAI - def __init__(self): - self.url = "/app/algo-provider" - self.name = "Integrated Algo-provider" - self.alive = True - - def is_alive(self): - return True - - def get_algorithms(self): - """Get all algorithms that DebiAI can provide - Returns: - list: List of algorithms - """ - - # List the .py files if the algorithms folder - algorithm_files = [] - for file in os.listdir(os.path.dirname(__file__) + "/algorithms"): - if file.endswith(".py") and file != "__init__.py": - algorithm_files.append(file[:-3]) - - # Import the algorithms - algorithms_python = [] - for file in algorithm_files: - print(" Importing " + colored(file, DEBUG_COLOR)) - try: - algorithms_python.append(_get_algorithm_python(file)) - except ModuleNotFoundError as e: - print("Error importing " + file) - print(e) - - # Get the algorithms (call the get_algorithm_details() function) - algorithms = [] - for algorithm in algorithms_python: - algorithm_details = algorithm[1].get_algorithm_details() - # Add the id as the file name - algorithm_details["id"] = algorithm[0] - algorithms.append(algorithm_details) - - return algorithms - - def use_algorithm(self, algorithm_id, data): - try: - print("Using integrated algo-provider") - print("Using algorithm: " + algorithm_id) - algorithm = _get_algorithm_python(algorithm_id) - - # Use the algorithm - outputs = algorithm[1].use_algorithm(data["inputs"]) - - return outputs - - except TypeError as e: - print("The integrated algo-provider returned an error") - print(e) - raise AlgoProviderException( - algorithm_id + " returned an error: " + str(e), 400 - ) - except Exception as e: - print("The integrated algo-provider returned an error") - print(e) - raise AlgoProviderException("AlgoProvider internal server error", 500) diff --git a/build/lib/backend/modules/algoProviders/integratedAlgoProvider/utils.py b/build/lib/backend/modules/algoProviders/integratedAlgoProvider/utils.py deleted file mode 100644 index bbb3a1271..000000000 --- a/build/lib/backend/modules/algoProviders/integratedAlgoProvider/utils.py +++ /dev/null @@ -1,59 +0,0 @@ -def get_input_from_inputs( - inputs, input_name, expected_input_type=None, expected_list_type=None -): - # Get the input from of the inputs list from a given name - # Check the type and the subtype if needed - - for i, input in enumerate(inputs): - if "name" not in input: - raise TypeError("Input n°{} has no name".format(i)) - - if "value" not in input: - raise TypeError("Input {} has no value".format(input["name"])) - - if input["name"] == input_name: - # Check the type - if expected_input_type == "number": - if not isinstance(input["value"], (int, float)): - raise TypeError( - "Input {} is not a number, but a {}".format( - input_name, type(input["value"]) - ) - ) - elif expected_input_type == "string": - if not isinstance(input["value"], str): - raise TypeError( - "Input {} is not a string but a {}".format( - input_name, type(input["value"]) - ) - ) - elif expected_input_type == "array": - if not isinstance(input["value"], list): - raise TypeError( - "Input {} is not an array but a {}".format( - input_name, type(input["value"]) - ) - ) - - # Check the subtype - if expected_list_type == "number": - for value in input["value"]: - if not isinstance(value, (int, float)): - raise TypeError( - "Input {} is not an array of numbers 
but of {}".format( - input_name, type(value) - ) - ) - elif expected_list_type == "string": - for value in input["value"]: - if not isinstance(value, str): - raise TypeError( - "Input {} is not an array of strings but of {}".format( - input_name, type(value) - ) - ) - - # Return the value - return input["value"] - - raise TypeError("Input {} not found in inputs".format(input_name)) diff --git a/build/lib/backend/modules/dataProviders/DataProvider.py b/build/lib/backend/modules/dataProviders/DataProvider.py deleted file mode 100644 index 0d25fd65c..000000000 --- a/build/lib/backend/modules/dataProviders/DataProvider.py +++ /dev/null @@ -1,77 +0,0 @@ -from abc import ABC, abstractmethod, abstractproperty - - -class DataProvider(ABC): - # Data - @abstractproperty - def name(self): - pass - - @abstractproperty - def is_alive(self): - return False - - @abstractproperty - def type(self): - pass - - # Info - @abstractmethod - def get_info(self): - pass - - # Projects - @abstractmethod - def get_projects(self): - pass - - @abstractmethod - def get_project(self, id): - pass - - @abstractmethod - def delete_project(self, _id): - pass - - # Samples - @abstractmethod - def get_id_list(self, _projectId, _analysis, _from, _to): - pass - - @abstractmethod - def get_samples(self, _projectId, _analysis, id_list): - pass - - # Selections - @abstractmethod - def get_selections(self): - pass - - @abstractmethod - def get_selection_id_list(self, id): - pass - - @abstractmethod - def create_selection(self, name, id_list): - pass - - @abstractmethod - def delete_selection(self, id): - pass - - # Models - @abstractmethod - def get_models(self): - pass - - @abstractmethod - def get_model_results_id_list(self): - pass - - @abstractmethod - def get_model_results(self, id_list): - pass - - @abstractmethod - def delete_model(self, id): - pass diff --git a/build/lib/backend/modules/dataProviders/DataProviderException.py b/build/lib/backend/modules/dataProviders/DataProviderException.py deleted file mode 100644 index 5ebe02f2b..000000000 --- a/build/lib/backend/modules/dataProviders/DataProviderException.py +++ /dev/null @@ -1,15 +0,0 @@ -# Description: Exception class for data providers -class DataProviderException(Exception): - message = "Data provider error" - status_code = 500 - - def __init__(self, message=None, status_code=None): - super(DataProviderException, self).__init__(message) - - if message is not None: - self.message = message - if status_code is not None: - self.status_code = status_code - - def __str__(self): - return self.message diff --git a/build/lib/backend/modules/dataProviders/__init__.py b/build/lib/backend/modules/dataProviders/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/build/lib/backend/modules/dataProviders/dataProviderManager.py b/build/lib/backend/modules/dataProviders/dataProviderManager.py deleted file mode 100644 index 2b651dbb7..000000000 --- a/build/lib/backend/modules/dataProviders/dataProviderManager.py +++ /dev/null @@ -1,127 +0,0 @@ -from termcolor import colored - -from backend.config.init_config import ( - get_config, - DEBUG_COLOR, - ERROR_COLOR, - SUCCESS_COLOR, -) -from backend.modules.dataProviders.webDataProvider.WebDataProvider import ( - WebDataProvider, -) -from backend.modules.dataProviders.pythonDataProvider.PythonDataProvider import ( - PythonDataProvider, - PYTHON_DATA_PROVIDER_ID, -) -from backend.modules.dataProviders.DataProviderException import DataProviderException - -data_providers_list = [] -python_data_provider_disabled = True 
- - -def setup_data_providers(): - global python_data_provider_disabled - print("================== DATA PROVIDERS ==================") - config = get_config() - web_data_provider_config = config["WEB_DATA_PROVIDERS"] - python_module_data_provider_config = config["INTEGRATED_DATA_PROVIDER"] - - keys = list(web_data_provider_config.keys()) - values = list(web_data_provider_config.values()) - - # Web Data Providers - for i in range(len(web_data_provider_config)): - name = keys[i] - url = values[i] - - # Remove trailing slash - if url[-1] == "/": - url = url[:-1] - - print( - " - Adding external data Provider " - + colored(name, DEBUG_COLOR) - + " (" - + url - + ")" - ) - try: - data_provider = WebDataProvider(url, name) - add(data_provider) - - if data_provider.is_alive(): - print(colored(" [SUCCESS]", SUCCESS_COLOR) + " Data Provider ready") - else: - raise DataProviderException() - except DataProviderException: - print( - colored(" [ERROR]", ERROR_COLOR) - + " : Data Provider " - + colored(name, ERROR_COLOR) - + " is not accessible" - ) - # Python Data Providers - if python_module_data_provider_config["enabled"]: - print(" - Adding Python Module data Provider") - add(PythonDataProvider()) - python_data_provider_disabled = False - - if len(data_providers_list) == 0: - print(" No data providers configured") - - -def data_provider_exists(name): - for d in data_providers_list: - if d.name == name: - return True - return False - - -def is_valid_name(name): - # /, &, | are not allowed in data provider names - if ( - "/" in name - or "&" in name - or "|" in name - or len(name) == 0 - or len(name) > 50 - or name[0] == " " - or name[-1] == " " - ): - return False - - return True - - -def add(data_provider): - data_providers_list.append(data_provider) - return - - -def get_data_provider_list(): - return data_providers_list - - -def get_single_data_provider(name): - # Check if the data provider is not disabled - if name == PYTHON_DATA_PROVIDER_ID and python_data_provider_disabled: - raise DataProviderException("Python module data provider is disabled", 403) - - # Return the data provider with the given name - for d in data_providers_list: - if d.name == name: - return d - - raise DataProviderException("Data provider not found", 404) - - -def delete(name): - for d in data_providers_list: - if d.name == name: - if d.type == "Python module Data Provider": - raise DataProviderException( - "Python module data provider cannot be deleted", 403 - ) - - data_providers_list.remove(d) - return diff --git a/build/lib/backend/modules/dataProviders/pythonDataProvider/PythonDataProvider.py b/build/lib/backend/modules/dataProviders/pythonDataProvider/PythonDataProvider.py deleted file mode 100644 index c143d7a76..000000000 --- a/build/lib/backend/modules/dataProviders/pythonDataProvider/PythonDataProvider.py +++ /dev/null @@ -1,250 +0,0 @@ -from backend.config.init_config import get_config -from backend.modules.dataProviders.DataProvider import DataProvider -from backend.modules.dataProviders.DataProviderException import DataProviderException -from backend.modules.dataProviders.pythonDataProvider.dataUtils import ( - pythonModuleUtils, - projects, - samples, - selections, - models, - tree, -) - -from backend.utils.utils import get_app_version - -PYTHON_DATA_PROVIDER_ID = "Python module Data Provider" - - -# Wrappers -def project_must_exist(func): - def wrapper(*args, **kwargs): - if len(args) < 2: - raise Exception("Project id must be provided as first argument") - - project_id = args[1] - - if not 
projects.project_exist(project_id): - raise DataProviderException("Project " + project_id + " not found", 404) - - return func(*args, **kwargs) - - return wrapper - - -class PythonDataProvider(DataProvider): - # Generic functions - def __init__(self): - pythonModuleUtils.init() - nb_projects = len(projects.get_projects()) - print( - " Python module Data Provider initialized with " - + str(nb_projects) - + " projects" - ) - - @property - def name(self): - return PYTHON_DATA_PROVIDER_ID - - @property - def type(self): - return PYTHON_DATA_PROVIDER_ID - - def is_alive(self): - return True - - def get_info(self): - # Request method to get info on data Provider - # return Object { version, dp_name, nb_Sample_max(to load)} - return { - "version": get_app_version(), - "maxSampleIdByRequest": 10000, - "maxSampleDataByRequest": 2000, - "maxResultByRequest": 5000, - "canDelete": { - "projects": True, - "selections": True, - "models": True, - }, - } - - # Projects - def get_projects(self): - # Request method to get projects overview - # Return Arr[object{ id, name, nb_samples, nb_models, nb_selections, - # update_time, creation_time}] - return projects.get_projects() - - def create_project(self, name): - # Check config - config = get_config() - creation_allowed = config["INTEGRATED_DATA_PROVIDER"]["allow_create_projects"] - if not creation_allowed: - raise DataProviderException("Project creation is not allowed", 403) - - # Project must not already exist - if projects.project_exist(name): - raise DataProviderException("Project already exists", 400) - - return projects.create_project(name, name) - - @project_must_exist - def get_project(self, project_id): - # Request method to get projects overview - # Return object{ id, name, nb_samples, nb_models, nb_selections, - # update_time, creation_time} - - project_base_info = projects.get_project(project_id) - project_base_info["selections"] = selections.get_selections(project_id) - project_base_info["resultStructure"] = projects.get_result_structure(project_id) - project_base_info["models"] = models.get_models(project_id) - return project_base_info - - @project_must_exist - def delete_project(self, project_id): - # Check config - config = get_config() - creation_allowed = config["INTEGRATED_DATA_PROVIDER"]["allow_delete_projects"] - if not creation_allowed: - raise DataProviderException("Project deletion is not allowed", 403) - - # Request method to delete project - projects.delete_project(project_id) - - # Id list - @project_must_exist - def get_id_list(self, project_id, analysis, _from=None, _to=None): - # Get id list - # Return Arr[id] - return samples.get_all_samples_id_list(project_id, _from, _to) - - @project_must_exist - def get_samples(self, project_id, analysis, id_list): - # Get full data from id list - # Return object { id: [data]} - return samples.get_data_from_sample_id_list(project_id, id_list) - - # Selections - @project_must_exist - def get_selections(self, project_id): - # Get selections on project - # Return arr[object{ id, name, creation_time, nb_samples}] - return selections.get_selections(project_id) - - @project_must_exist - def get_selection_id_list(self, project_id, selection_id): - # Get selections id for a project - # Return selection ID list - return selections.get_selection_id_list(project_id, selection_id) - - @project_must_exist - def create_selection(self, project_id, name, id_list, request_id=None): - # Check config - config = get_config() - creation_allowed = config["INTEGRATED_DATA_PROVIDER"]["allow_create_selections"] - if 
not creation_allowed: - raise DataProviderException("Selection creation is not allowed", 403) - - # Selection creation - return selections.create_selection(project_id, name, id_list, request_id) - - @project_must_exist - def delete_selection(self, project_id, selection_id): - # Check config - config = get_config() - deletion_allowed = config["INTEGRATED_DATA_PROVIDER"]["allow_delete_selections"] - if not deletion_allowed: - raise DataProviderException("Selection deletion is not allowed", 403) - - # Selection deletion - return selections.delete_selection(project_id, selection_id) - - # Models - @project_must_exist - def get_models(self, project_id): - return models.get_models(project_id) - - @project_must_exist - def get_model_results_id_list(self, project_id, model_id): - return models.get_model_id_list(project_id, model_id) - - @project_must_exist - def get_model_results(self, project_id, model_id, id_list): - return models.get_model_results(project_id, model_id, id_list) - - # Python module specific functions - - @project_must_exist - def update_block_structure(self, project_id, blockStructure): - # Check config - config = get_config() - creation_allowed = config["INTEGRATED_DATA_PROVIDER"]["allow_create_projects"] - if not creation_allowed: - raise DataProviderException("Project creation is not allowed", 403) - - # Update block structure - projects.update_block_structure(project_id, blockStructure) - - @project_must_exist - def add_block_tree(self, project_id, data): - # Check config - config = get_config() - creation_allowed = config["INTEGRATED_DATA_PROVIDER"]["allow_insert_data"] - if not creation_allowed: - raise DataProviderException("Data insertion is not allowed", 403) - - # Insert data - return tree.add_block_tree(project_id, data) - - @project_must_exist - def update_results_structure(self, project_id, resultsStructure): - # Check config - config = get_config() - creation_allowed = config["INTEGRATED_DATA_PROVIDER"]["allow_insert_results"] - if not creation_allowed: - raise DataProviderException("Results insertion is not allowed", 403) - - # TODO : check resultStructure (type and default type ==) - existing_result_structure = projects.get_result_structure(project_id) - if existing_result_structure is not None: - raise DataProviderException( - "project " + project_id + " already have a results structure", 403 - ) - - projects.update_results_structure(project_id, resultsStructure) - - @project_must_exist - def create_model(self, project_id, data): - # Check config - config = get_config() - creation_allowed = config["INTEGRATED_DATA_PROVIDER"]["allow_create_models"] - if not creation_allowed: - raise DataProviderException("Model creation is not allowed", 403) - - models.create_model( - project_id, data["name"], data["metadata"] if "metadata" in data else None - ) - - @project_must_exist - def delete_model(self, project_id, model_id): - # Check config - config = get_config() - deletion_allowed = config["INTEGRATED_DATA_PROVIDER"]["allow_delete_models"] - if not deletion_allowed: - raise DataProviderException("Model deletion is not allowed", 403) - - # Check if model exist - if not models.model_exist(project_id, model_id): - raise DataProviderException("Model does not exist", 404) - - models.delete_model(project_id, model_id) - - @project_must_exist - def add_results_dict(self, project_id, model_id, data): - # Check config - config = get_config() - creation_allowed = config["INTEGRATED_DATA_PROVIDER"]["allow_insert_results"] - if not creation_allowed: - raise 
DataProviderException("Results insertion is not allowed", 403) - - models.add_results_dict(project_id, model_id, data) diff --git a/build/lib/backend/modules/dataProviders/pythonDataProvider/__init__.py b/build/lib/backend/modules/dataProviders/pythonDataProvider/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/__init__.py b/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/hash.py b/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/hash.py deleted file mode 100644 index 844a309fa..000000000 --- a/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/hash.py +++ /dev/null @@ -1,60 +0,0 @@ -import hashlib -import ujson as json - -from backend.modules.dataProviders.pythonDataProvider.dataUtils import pythonModuleUtils - -DATA_PATH = pythonModuleUtils.DATA_PATH - - -def hash(text: str): - return hashlib.sha256(text.encode("utf-8")).hexdigest() - - -# hash -def __createProjectHashMap(projectId, blockPath, hashmap, sampleLevel, currentLevel): - blockPath += "/" - if currentLevel == sampleLevel: - # We are at the sample level, we can fill the hashmap - sampleHash = hash(blockPath) - hashmap[sampleHash] = blockPath - - # Update the sample - pythonModuleUtils.updateJsonFile( - DATA_PATH + projectId + "/blocks/" + blockPath + "info.json", - "id", - sampleHash, - ) - return - - for children in pythonModuleUtils.listDir( - DATA_PATH + projectId + "/blocks/" + blockPath - ): - __createProjectHashMap( - projectId, blockPath + children, hashmap, sampleLevel, currentLevel + 1 - ) - - -def addToSampleHashmap(projectId, hashMap): - with open(DATA_PATH + projectId + "/samplesHashmap.json") as json_file: - existingHm = json.load(json_file) - - existingHm.update(hashMap) - - pythonModuleUtils.writeJsonFile( - DATA_PATH + projectId + "/samplesHashmap.json", existingHm - ) - - -def getHashmap(projectId): - with open(DATA_PATH + projectId + "/samplesHashmap.json") as json_file: - existingHm = json.load(json_file) - - return existingHm - - -def getPathFromHashList(projectId, hashArray): - hm = getHashmap(projectId) - ret = [] - for hash in hashArray: - ret.append(hm[hash]) - return ret diff --git a/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/models.py b/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/models.py deleted file mode 100644 index 075df2087..000000000 --- a/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/models.py +++ /dev/null @@ -1,281 +0,0 @@ -import os -import ujson as json -from backend.modules.dataProviders.pythonDataProvider.dataUtils import ( - pythonModuleUtils, - projects, - tree, -) -from backend.modules.dataProviders.DataProviderException import DataProviderException - -DATA_PATH = pythonModuleUtils.DATA_PATH - - -#  Models -def get_model_ids(project_id): - return os.listdir(DATA_PATH + project_id + "/models/") - - -def get_models(project_id): - ret = [] - for model in os.listdir(DATA_PATH + project_id + "/models/"): - with open( - DATA_PATH + project_id + "/models/" + model + "/info.json" - ) as json_file: - info = json.load(json_file) - ret.append( - { - "name": model, - "id": model, - "creationDate": info["creationDate"], - "updateDate": info["updateDate"], - "version": "0.0.0", - "metadata": info["metadata"], - "nbResults": 
info["nbResults"], - } - ) - - return ret - - -def model_exist(project_id, model_id): - return model_id in get_model_ids(project_id) - - -def create_model(project_id, model_name, metadata=None): - # ParametersCheck - if not pythonModuleUtils.is_filename_clean(model_name): - raise DataProviderException("Model name contain invalid characters", 402) - - model_id = model_name - - if model_exist(project_id, model_id): - raise DataProviderException("Model " + model_id + " already exists", 409) - - if metadata is None: - metadata = {} - - # model - modelFolderPath = DATA_PATH + project_id + "/models/" + model_id - os.mkdir(modelFolderPath) - - now = pythonModuleUtils.timeNow() - - model_info = { - "name": model_id, - "id": model_id, - "creationDate": now, - "updateDate": now, - "metadata": metadata, - "nbResults": 0, - } - - pythonModuleUtils.writeJsonFile(modelFolderPath + "/info.json", model_info) - - # Add 0 results to init the file - write_model_results(project_id, model_id, {}) - - -def delete_model(project_id, model_id): - pythonModuleUtils.deleteDir(DATA_PATH + project_id + "/models/" + model_id) - - -def write_model_results(project_id, model_id, results): - pythonModuleUtils.writeJsonFile( - DATA_PATH + project_id + "/models/" + model_id + "/results.json", results - ) - projects.update_project(project_id) - - -def get_model_results(project_id, model_id, sample_ids): - # Check parameters - if not projects.project_exist(project_id): - raise ("Project '" + project_id + "' doesn't exist") - if not model_exist(project_id, model_id): - raise ("Model " + model_id + " does not exist") - - # Get model results - with open( - DATA_PATH + project_id + "/models/" + model_id + "/results.json", "r" - ) as jsonFile: - model_results = json.load(jsonFile) - - # if not selection_id: - # return d - # else: - # selectionSamples = set( - # selections.getSelectionSamples(project_id, selection_id)) - # return selectionSamples.intersection_update(d) - # model_results = getModelResults(project_id, model_id) - - ret = {} - for sample_id in sample_ids: - if sample_id in model_results: - ret[sample_id] = model_results[sample_id] - # Not sending error if sample not found in model results at the moment - # else: - # raise ValueError("Sample " + sample_id + - # " not found in model results") - return ret - - -def get_model_id_list(project_id, model_id) -> list: - # Get model results - with open( - DATA_PATH + project_id + "/models/" + model_id + "/results.json", "r" - ) as jsonFile: - model_results = json.load(jsonFile) - return dict.keys(model_results) - - -# def get_model_list_results(project_id, model_ids: list, common: bool) -> list: -# samples = set(get_model_results(project_id, model_ids[0])) - -# for model_id in model_ids[1:]: -# if common: # Common samples between models -# samples.intersection_update( -# get_model_results(project_id, model_id)) -# else: # Union of the model results samples -# samples = samples.union(get_model_results(project_id, model_id)) - -# return list(samples) - - -def add_results_dict(project_id, modelId, data): - tree = data["results"] - - # Check parameters - if not projects.project_exist(project_id): - raise "Project '" + project_id + "' doesn't exist" - - if not model_exist(project_id, modelId): - raise ( - "Model '" + modelId + "' in project : '" + project_id + "' doesn't exist" - ) - - # Get resultStructure & project_block_structure - result_structure = projects.get_result_structure(project_id) - if result_structure is None: - raise ( - "The project expected results need to be 
specified before adding results" - ) - - if "expected_results_order" in data: - expected_results_order = data["expected_results_order"] - else: - expected_results_order = list(map(lambda r: r["name"], result_structure)) - - project_block_structure = projects.get_project_block_level_info(project_id) - sampleIndex = len(project_block_structure) - 1 - - # Check the given expected_results_order - for expected_result in result_structure: - if expected_result["name"] not in expected_results_order: - raise ( - "The expected result '" - + expected_result["name"] - + "' is missing from the expected_results_order Array" - ) - - giv_exp_res = {} - for given_expected_result in expected_results_order: - result_expected = False - for i, expected_result in enumerate(result_structure): - if given_expected_result == expected_result["name"]: - result_expected = True - - # Map the expected_results_order indexes to result_structure - giv_exp_res[given_expected_result] = i - - if not result_expected: - return ( - "The given expected result '" - + given_expected_result - + "' is not an expected result", - 403, - ) - - #  Check if all blocks referenced in the result tree exists - resultsToAdd = {} - - for blockKey in tree: - ok, msg = __check_blocks_of_tree_exists( - project_id, - result_structure, - giv_exp_res, - tree[blockKey], - 0, - sampleIndex, - blockKey, - resultsToAdd, - ) - if not ok: - print(msg) - return msg, 403 - - # The given tree is compliant, let's add the results - newResults = pythonModuleUtils.addToJsonFIle( - DATA_PATH + project_id + "/models/" + modelId + "/results.json", resultsToAdd - ) - - pythonModuleUtils.addToJsonFIle( - DATA_PATH + project_id + "/models/" + modelId + "/info.json", - {"nbResults": len(newResults), "updateDate": pythonModuleUtils.timeNow()}, - ) - projects.update_project(project_id) - return 200 - - -def __check_blocks_of_tree_exists( - project_id: str, - result_structure: list, - giv_exp_res: dict, - block: dict, - level: int, - sampleIndex: int, - path: str, - resultsToAdd: dict, -): - # Check block exist in the data - blockInfo = tree.findBlockInfo(project_id, path) - if not blockInfo: - return ( - False, - "Error while adding the results : block '" + path + "' doesn't exist", - ) - - if level == sampleIndex: - path += "/" - resultsToAdd[blockInfo["id"]] = [] - #  Sample level : the results : they need to be verified - if len(block) != len(giv_exp_res): - raise ValueError( - "in : " - + path - + ", " - + str(len(block)) - + " value where given but " - + str(len(giv_exp_res)) - + "where expected" - ) - - for result in result_structure: - resultsToAdd[blockInfo["id"]].append(block[giv_exp_res[result["name"]]]) - # TODO Deal with defaults results and check type - - return True, None - - for subBlockKey in block: - ok, msg = __check_blocks_of_tree_exists( - project_id, - result_structure, - giv_exp_res, - block[subBlockKey], - level + 1, - sampleIndex, - path + "/" + str(subBlockKey), - resultsToAdd, - ) - if not ok: - return False, msg - - return True, "" diff --git a/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/projects.py b/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/projects.py deleted file mode 100644 index dcecd3f8a..000000000 --- a/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/projects.py +++ /dev/null @@ -1,256 +0,0 @@ -import os -import shutil -import ujson as json - -from backend.modules.dataProviders.pythonDataProvider.dataUtils import pythonModuleUtils, hash - -DATA_PATH = 
pythonModuleUtils.DATA_PATH - - -def project_exist(projectId): - return projectId in os.listdir(DATA_PATH) - - -def get_project(projectId): - try: - # Json info file - if not os.path.exists(DATA_PATH + projectId + "/info.json"): - raise Exception('The "info.json" file is missing') - - with open(DATA_PATH + projectId + "/info.json") as json_file: - data = json.load(json_file) - - if "name" not in data: - raise Exception("The project name is missing from the info.json file") - - if "creationDate" not in data: - raise Exception( - "The project creationDate is missing from the info.json file" - ) - - if "updateDate" not in data: - raise Exception("The project updateDate is missing from the info.json file") - - name = data["name"] - creationDate = data["creationDate"] - updateDate = data["updateDate"] - - # Nb models - if not os.path.exists(DATA_PATH + projectId + "/models/"): - raise Exception('The "models" folder is missing') - - nbModels = len(os.listdir(DATA_PATH + projectId + "/models/")) - - # Nb selection - if not os.path.exists(DATA_PATH + projectId + "/selections/"): - raise Exception('The "selections" folder is missing') - - nbSelection = len(os.listdir(DATA_PATH + projectId + "/selections/")) - - # Nb samples - if not os.path.exists(DATA_PATH + projectId + "/samplesHashmap.json"): - raise Exception('The "samplesHashmap.json" file is missing') - - nbSamples = len(hash.getHashmap(projectId)) - - # project columns - projectColumns = get_project_columns(projectId) - - # project block level - # We still need to get the project block level, the Python module use it - projectBlockLevel = get_project_block_level_info(projectId) - - projectOverview = { - "id": projectId, - "name": name, - "nbModels": nbModels, - "nbSelections": nbSelection, - "nbSamples": nbSamples, - "creationDate": creationDate, - "updateDate": updateDate, - "columns": projectColumns, - "blockLevelInfo": projectBlockLevel, - } - - except Exception as e: - print("Error while getting the project overview: " + projectId) - print(e) - projectOverview = { - "id": projectId, - "name": projectId, - } - - return projectOverview - - -def get_projects(): - project = [] - - for projectId in os.listdir(DATA_PATH): - project.append(get_project(projectId)) - - return project - - -def create_project(projectId, projectName): - # Create the project files and folders - os.mkdir(DATA_PATH + projectId) - os.mkdir(DATA_PATH + projectId + "/blocks") - os.mkdir(DATA_PATH + projectId + "/models") - os.mkdir(DATA_PATH + projectId + "/selections") - - now = pythonModuleUtils.timeNow() - projectInfo = { - "name": projectName, - "id": projectId, - "creationDate": now, - "updateDate": now, - "blockLevelInfo": [], - } - - pythonModuleUtils.writeJsonFile(DATA_PATH + projectId + "/info.json", projectInfo) - pythonModuleUtils.writeJsonFile(DATA_PATH + projectId + "/samplesHashmap.json", {}) - - return projectInfo - - -def update_project(projectId): - # Change the update date of the project to now - pythonModuleUtils.updateJsonFile( - DATA_PATH + projectId + "/info.json", "updateDate", pythonModuleUtils.timeNow() - ) - - -def get_project_block_level_info(projectId): - if not os.path.isfile(DATA_PATH + projectId + "/info.json"): - raise Exception( - "The project '" + projectId + "' doesn't have an info.json file" - ) - - with open(DATA_PATH + projectId + "/info.json") as json_file: - return json.load(json_file)["blockLevelInfo"] - - -def get_project_columns(projectId): - block_level_info = get_project_block_level_info(projectId) - - # Convert the block level 
info to the new columns format - # blockLevelInfo: - # [ - # { "name": "block1" }, - # { - # "name": "block2", - # "contexts": [ - # { "name": "cont1", "type": "text", group:"group_1" }, - # { "name": "cont2", "type": "text", group:"group_1" }, - # ] - # }, - # { "name": "block3", "contexts": [ - # { "name": "cont3", "type": "text", group:"group_1" } - # ] - # }, - # { - # "name": "block4", - # "others": [{ "name": "other1", "type": "number" }], - # "groundTruth": [ - # { "name": "gdt1", "type": "number" }, - # { "name": "gdt2", "type": "number" }, - # ], - # "inputs": [ - # { "name": "inp1", "type": "number" } - # ] - # } - # ] - - # Goal format: - # [ - # { "name": "block1", "category": "other", "type": "auto" }, - # { "name": "block2", "category": "other", "type": "auto" }, - # { "name": "cont1", "category": "context", "type": "text", group: "group_1" }, - # { "name": "cont2", "category": "context", "type": "text", group: "group_1" }, - # { "name": "cont3", "category": "context", "type": "text", group: "group_1" }, - # { "name": "block3", "category": "other", "type": "auto" }, - # { "name": "other1", "category": "other", "type": "number" }, - # { "name": "block4", "category": "other", "type": "auto" }, - # { "name": "gdt1", "category": "groundtruth", "type": "number" }, - # { "name": "gdt2", "category": "groundtruth", "type": "number" }, - # { "name": "inp1", "category": "input", "type": "number" }, - # ] - - project_columns = [] - - def create_column(col, category): - column = {"name": col["name"], "category": category, "type": col["type"]} - - if "group" in col: - column["group"] = col["group"] - - return column - - for block in block_level_info: - block_name = block["name"] - project_columns.append( - {"name": block_name, "category": "other", "type": "auto"} - ) - - if "groundTruth" in block: - for ground_truth in block["groundTruth"]: - project_columns.append(create_column(ground_truth, "groundtruth")) - - if "contexts" in block: - for context in block["contexts"]: - project_columns.append(create_column(context, "context")) - - if "inputs" in block: - for input in block["inputs"]: - project_columns.append(create_column(input, "input")) - - if "others" in block: - for other in block["others"]: - project_columns.append(create_column(other, "other")) - - return project_columns - - -def get_result_structure(projectId): - with open(DATA_PATH + projectId + "/info.json") as json_file: - projectInfo = json.load(json_file) - if "resultStructure" in projectInfo: - return projectInfo["resultStructure"] - else: - return None - - -def delete_project(projectId): - # Delete the project files and folders - try: - shutil.rmtree(DATA_PATH + projectId) - except Exception as e: - print(e) - raise "Something went wrong when deleting the project" - - -def update_block_structure(projectId, blockStructure): - try: - pythonModuleUtils.updateJsonFile( - DATA_PATH + projectId + "/info.json", "blockLevelInfo", blockStructure - ) - - update_project(projectId) - except Exception as e: - print(e) - raise Exception("Something went wrong updating project structure") - - -def update_results_structure(projectId, resultStructure): - try: - # save resultStructure - pythonModuleUtils.updateJsonFile( - DATA_PATH + projectId + "/info.json", "resultStructure", resultStructure - ) - update_project(projectId) - return resultStructure, 200 - - except Exception as e: - print(e) - raise "Something went wrong updating project structure" diff --git 
a/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/pythonModuleUtils.py b/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/pythonModuleUtils.py deleted file mode 100644 index f98261689..000000000 --- a/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/pythonModuleUtils.py +++ /dev/null @@ -1,121 +0,0 @@ -import ujson as json -import os -import posixpath -import unicodedata -import string -import shutil -import time - -DATA_PATH = "data/pythonDataProvider/" - -DATA_TYPES = ["groundTruth", "contexts", "inputs", "others"] - - -# Init, called at the server start -def init(): - # Create the projects data directory - try: - os.makedirs(DATA_PATH) - except FileExistsError: - # Data already initiated - pass - - -# File name verifications -def clean_filename(filename): - # replace spaces - filename = filename.replace(" ", "_") - - # keep only valid ascii chars - cleaned_filename = ( - unicodedata.normalize("NFKD", filename).encode("ASCII", "ignore").decode() - ) - - # keep only whitelisted chars - whitelist = "_-() %s%s" % (string.ascii_letters, string.digits) - char_limit = 255 - - cleaned_filename = "".join(c for c in cleaned_filename if c in whitelist) - return cleaned_filename[:char_limit] - - -def is_filename_clean(filename): - cleanFilename = "".join(i for i in filename if i not in "\/:*?<>|") # noqa - return filename == cleanFilename - - -def is_secure_path(path): - path = posixpath.normpath(path) - return not path.startswith(("/", "../")) - - -# directories and file Manipulation -def fileExist(path): - return os.path.isfile(path) - - -def listDir(path): - # List the directories only - return [ - name for name in os.listdir(path) if os.path.isdir(os.path.join(path, name)) - ] - - -def deleteFile(filePath): - os.remove(filePath) - - -def deleteDir(dirPath): - deleteFiles = [] - deleteDirs = [] - for root, dirs, files in os.walk(dirPath): - for f in files: - deleteFiles.append(os.path.join(root, f)) - for d in dirs: - deleteDirs.append(os.path.join(root, d)) - for f in deleteFiles: - os.remove(f) - for d in deleteDirs: - os.rmdir(d) - os.rmdir(dirPath) - - -def copyDir(src, dest): - shutil.copytree(src, dest) - - -# Json files -def readJsonFile(path): - with open(path, "r") as jsonFile: - return json.load(jsonFile) - - -def writeJsonFile(path, obj): - with open(path, "w") as outfile: - json.dump(obj, outfile) - - -def updateJsonFile(path, key, data): - with open(path, "r") as jsonFile: - d = json.load(jsonFile) - - d[key] = data - - with open(path, "w") as jsonFile: - json.dump(d, jsonFile) - return d - - -def addToJsonFIle(path, dictToAdd: dict): - with open(path, "r") as jsonFile: - d = json.load(jsonFile) - - d = {**d, **dictToAdd} - with open(path, "w") as jsonFile: - json.dump(d, jsonFile) - return d - - -# Date -def timeNow(): - return time.time() * 1000 diff --git a/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/samples.py b/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/samples.py deleted file mode 100644 index 6ee3676ef..000000000 --- a/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/samples.py +++ /dev/null @@ -1,130 +0,0 @@ -from backend.modules.dataProviders.pythonDataProvider.dataUtils import ( - pythonModuleUtils, - tree, - hash, -) - -DATA_PATH = pythonModuleUtils.DATA_PATH -DATA_TYPES = pythonModuleUtils.DATA_TYPES - -# ID list - - -def get_all_samples_id_list(project_id, _from=None, _to=None): - """ - Return a list of all samples id in a project - """ - # 
Get the hashmap - hashmap = hash.getHashmap(project_id) - - # Get all samples - samples = list(hashmap.keys()) - - # In case of streaming purpose - if _from is not None and _to is not None: - samples = samples[_from : _to + 1] # noqa - - return samples - - -# Get data -def get_data_from_sample_id_list(project_id, id_list): - # Get path of the samples from the hashmap - sample_path = hash.getPathFromHashList(project_id, id_list) - data = {} - - # We age going through each samples individually because of a bug - # (the data aren't aligned with the requested samples id) - # Because of this bug, we are slowing down the process - # TODO : fix this bug - for i in range(len(id_list)): - # Get tree from samples - samples_tree = tree.getBlockTreeFromSamples(project_id, [sample_path[i]]) - - # Convert tree to array - data_array = _tree_to_array(samples_tree) - - # Convert array to dict - data[id_list[i]] = data_array[0] - - return data - - -def _tree_to_array(tree): - data_array = [] - for block in tree: - data_array += _block_to_array_recur(block) - return data_array - - -def _get_block_values(block): - # Adding the block name into the values - values = [block["name"]] - - # store all key-values into an array - for data_type in DATA_TYPES: - if data_type in block: - for key in range(len(block[data_type])): - values.append(block[data_type][key]) - - return values - - -def _block_to_array_recur(block): - # Getting bloc values - values = _get_block_values(block) - if "childrenInfoList" not in block or len(block["childrenInfoList"]) == 0: - return [values] - - else: - # Getting all child values - child_values = [] - for child_block in block["childrenInfoList"]: - child_values += _block_to_array_recur(child_block) - # child_values.append(_block_to_array_recur(child_block)) - - # Child values : [[1,2,3], [4,5,6], [7,8,9]] - # values : [10, 11, 12] - # Goal: [[10, 11, 12, 1, 2, 3], [10, 11, 12, 4, 5, 6], [10, 11, 12, 7, 8, 9]] - - # Adding the block name into the values - ret = [None] * len(child_values) - - # Adding the block values to the children values - for i in range(len(child_values)): - ret[i] = values + child_values[i] - - return ret - - -# def projectSamplesGenerator(projectId): -# """ -# Generator used to iterate over all samples in a project. 
-# Used by the 'createSelectionFromRequest' method -# """ - -# # Get the project block structure -# projectBlockStructure = projects.get_project_block_level_info(projectId) -# sampleLevel = len(projectBlockStructure) - 1 - -# rootBlocks = utils.listDir(DATA_PATH + projectId + "/blocks/") -# for rootBlock in rootBlocks: -# path = DATA_PATH + projectId + "/blocks/" + rootBlock + "/" -# yield from yieldSample(path, 0, [], sampleLevel, projectBlockStructure) -# print("end") - - -# def yieldSample(path, level, sampleInfo, sampleLevel, blockLevelInfo): -# # TODO : optimizations : add in parameters the block that we need to open -# blockInfo = utils.readJsonFile(path + "info.json") -# sampleInfo.append(getBlockInfo(blockLevelInfo[level], blockInfo)) - -# if level == sampleLevel: -# # merge the dict into one -# yield {k: v for x in sampleInfo for k, v in x.items()}, blockInfo["id"] -# else: -# childrenBlockNames = utils.listDir(path) -# for name in childrenBlockNames: -# yield from yieldSample(path + name + "/", -# level + 1, sampleInfo, sampleLevel, blockLevelInfo) -# del sampleInfo[-1] diff --git a/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/selections.py b/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/selections.py deleted file mode 100644 index 8281886ef..000000000 --- a/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/selections.py +++ /dev/null @@ -1,117 +0,0 @@ -import os -import ujson as json - -from backend.modules.dataProviders.pythonDataProvider.dataUtils import ( - pythonModuleUtils, - projects, -) - -DATA_PATH = pythonModuleUtils.DATA_PATH - -# Selections - - -def create_selection(project_id, selection_name, sample_ids, request_id=None): - selection_id = pythonModuleUtils.clean_filename(selection_name) - if len(selection_id) == 0: - selection_id = pythonModuleUtils.timeNow() - - nbS = 1 - while selection_exist(project_id, selection_id): - selection_id = pythonModuleUtils.clean_filename(selection_name) + "_" + str(nbS) - nbS += 1 - - # Save the selection - selectionInfoFilePath = ( - DATA_PATH + project_id + "/selections/" + selection_id + "/info.json" - ) - now = pythonModuleUtils.timeNow() - - selectionInfo = { - "id": selection_id, - "name": selection_name, - "filePath": selectionInfoFilePath, - "creationDate": now, - "updateDate": now, - "samples": sample_ids, - } - - if request_id is not None: - selectionInfo["requestId"] = request_id - - os.mkdir(DATA_PATH + project_id + "/selections/" + selection_id) - pythonModuleUtils.writeJsonFile(selectionInfoFilePath, selectionInfo) - projects.update_project(project_id) - return selectionInfo - - -def get_selections(project_id): - # Get selections - selections = [] - for selection_id in get_selection_ids(project_id): - selections.append(get_selection(project_id, selection_id)) - return selections - - -def get_selection_ids(project_id): - return os.listdir(DATA_PATH + project_id + "/selections/") - - -def selection_exist(project_id, selectionId): - return os.path.exists(DATA_PATH + project_id + "/selections/" + selectionId) - - -def get_selection(project_id, selectionId): - with open( - DATA_PATH + project_id + "/selections/" + selectionId + "/info.json" - ) as json_file: - data = json.load(json_file) - ret = { - "id": data["id"], - "name": data["name"], - "filePath": data["filePath"], - "creationDate": data["creationDate"], - "updateDate": data["updateDate"], - "nbSamples": len(data["samples"]), - } - - # Add the request Id if it exist - if "requestId" in data: - 
ret["requestId"] = data["requestId"] - - return ret - - -def get_selection_id_list(project_id, selectionId): - if not selection_exist(project_id, selectionId): - raise Exception("Selection " + selectionId + " doesn't exist") - - with open( - DATA_PATH + project_id + "/selections/" + selectionId + "/info.json" - ) as json_file: - data = json.load(json_file) - return data["samples"] - - -# def getSelectionsSamples(project_id, selectionIds: list, intersection: bool) -> set: -# if len(selectionIds) == 0: -# return [] - -# samples = set(get_selection_id_list(project_id, selectionIds[0])) -# for selectionId in selectionIds[1:]: -# if intersection: # intersection of the selections samples -# samples.intersection_update( -# get_selection_id_list(project_id, selectionId)) - -# if len(samples) == 0: -# return [] -# else: # Union of the model results samples -# samples = samples.union( -# get_selection_id_list(project_id, selectionId)) - -# return samples - - -def delete_selection(project_id, selection_id): - pythonModuleUtils.deleteDir(DATA_PATH + project_id + "/selections/" + selection_id) - projects.update_project(project_id) diff --git a/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/tags.py b/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/tags.py deleted file mode 100644 index 13f6ce476..000000000 --- a/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/tags.py +++ /dev/null @@ -1,109 +0,0 @@ -import os -from backend.modules.dataProviders.pythonDataProvider.dataUtils import pythonModuleUtils, hash - -DATA_PATH = pythonModuleUtils.DATA_PATH - - -def getTagsIds(projectId): - try: - return os.listdir(DATA_PATH + projectId + "/tags") - except FileNotFoundError: - os.mkdir(DATA_PATH + projectId + "/tags") - return [] - - -def getTags(projectId): - tagIds = getTagsIds(projectId) - tags = [] - for tagId in tagIds: - tag = getTagById(projectId, tagId) - # Get the number of sample tagged - tag["nbSamples"] = len(tag["tags"].keys()) - # remove the tag values - tag.pop("tags", None) - tags.append(tag) - return tags - - -def getTagById(projectId, tagId): - if tagId not in getTagsIds(projectId): - return None - - return pythonModuleUtils.readJsonFile( - DATA_PATH + projectId + "/tags/" + tagId + "/info.json" - ) - - -def getTagByName(projectId, tagName): - for tagId in getTagsIds(projectId): - tag = getTagById(projectId, tagId) - if tag["name"] == tagName: - return tag - return None - - -def updateTag(projectId, tagName, tagHash): - # TODO change to tagId - # ParametersCheck - projectHashMap = hash.getHashmap(projectId) - - for sampleHash in tagHash.keys(): - if sampleHash not in projectHashMap: - return "SampleHash not found in the project samples", 404 - - tag = getTagByName(projectId, tagName) - if tag: - # Update tag - for sampleHash in tagHash.keys(): - if tagHash[sampleHash] == 0: - tag["tags"].pop(sampleHash, None) - else: - tag["tags"][sampleHash] = tagHash[sampleHash] - - tag["updateDate"] = pythonModuleUtils.timeNow() - pythonModuleUtils.writeJsonFile( - DATA_PATH + projectId + "/tags/" + tag["id"] + "/info.json", tag - ) - return tag, 200 - else: - # Create tag - # tag ID - tagId = pythonModuleUtils.clean_filename(tagName) - if len(tagId) == 0: - tagId = pythonModuleUtils.timeNow() - - nbTag = 1 - while tagId in getTagsIds(projectId): - tagId = pythonModuleUtils.clean_filename(tagName) + "_" + str(nbTag) - nbTag += 1 - - # Save tag - os.mkdir(DATA_PATH + projectId + "/tags/" + tagId) - now = pythonModuleUtils.timeNow() - tagInfo = { - 
"id": tagId, - "name": tagName, - "tags": tagHash, - "creationDate": now, - "updateDate": now, - } - - pythonModuleUtils.writeJsonFile( - DATA_PATH + projectId + "/tags/" + tagId + "/info.json", tagInfo - ) - - return tagInfo, 200 - - -def deleteTag(projectId, tagId): - pythonModuleUtils.deleteDir(DATA_PATH + projectId + "/tags/" + tagId) - - -def getSamplesHash(projectId, tagId, tagValue): - tag = getTagById(projectId, tagId) - hash = [] - for sampleHash in tag["tags"].keys(): - if tag["tags"][sampleHash] == tagValue: - hash.append(sampleHash) - - return hash diff --git a/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/tree.py b/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/tree.py deleted file mode 100644 index 08fec8241..000000000 --- a/build/lib/backend/modules/dataProviders/pythonDataProvider/dataUtils/tree.py +++ /dev/null @@ -1,332 +0,0 @@ -import ujson as json -import os - -from backend.modules.dataProviders.pythonDataProvider.dataUtils import ( - pythonModuleUtils, - projects, - models, - hash, -) - -DATA_PATH = pythonModuleUtils.DATA_PATH -DATA_TYPES = pythonModuleUtils.DATA_TYPES - -# def getFirstLevelBlock(projectId, blockId): -# blockList = os.listdir(DATA_PATH + projectId + '/blocks') - -# if (blockId not in blockList): -# return -1 - -# with open(DATA_PATH + projectId + '/blocks/' + blockId + '/info.json')\ -# as json_file: -# data = json.load(json_file) - -# return data - - -# def getBlockTree(projectId, path, depth): - -# childrenBlockNames = utils.listDir(path) -# childrenInfo = [] - -# if depth > 0: -# # Get the children blocks info -# for name in childrenBlockNames: -# childrenInfo.append(getBlockTree( -# projectId, path + name + "/", depth - 1)) - -# with open(path + 'info.json') as json_file: -# data = json.load(json_file) -# data['childrenInfoList'] = childrenInfo - -# return data - - -def add_block_tree(projectId, data): - # Loading project block info - bli = projects.get_project_block_level_info(projectId) - - # going through the tree to check for error, store the block to add - blockToAdd = [] - - try: - for block in data["blockTree"]: - addBlockTree(projectId, block, bli, blockToAdd, 0, "") - except KeyError as e: - print(str(e)) - print("badInputTree") - return str(e), 403 - - if len(blockToAdd) == 0: - return "No block added", 201 - - # Store the blocks and the hash map - sampleLevel = len(bli) - 1 - hashToSave = {} - for block in blockToAdd: - if block["level"] == sampleLevel: - # Sample level, creating hash - sampleHash = hash.hash(block["path"]) - block["id"] = sampleHash - hashToSave[sampleHash] = block["path"] - - addBlock(projectId, block) - - # Save hashmap - hash.addToSampleHashmap(projectId, hashToSave) - - projects.update_project(projectId) - return str(len(blockToAdd)) + " added blocks" - - -def getBlockInfo(blockLevel, blockInfo): - """ - Convert the block info to fill the sampleInfo list with a colonName:value dict - """ - print("get block info") - print(blockLevel) - print(blockInfo) - ret = {} - ret[blockLevel["name"]] = blockInfo["name"] - - for dataType in pythonModuleUtils.DATA_TYPES: - if dataType in blockLevel: - for i, column in enumerate(blockLevel[dataType]): - ret[column["name"]] = blockInfo[dataType][i] - - return ret - - -def getBlockTreeFromSamples(projectId, samples: list): - blocksData = [] - addedBlocks = [] - - for samplePath in samples: - try: - sampleBlocksData, endLevel = __getBlockTreeFromSample( - projectId, samplePath, addedBlocks - ) - - if endLevel == 0: - # root block, should be 
here after added the first sample - blocksData.append(sampleBlocksData) - else: - # Not a root block, we need to find where to insert it - # First, find the root - cur = next( - block - for block in blocksData - if block["path"] in sampleBlocksData["path"] - ) - - # Then, find the level - for i in range(0, endLevel - 1): - cur = next( - child - for child in cur["childrenInfoList"] - if child["path"] in sampleBlocksData["path"] - ) - - cur["childrenInfoList"].append(sampleBlocksData) - except StopIteration: - # TODO : Find why this happens of certain projects - print("Warning, the sample " + samplePath + " doesn't have a root block") - - return blocksData - - -def __getBlockTreeFromSample(projectId, blockPath, addedBlocks): - """ - Go from the bottom to the top of a tree - if at the top or, if block already added, return - - """ - # Add the block we are in - addedBlocks.append(blockPath) - - with open( - DATA_PATH + projectId + "/blocks/" + blockPath + "/info.json" - ) as sampleData: - info = json.load(sampleData) - - if info["level"] == 0: - # Top of the tree, end of the recursively - return info, 0 - - if info["parentPath"] in addedBlocks: - # The block to the top is already added, there is no need to go further - return info, info["level"] - - # Climbing up the tree of one level - parentInfo, endLevel = __getBlockTreeFromSample( - projectId, info["parentPath"], addedBlocks - ) - - # Let's add our info to the parents - cur = parentInfo - - for i in range(endLevel, info["level"] - 1): - cur = cur["childrenInfoList"][0] - cur["childrenInfoList"] = [info] - - return parentInfo, endLevel - - -def addResultsToTree(projectId, tree: list, modelIds: list, commonOnly: bool) -> dict: - """ - Add, in the tree samples, the results from a model id list - """ - - # Load all the model results - modelResults = {} - for modelId in modelIds: - modelResults[modelId] = models.getModelResults(projectId, modelId) - - # Get the project block structure - proBs = projects.get_project_block_level_info(projectId) - sampleLevel = len(proBs) - 1 - - # Add in the samples the results - for rootBlock in tree: - __addResultsToABlock(rootBlock, modelResults, sampleLevel, commonOnly) - - return tree - - -def __addResultsToABlock(block, modelResults, sampleLevel, commonOnly): - if block["level"] == sampleLevel: - # Adding the results to the sample - block["results"] = {} - for modelId in modelResults: - # If no more than 1 model and commonOnly, no need to check if - # sample exist in tree - if ( - commonOnly - or len(modelResults) == 1 - or block["path"] in modelResults[modelId] - ): - block["results"][modelId] = modelResults[modelId][block["path"]] - return - - for child in block["childrenInfoList"]: - __addResultsToABlock(child, modelResults, sampleLevel, commonOnly) - - -# Add samples to a tree -def addBlockTree(projectId, block, blockLevelInfo, blockToAdd, level, parentPath): - __checkBlockCompliant(block, level, blockLevelInfo) - - # check if block exist - data = findBlockInfo(projectId, parentPath + block["name"]) - if data is None: - # BLock doesn't exist - blockToAdd.append(__createBlock(projectId, block, level, parentPath)) - - path = parentPath + block["name"] + "/" - - if level < len(blockLevelInfo) - 1: - for child in block["childrenInfoList"]: - addBlockTree(projectId, child, blockLevelInfo, blockToAdd, level + 1, path) - - -def findBlockInfo(projectId, blockPath): - curPath = DATA_PATH + projectId + "/blocks/" + blockPath - - if not os.path.isdir(curPath): - return None - - with open(curPath + "/info.json", 
"r") as json_file: - data = json.load(json_file) - - return data - - -def __checkBlockCompliant(block, level, blockLevelInfo): - # Check if a block is correct (name exist, levelInfo coherence, etc) - - if "name" not in block: - raise KeyError("A block at level " + str(level) + " is missing his name") - - # TODO check name valid (no / & .) - - if "childrenInfoList" not in block and level < (len(blockLevelInfo) - 1): - raise KeyError( - "Block : " + block["name"] + " has no childrenInfoList properties" - ) - - if level < len(blockLevelInfo) - 1 and len(block["childrenInfoList"]) == 0: - raise KeyError( - "Block : " - + block["name"] - + " has no child block, the tree need to be complete" - ) - - levelInfo = blockLevelInfo[level] - - for type_ in DATA_TYPES: - if type_ in levelInfo and len(levelInfo[type_]) > 0: - if type_ not in block: - raise KeyError( - "At least one value of type " - + type_ - + " is required in the block : " - + levelInfo["name"] - ) - - if len(block[type_]) != len(levelInfo[type_]): - raise KeyError( - "Exactly " - + str(len(levelInfo[type_])) - + " " - + type_ - + " required in the block : " - + levelInfo["name"] - ) - - # TODO Implement column default - - for i, col in enumerate(levelInfo[type_]): - if col["type"] == "integer" and type(block[type_][i]) is str: - raise KeyError( - "Col " - + col["name"] - + " require an integer in the block : " - + levelInfo["name"] - ) - - -def __createBlock(projectId, block, level, parentPath): - blockPath = parentPath + block["name"] + "/" - - debiaiBlock = { - "id": block["name"], - "name": block["name"], - "path": blockPath, - "parentPath": parentPath, - "level": level, - # "creationDate": str(date.today()), - # "updateDate": str(date.today()), - # "version": "0.0.0", - # "metaDataList": {}, - } - - for type_ in DATA_TYPES: - if type_ in block: - debiaiBlock[type_] = block[type_] - - return debiaiBlock - - -def addBlock(projectId, block): - # create the block folder and his info.json file - try: - os.mkdir(DATA_PATH + projectId + "/blocks/" + block["path"]) - pythonModuleUtils.writeJsonFile( - DATA_PATH + projectId + "/blocks/" + block["path"] + "/info.json", block - ) - except FileExistsError: - print( - "Warning : The block " - + block["path"] - + " already exist, this is not supposed to append" - ) diff --git a/build/lib/backend/modules/dataProviders/webDataProvider/WebDataProvider.py b/build/lib/backend/modules/dataProviders/webDataProvider/WebDataProvider.py deleted file mode 100644 index 3ff8afb3a..000000000 --- a/build/lib/backend/modules/dataProviders/webDataProvider/WebDataProvider.py +++ /dev/null @@ -1,106 +0,0 @@ -from backend.modules.dataProviders.DataProvider import DataProvider -from backend.modules.dataProviders.webDataProvider.useCases.data import ( - get_project_id_list, - get_project_samples, -) -from backend.modules.dataProviders.webDataProvider.useCases.projects import ( - get_all_projects_from_data_provider, - get_single_project_from_data_provider, - delete_project, -) -from backend.modules.dataProviders.webDataProvider.useCases.models import ( - get_model_results, - get_models_info, - get_model_result_id, - delete_model, -) -import backend.modules.dataProviders.webDataProvider.useCases.selections as useCaseSelections -from backend.modules.dataProviders.webDataProvider.http.api import get_info, get_status -from backend.modules.dataProviders.webDataProvider.cache.cache import Cache - - -# WebDataProvider class, allow to get data from a web data-provider -class WebDataProvider(DataProvider): - def 
__init__(self, url, name): - self.url = url - self._name = name - self.alive = None - - # Init cache - self.cache = Cache() - - @property - def name(self): - return self._name - - @property - def type(self): - return "Web" - - # Todo api call Info (new info) - def is_alive(self): - self.alive = True if get_status(self.url) is True else False - return self.alive - - def get_info(self): - return get_info(self.url) - - # ==== Projects ==== - def get_projects(self): - # Request method to get projects overview - # Return Arr[object{ id, name, nb_samples, nb_models, nb_selections, - # update_time, creation_time}] - return get_all_projects_from_data_provider(self.url, self.name) - - def get_project(self, id_project): - # Request method to get projects overview - # Return object{ id, name, nb_samples, nb_models, nb_selections, - # update_time, creation_time} - return get_single_project_from_data_provider(self.url, self.name, id_project) - - def delete_project(self, project_id): - return delete_project(self.url, project_id) - - def get_id_list(self, project_id, analysis, _from=None, _to=None): - # http Request on dp to get id list - # Return Arr[id] - return get_project_id_list( - self.url, self.cache, project_id, analysis, _from, _to - ) - - def get_samples(self, project_id, analysis, id_list): - # http Request get full sample - # Return object { id: [data]} - return get_project_samples(self.url, project_id, analysis, id_list) - - # ==== Selections ==== - def get_selections(self, project_id): - # Get selections on project - # Return arr[object{ id, name, creation_time, nb_samples}] - return useCaseSelections.get_project_selections(self.url, project_id) - - def get_selection_id_list(self, project_id, selection_id): - return useCaseSelections.get_id_list_from_selection( - self.url, self.cache, project_id, selection_id - ) - - def create_selection(self, project_id, name, id_list, request_id=None): - return useCaseSelections.create_selection( - self.url, project_id, name, id_list, request_id - ) - - def delete_selection(self, project_id, selection_id): - return useCaseSelections.delete_selection(self.url, project_id, selection_id) - - # ==== Models ==== - def get_models(self, project_id): - return get_models_info(self.url, project_id) - - def get_model_results_id_list(self, project_id, model_id): - return get_model_result_id(self.url, self.cache, project_id, model_id) - - def get_model_results(self, project_id, model_id, sample_list): - return get_model_results(self.url, project_id, model_id, sample_list) - - def delete_model(self, project_id, model_id): - return delete_model(self.url, project_id, model_id) diff --git a/build/lib/backend/modules/dataProviders/webDataProvider/__init__.py b/build/lib/backend/modules/dataProviders/webDataProvider/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/build/lib/backend/modules/dataProviders/webDataProvider/cache/__init__.py b/build/lib/backend/modules/dataProviders/webDataProvider/cache/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/build/lib/backend/modules/dataProviders/webDataProvider/cache/cache.py b/build/lib/backend/modules/dataProviders/webDataProvider/cache/cache.py deleted file mode 100644 index 68b2c05f7..000000000 --- a/build/lib/backend/modules/dataProviders/webDataProvider/cache/cache.py +++ /dev/null @@ -1,90 +0,0 @@ -# This service is used to cache data from the web data provider -# It's used to avoid multiple requests to the web data provider -# It will mainly save the id list of samples, 
selections and models results -# The ability to cache and the time to live are configurable in the config file - -from backend.config.init_config import get_config -from cacheout import Cache as CacheoutCache - - -class Cache: - def __init__(self): - # Get config - self.config = get_config() - - self.cache_enabled = self.config["WEB_DATA_PROVIDERS_CONFIG"]["cache"] - self.cache_ttl = self.config["WEB_DATA_PROVIDERS_CONFIG"]["cache_duration"] - - # Init cache - self.project_id_list_cache = CacheoutCache(maxsize=256, ttl=self.cache_ttl) - # __: [...] - # _total: [...] - - self.selection_id_list_cache = CacheoutCache(maxsize=256, ttl=self.cache_ttl) - # _: [...] - - self.model_result_id_list_cache = CacheoutCache(maxsize=256, ttl=self.cache_ttl) - # _: [...] - - # Project id list - def get_key(self, id_project, _from=None, _to=None): - if _from is None or _to is None: - return "{}_total".format(id_project) - else: - return "{}_{}_{}".format(id_project, _from, _to) - - def get_id_list(self, id_project, _from=None, _to=None): - if not self.cache_enabled: - return None - - key = self.get_key(id_project, _from, _to) - - return self.project_id_list_cache.get(key) - - def set_id_list(self, id_project, id_list, _from=None, _to=None): - if not self.cache_enabled: - return - - key = self.get_key(id_project, _from, _to) - - self.project_id_list_cache.set(key, id_list) - - # Selection id list - def get_selection_key(self, id_project, id_selection): - return "{}_{}".format(id_project, id_selection) - - def get_selection_id_list(self, id_project, id_selection): - if not self.cache_enabled: - return None - - key = self.get_selection_key(id_project, id_selection) - - return self.selection_id_list_cache.get(key) - - def set_selection_id_list(self, id_project, id_selection, id_list): - if not self.cache_enabled: - return - - key = self.get_selection_key(id_project, id_selection) - - self.selection_id_list_cache.set(key, id_list) - - # Model result id list - def get_model_result_key(self, id_project, id_model): - return "{}_{}".format(id_project, id_model) - - def get_model_result_id_list(self, id_project, id_model): - if not self.cache_enabled: - return None - - key = self.get_model_result_key(id_project, id_model) - - return self.model_result_id_list_cache.get(key) - - def set_model_result_id_list(self, id_project, id_model, id_list): - if not self.cache_enabled: - return - - key = self.get_model_result_key(id_project, id_model) - - self.model_result_id_list_cache.set(key, id_list) diff --git a/build/lib/backend/modules/dataProviders/webDataProvider/http/__init__.py b/build/lib/backend/modules/dataProviders/webDataProvider/http/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/build/lib/backend/modules/dataProviders/webDataProvider/http/api.py b/build/lib/backend/modules/dataProviders/webDataProvider/http/api.py deleted file mode 100644 index 1b32e7fa4..000000000 --- a/build/lib/backend/modules/dataProviders/webDataProvider/http/api.py +++ /dev/null @@ -1,222 +0,0 @@ -import requests -import json -from backend.modules.dataProviders.DataProviderException import DataProviderException - - -# Todo : change info if in not alive anymore -def get_status(url): - try: - r = requests.get(url + "/info") - - if r.status_code != 200: - return False - - # Check content type - content = get_http_response(r) - - if content is None: - return False # we are expecting a dict - - return True - - except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): - return False - except 
requests.exceptions.InvalidURL: - raise DataProviderException("Invalid URL", 400) - - -def get_info(url): - try: - r = requests.get(url + "/info") - return get_http_response(r) - except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): - return None - - -# ==== Projects ==== -def get_projects(url): - try: - r = requests.get(url + "/projects") - return get_http_response(r) - except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): - return None - - -def get_project(url, id_project): - try: - r = requests.get(url + "/projects/" + id_project) - return get_http_response(r) - except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): - return None - - -def get_id_list(url, id_project, analysis, _from=None, _to=None): - try: - if _from is not None and _to is not None: - url = ( - url - + "/projects/" - + id_project - + "/data-id-list?from={}&to={}&analysisId={}".format( - _from, _to, analysis["id"] - ) - ) - else: - url = ( - url - + "/projects/" - + id_project - + "/data-id-list?analysisId={}".format(analysis["id"]) - ) - - if analysis["start"]: - url += "&analysisStart={}".format(str(analysis["start"]).lower()) - if analysis["end"]: - url += "&analysisEnd={}".format(str(analysis["end"]).lower()) - - r = requests.get(url) - - return get_http_response(r) - except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): - print( - "Error getting data id list from {} on project {}".format(url, id_project) - ) - return [] - - -def get_samples(url, id_project, analysis, id_list): - try: - rurl = ( - url - + "/projects/{}/data?analysisId={}&analysisStart={}&analysisEnd={}".format( - id_project, - analysis["id"], - str(analysis["start"]).lower(), - str(analysis["end"]).lower(), - ) - ) - - r = requests.post(rurl, json=id_list) - return get_http_response(r) - except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): - raise Exception( - "Could not get the data provider {} data for project {}".format( - url, id_project - ) - ) - - -def delete_project(url, id_project): - try: - r = requests.delete(url + "/projects/" + id_project) - return get_http_response(r) - except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): - return None - - -# ==== Selections ==== -def get_selections(url, id_project): - try: - r = requests.get(url + "/projects/{}/selections".format(id_project)) - return get_http_response(r) - except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): - return None - - -def post_selection(url, id_project, data): - try: - r = requests.post(url + "/projects/{}/selections".format(id_project), json=data) - return get_http_response(r) - except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): - return None - - -def get_selection_id(url, id_project, id_selection): - try: - r = requests.get( - url - + "/projects/{}/selections/{}/selected-data-id-list".format( - id_project, id_selection - ) - ) - - return get_http_response(r) - except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): - return None - - -def delete_selection(url, id_project, id_selection): - try: - r = requests.delete( - url + "/projects/{}/selections/{}".format(id_project, id_selection) - ) - - return get_http_response(r) - except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): - return None - - -# ==== Models ==== -def get_models(url, id_project): - try: - r = requests.get(url + "/projects/{}/models".format(id_project)) - return get_http_response(r) - except 
(requests.exceptions.ConnectionError, requests.exceptions.Timeout): - return None - - -def get_model_result_id_list(url, project_id, model_id): - try: - r = requests.get( - url - + "/projects/{}/models/{}/evaluated-data-id-list".format( - project_id, model_id - ) - ) - return get_http_response(r) - except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): - return None - - -def get_model_result(url, id_project, id_model, id_sample_list): - try: - r = requests.post( - url + "/projects/{}/models/{}/results".format(id_project, id_model), - json=id_sample_list, - ) - return get_http_response(r) - except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): - return None - - -def delete_model(url, id_project, id_model): - try: - r = requests.delete(url + "/projects/{}/models/{}".format(id_project, id_model)) - return get_http_response(r) - except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): - return None - - -# ==== Utils ==== -def get_http_response(response): - try: - if response.raise_for_status() is None: - return get_valid_response(response) - except requests.exceptions.HTTPError: - return get_error_response(response) - - -def get_valid_response(response): - if response.status_code == 204: - return True - try: - return response.json() - except json.decoder.JSONDecodeError: - return - - -def get_error_response(response): - if response.status_code == 500: - raise DataProviderException("Data Provider unexpected Error", 500) - - raise DataProviderException(response.text, response.status_code) diff --git a/build/lib/backend/modules/dataProviders/webDataProvider/useCases/__init__.py b/build/lib/backend/modules/dataProviders/webDataProvider/useCases/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/build/lib/backend/modules/dataProviders/webDataProvider/useCases/data.py b/build/lib/backend/modules/dataProviders/webDataProvider/useCases/data.py deleted file mode 100644 index eb3ba8d63..000000000 --- a/build/lib/backend/modules/dataProviders/webDataProvider/useCases/data.py +++ /dev/null @@ -1,23 +0,0 @@ -import backend.modules.dataProviders.webDataProvider.http.api as api - -# -# UseCase folder role is the middleware between class methods and http requests -# It's used to make all changes in data we took from DP and send it back to -# the class/controller -# - - -def get_project_id_list(url, cache, id_project, analysis, _from=None, _to=None): - id_list = cache.get_id_list(id_project, _from, _to) - - if id_list is None: - id_list = api.get_id_list(url, id_project, analysis, _from, _to) - cache.set_id_list(id_project, id_list, _from, _to) - - return id_list - - -def get_project_samples(url, id_project, analysis, id_list): - data = api.get_samples(url, id_project, analysis, id_list) - - return data diff --git a/build/lib/backend/modules/dataProviders/webDataProvider/useCases/models.py b/build/lib/backend/modules/dataProviders/webDataProvider/useCases/models.py deleted file mode 100644 index 6b646b261..000000000 --- a/build/lib/backend/modules/dataProviders/webDataProvider/useCases/models.py +++ /dev/null @@ -1,58 +0,0 @@ -import backend.modules.dataProviders.webDataProvider.http.api as api -from backend.modules.dataProviders.DataProviderException import DataProviderException - - -def get_models_info(url, project_id): - # Models - try: - models = api.get_models(url, project_id) - debiai_models = [] - for model_in in models: - if "id" not in model_in: - continue - model = { - "id": model_in["id"], - "metadata": None, - 
"creationDate": None, - } - - # Adding name and nbResults - model["name"] = model_in["name"] if "name" in model_in else model_in["id"] - if "nbResults" in model_in: - model["nbResults"] = model_in["nbResults"] - - # Adding metadata - if "metadata" in model_in: - model["metadata"] = model_in["metadata"] - - # Adding creationDate - if "creationDate" in model_in: - model["creationDate"] = model_in["creationDate"] - - debiai_models.append(model) - - return debiai_models - except DataProviderException: - # The route may not be implemented in the data provider - return [] - - -def get_model_result_id(url, cache, project_id, model_id): - # Todo : Add route to call Id results for a Model (DP) - # Todo : Add Some formatting if data has to change - - id_list = cache.get_model_result_id_list(project_id, model_id) - - if id_list is None: - id_list = api.get_model_result_id_list(url, project_id, model_id) - cache.set_model_result_id_list(project_id, model_id, id_list) - - return id_list - - -def get_model_results(url, project_id, model_id, sample_list): - return api.get_model_result(url, project_id, model_id, sample_list) - - -def delete_model(url, project_id, model_id): - return api.delete_model(url, project_id, model_id) diff --git a/build/lib/backend/modules/dataProviders/webDataProvider/useCases/projects.py b/build/lib/backend/modules/dataProviders/webDataProvider/useCases/projects.py deleted file mode 100644 index c3dad454b..000000000 --- a/build/lib/backend/modules/dataProviders/webDataProvider/useCases/projects.py +++ /dev/null @@ -1,131 +0,0 @@ -import backend.modules.dataProviders.webDataProvider.http.api as api - -from backend.modules.dataProviders.webDataProvider.useCases.models import get_models_info -from backend.modules.dataProviders.webDataProvider.useCases.selections import ( - get_project_selections, -) - - -def get_all_projects_from_data_provider(url, name): - projects = api.get_projects(url) - project_list = [] - - if not projects: - return - - for project_id in projects: - if "nbSamples" not in projects[project_id]: - projects[project_id]["nbSamples"] = None - - if "nbModels" not in projects[project_id]: - projects[project_id]["nbModels"] = None - - if "nbSelections" not in projects[project_id]: - projects[project_id]["nbSelections"] = None - - if "name" not in projects[project_id]: - projects[project_id]["name"] = project_id - - if "creationDate" not in projects[project_id]: - projects[project_id]["creationDate"] = None - - if "updateDate" not in projects[project_id]: - projects[project_id]["updateDate"] = None - - project_list.append( - { - "id": project_id, - "dataProvider": name, - "name": projects[project_id]["name"], - "nbModels": projects[project_id]["nbModels"], - "nbSamples": projects[project_id]["nbSamples"], - "nbSelections": projects[project_id]["nbSelections"], - "creationDate": projects[project_id]["creationDate"], - "updateDate": projects[project_id]["updateDate"], - } - ) - - return project_list - - -def get_single_project_from_data_provider(url, data_provider_name, id_project): - project = api.get_project(url, id_project) - - # Check the project columns - project_columns = get_project_columns(project) - - # Add selections - selections = get_project_selections(url, id_project) - - # Add models - models = get_models_info(url, id_project) - - # Check nbSamples - if "nbSamples" in project: - nbSamples = project["nbSamples"] - else: - nbSamples = None - - # Check creationDate - if "creationDate" in project: - creationDate = project["creationDate"] - else: - creationDate 
= None - - # Check updateDate - if "updateDate" in project: - updateDate = project["updateDate"] - else: - updateDate = None - - # Converting views to DebiAI projects - return { - "id": id_project, - "name": project["name"] if "name" in project else id_project, - "dataProvider": data_provider_name, - "columns": project_columns, - "resultStructure": project["expectedResults"], - "nbModels": len(models), - "nbSamples": nbSamples, - "nbSelections": len(selections), - "creationDate": creationDate, - "updateDate": updateDate, - "selections": selections, - "models": models, - } - - -def get_project_columns(project): - project_columns = [] - # Expected project["columns"] example : - # [ - # { "name": "storage", "category": "other" }, - # { "name": "age", "category": "context" }, - # { "name": "path", "category": "input", group: "image" }, - # { "name": "label", "category": "groundtruth", group: "image" }, - # { "name": "type" }, # category is not specified, it will be "other" - # ] - if "columns" in project: - for column in project["columns"]: - col = {"name": column["name"]} - - if "category" in column: - col["category"] = column["category"] - else: - col["category"] = "other" - - if "type" in column: - col["type"] = column["type"] - else: - col["type"] = "auto" - - if "group" in column: - col["group"] = column["group"] - - project_columns.append(col) - - return project_columns - - -def delete_project(url, project_id): - api.delete_project(url, project_id) diff --git a/build/lib/backend/modules/dataProviders/webDataProvider/useCases/selections.py b/build/lib/backend/modules/dataProviders/webDataProvider/useCases/selections.py deleted file mode 100644 index 794328d4c..000000000 --- a/build/lib/backend/modules/dataProviders/webDataProvider/useCases/selections.py +++ /dev/null @@ -1,61 +0,0 @@ -import backend.modules.dataProviders.webDataProvider.http.api as api -from backend.modules.dataProviders.DataProviderException import DataProviderException - - -def get_project_selections(url, project_id): - try: - selections = api.get_selections(url, project_id) - - if selections is None: - print(f"Error: No selections found for project {project_id} on {url}") - raise DataProviderException("No selections found", 404) - - debiai_selections = [] - for selection in selections: - if "id" not in selection or selection["id"] is None: - print(f"Error: No id for selection: {selection}") - raise DataProviderException( - "An id is missing in the given selection", 400 - ) - - selection_to_add = { - "name": selection["name"] if "name" in selection else selection["id"], - "id": selection["id"], - } - - if "nbSamples" in selection: - selection_to_add["nbSamples"] = selection["nbSamples"] - if "creationDate" in selection: - selection_to_add["creationDate"] = selection["creationDate"] - if "updateDate" in selection: - selection_to_add["updateDate"] = selection["updateDate"] - - debiai_selections.append(selection_to_add) - return debiai_selections - - except DataProviderException: - # The route may not be implemented in the data provider - return [] - - -def get_id_list_from_selection(url, cache, project_id, selection_id): - id_list = cache.get_selection_id_list(project_id, selection_id) - - if id_list is None: - id_list = api.get_selection_id(url, project_id, selection_id) - cache.set_selection_id_list(project_id, selection_id, id_list) - - return id_list - - -def create_selection(url, project_id, name, id_list, request_id): - data = {"idList": id_list, "name": name} - - if request_id is not None: - data["request"]: 
request_id - - return api.post_selection(url, project_id, data) - - -def delete_selection(url, project_id, selection_id): - return api.delete_selection(url, project_id, selection_id) diff --git a/build/lib/backend/modules/exportMethods/__init__.py b/build/lib/backend/modules/exportMethods/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/build/lib/backend/modules/exportMethods/exportClass.py b/build/lib/backend/modules/exportMethods/exportClass.py deleted file mode 100644 index f5eb49284..000000000 --- a/build/lib/backend/modules/exportMethods/exportClass.py +++ /dev/null @@ -1,43 +0,0 @@ -import uuid - -############################################################################# -# -# Export type and method classes -# -# Those class are used to export data from a specific export type -# -############################################################################# - - -class ExportType: - name = None - parameters_definition = [] - export_method_class = None - - def to_dict(self): - return {"name": self.name, "parameters": self.parameters_definition} - - -class ExportMethod: - id = None - type = None # ExportType object - name = None - parameters = [] - - deletable = False - - def __init__(self, type, name, parameters): - self.id = uuid.uuid4().hex - self.type = type - self.name = name - self.parameters = parameters - - def to_dict(self): - return { - "id": self.id, - "type": self.type.name, - "name": self.name, - "parameters": self.parameters, - "parameterNames": self.type.parameters_definition, - "deletable": self.deletable, - } diff --git a/build/lib/backend/modules/exportMethods/exportUtils.py b/build/lib/backend/modules/exportMethods/exportUtils.py deleted file mode 100644 index d7502bcd6..000000000 --- a/build/lib/backend/modules/exportMethods/exportUtils.py +++ /dev/null @@ -1,200 +0,0 @@ -from backend.config.init_config import get_config -import backend.modules.dataProviders.dataProviderManager as data_provider_manager -from backend.modules.dataProviders.DataProviderException import DataProviderException -import time - -from backend.modules.exportMethods.methods.kafkaUtils import KafkaExportType -from backend.modules.exportMethods.methods.postUtils import PostExportType - -############################################################################# -# -# Export utils -# -# DebiAI allows to export data or selections to other services with -# different methods. 
-# This utils load and store the methods for all the projects -# -############################################################################# - -# The export types are the different types of export methods that we can create -# They are used to create the export methods -export_types = [KafkaExportType(), PostExportType()] - -# The export methods are the different methods created from the types -# They we can used to export data -export_methods = [] - - -# Export types -def get_export_type(typeName): - return [type for type in export_types if type.name == typeName][0] - - -def type_exist(typeName): - return typeName in [type.name for type in export_types] - - -# Export utils -def get_export_methods(): - # Return all the export methods as a list of dictionaries - return [method.to_dict() for method in export_methods] - - -def get_export_method(methodId): - # Check the method id - if not method_exist(methodId): - raise Exception("Export method " + methodId + " not found") - - return [method for method in export_methods if method.id == methodId][0] - - -def method_exist(methodId): - return methodId in [method.id for method in export_methods] - - -def load_export_methods(): - global export_methods - print("================== EXPORT METHODS ==================") - - # Load the export methods from the config file - config = get_config() - - if "EXPORT_METHODS_LIST" in config: - print(" - Loading export methods from config file") - config_export_methods = config["EXPORT_METHODS_LIST"] - - for method in config_export_methods: - print( - " Adding method " + method, "[", config_export_methods[method], "]" - ) - - try: - parameters = config_export_methods[method].split(",") - if len(parameters) == 0: - raise "method " + method + " has no parameters, aborting" - - # Trim parameters - for i in range(len(parameters)): - parameters[i] = "".join(parameters[i].rstrip().lstrip()) - - export_type_name = parameters[0] - export_method = create_export_method( - method, export_type_name, parameters[1:] - ) - - if config["EXPORT_METHODS_CONFIG"]["deletion"]: - # The export method created from the config file are deletable - export_method.deletable = True - - export_methods.append(export_method) - except Exception as e: - print("Error while configuring method " + method + ": " + str(e)) - - if len(export_methods) == 0: - print(" No export method configured") - - -def add_export_method(data): - # Check the method type - if not type_exist(data["type"]): - raise Exception("Method type " + data["type"] + " not found") - - export_method = create_export_method(data["name"], data["type"], data["parameters"]) - - config = get_config() - if config["EXPORT_METHODS_CONFIG"]["deletion"]: - # The export method created from the config file are deletable - export_method.deletable = True - - export_methods.append(export_method) - - return export_method.to_dict() - - -def create_export_method(name, type, parameters): - # Check the method type - if not type_exist(type): - raise Exception( - "Export type '" - + type - + "' isn't supported, only " - + str([type.name for type in export_types]) - + " are supported" - ) - - # Get the export type - export_type = get_export_type(type) - - # Create the method - return export_type.export_method_class(name, parameters) - - -def delete_export_method(method_id): - global export_methods - - # Check the method id - if not method_exist(method_id): - raise Exception("The export method wasn't found") - - # Delete the method - export_methods = [method for method in export_methods if method.id != 
method_id] - return "method " + method_id + " deleted" - - -# Export data -def exportSelection(dataProviderId, projectId, data): - method_id = data["exportMethodId"] - - # Check the method id - if not method_exist(method_id): - raise Exception("method " + method_id + " not found") - - export_method = get_export_method(method_id) - - # Creation of the data selection to export - try: - data_provider = data_provider_manager.get_single_data_provider(dataProviderId) - project = data_provider.get_project(projectId) - except DataProviderException as e: - return e.message, e.status_code - - id_list = [] - - for id in data["sampleHashList"]: - id_list.append({"id": id}) - - data_to_export = { - "origin": "DebiAI", - "type": "selection", - "projectId": projectId, - "data_provider_id": dataProviderId, - "selection_name": data["selectionName"], - "date": time.time(), - "sample_ids": id_list, - } - - # Project name - if "name" in project: - data_to_export["project_name"] = project["name"] - - # Annotation extra value - if "annotationValue" in data and data["annotationValue"] != "": - data_to_export["value"] = data["annotationValue"] - - # Export the data - export_method.export(data_to_export) - - return "data exported" - - -def exportData(method_id, data): - # Check the method id - if not method_exist(method_id): - raise Exception("method " + method_id + " not found") - - export_method = get_export_method(method_id) - - # Export the data - export_method.export(data) - - return "data exported" diff --git a/build/lib/backend/modules/exportMethods/methods/__init__.py b/build/lib/backend/modules/exportMethods/methods/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/build/lib/backend/modules/exportMethods/methods/kafkaUtils.py b/build/lib/backend/modules/exportMethods/methods/kafkaUtils.py deleted file mode 100644 index 09c708df1..000000000 --- a/build/lib/backend/modules/exportMethods/methods/kafkaUtils.py +++ /dev/null @@ -1,71 +0,0 @@ -from kafka import KafkaProducer -from backend.modules.exportMethods.exportClass import ExportType, ExportMethod -import json - -############################################################################# -# -# Kafka export method -# -# Connect to a kafka server on init and send data to a topic -# -############################################################################# - - -class KafkaExportType(ExportType): - def __init__(self): - super().__init__() - - self.name = "kafka" - # Expected parameters: [server, topic] - self.parameters_definition = ["server", "topic"] - - self.export_method_class = KafkaExportMethod - - -class KafkaExportMethod(ExportMethod): - up = False - - def __init__(self, name, parameters): - super().__init__(KafkaExportType(), name, parameters) - - # Expected parameters: [server, topic] - # Check parameters - if len(parameters) != 2: - raise Exception( - "Kafka export type requires 2 parameters : server and topic" - ) - - # Create producer - self.server = parameters[0] - self.topic = parameters[1] - - # Create Kafka producer - try: - self.producer = KafkaProducer( - bootstrap_servers=self.server, - value_serializer=lambda v: json.dumps(v).encode("utf-8"), - ) - self.up = True - except Exception as e: - print("Kafka producer creation failed : " + str(e)) - print("server : '" + self.server + "'") - raise Exception( - "Kafka producer creation on server '" - + self.server - + "' failed with error : " - + str(e) - ) - - def export(self, data): - print("Kafka export method : Sending data to kafka", self.server, self.topic) - 
print(data) - - if not self.up: - raise Exception("Kafka producer is not up") - - try: - print(self.producer.send(self.topic, data)) - print("Kafka export method : Data sent") - except Exception as e: - print("Kafka export method : Error sending data to kafka", e) - raise "Kafka export method : Error sending data to kafka" diff --git a/build/lib/backend/modules/exportMethods/methods/postUtils.py b/build/lib/backend/modules/exportMethods/methods/postUtils.py deleted file mode 100644 index a37d18aff..000000000 --- a/build/lib/backend/modules/exportMethods/methods/postUtils.py +++ /dev/null @@ -1,59 +0,0 @@ -from backend.modules.exportMethods.exportClass import ExportType, ExportMethod -import requests - -############################################################################# -# -# HTTP - Post export method -# -# Send data through a post request to a server -# -############################################################################# - - -class PostExportType(ExportType): - def __init__(self): - super().__init__() - - self.name = "post" - self.parameters_definition = ["url"] - - self.export_method_class = PostExportMethod - - -class PostExportMethod(ExportMethod): - up = False - - def __init__(self, name, parameters): - super().__init__(PostExportType(), name, parameters) - - # Expected parameters: [url] - # Check parameters - if len(parameters) != 1: - raise Exception("Post export type requires 1 parameter : the url") - - self.url = parameters[0] - - # Check url - if not self.url.startswith("http://") and not self.url.startswith("https://"): - raise Exception( - "Url '" + self.url + "' must start with http:// or https://" - ) - - self.up = True - - def export(self, data): - print("Post export method: Sending data to '" + self.url + "'") - - if not self.up: - raise Exception("Can't send data to '" + self.url + "'") - - try: - # Send data - r = requests.post(self.url, json=data) - r.raise_for_status() - print("Post export method : Data sent") - except Exception as e: - print("Post export method : Error sending post request", e) - raise Exception( - "Post export method : Error sending post request on url" + str(e) - ) diff --git a/build/lib/backend/server.py b/build/lib/backend/server.py deleted file mode 100644 index 0c3e2210c..000000000 --- a/build/lib/backend/server.py +++ /dev/null @@ -1,5 +0,0 @@ -from backend.backend import start_server - - -def run(): - start_server() diff --git a/build/lib/backend/swagger.yaml b/build/lib/backend/swagger.yaml deleted file mode 100644 index a6cb5c5b4..000000000 --- a/build/lib/backend/swagger.yaml +++ /dev/null @@ -1,1914 +0,0 @@ -swagger: "2.0" -info: - version: 0.27.1 - title: DebiAI_BACKEND_API - description: DebiAI backend api - contact: - email: debiai@irt-systemx.fr - license: - name: Apache 2.0 - url: https://www.apache.org/licenses/LICENSE-2.0.html -paths: - /version: - get: - summary: Ping to check if the backend is running - operationId: backend.controller.projects.ping - responses: - 200: - description: The server is online - - # Data providers - /data-providers: - get: - summary: Get data providers list and status - tags: [Data Providers] - operationId: backend.controller.dataProviders.get_data_providers - responses: - 200: - description: List of data providers - schema: - type: array - items: - $ref: "#/definitions/dataProvider" - - post: - summary: Add data provider to data providers list - tags: [Data Providers] - operationId: backend.controller.dataProviders.post_data_providers - parameters: - - name: data - in: body - schema: - 
type: object - required: - - name - - type - properties: - name: - type: string - description: The name of the data Provider - type: - type: string - description: Type of the data Provider (Web) - url: - type: string - description: The url of the new data Provider if type is Web - responses: - 204: - description: Data provider added to the list - 400: - description: Bad request on data - - /data-providers/{dataProviderId}: - delete: - summary: Delete data providers from the list - tags: [Data Providers] - operationId: backend.controller.dataProviders.delete_data_providers - parameters: - - name: dataProviderId - in: path - type: string - required: true - responses: - 204: - description: Data provider deleted - 400: - description: The Data provider id must not be null - 404: - description: The data provider doesn't exist - - get: - summary: Get general informations about a data provider, like his version or the max number sample for each type of request - tags: [Data Providers] - operationId: backend.controller.dataProviders.get_data_provider_info - parameters: - - name: dataProviderId - in: path - type: string - required: true - responses: - 200: - description: The info of the data provider - schema: - type: object - required: - - version - properties: - version: - type: string - maxSampleIdByRequest: - type: integer - maxSampleDataByRequest: - type: integer - maxResultByRequest: - type: integer - - canDelete: - type: object - description: Information about what can be deleted by DebiAI - properties: - projects: - type: boolean - default: true - selections: - type: boolean - default: true - models: - type: boolean - default: true - 400: - description: The Data provider id must not be null - 404: - description: The data provider doesn't exist - - # Projects - /projects: - get: - summary: Get the projects overview - tags: [Project] - operationId: backend.controller.projects.get_projects - responses: - 200: - description: List of project overviews - schema: - type: array - items: - $ref: "#/definitions/projectOverview" - - post: - summary: Post a new project - tags: [Project] - operationId: backend.controller.pythonModuleDp.post_project - parameters: - - name: data - in: body - schema: - type: object - required: - - projectName - properties: - projectName: - type: string - description: The project name - blockLevelInfo: - type: array - items: - $ref: "#/definitions/blockLevelInfo" - description: List of the block level info - - responses: - 200: - description: project created, the project ID is returned - schema: - type: object - required: - - projectId - properties: - projectId: - type: string - 400: - description: The project name must not be null - 401: - description: The project name is too long - 402: - description: The project name contain invalid characters - 403: - description: A project with the same name already exist - - /data-providers/{dataProviderId}/projects: - get: - summary: Get the projects overview for a data provider - tags: [Project] - operationId: backend.controller.projects.get_data_providers_project - parameters: - - name: dataProviderId - in: path - type: string - required: true - responses: - 200: - description: List of project overviews - schema: - type: array - items: - $ref: "#/definitions/projectOverview" - - /data-providers/{dataProviderId}/projects/{projectId}: - get: - summary: Get project name, nb of models & nb of selections (overviews of a project) - tags: [Project] - operationId: backend.controller.projects.get_project - parameters: - - name: 
dataProviderId - in: path - type: string - required: true - - name: projectId - in: path - type: string - required: true - responses: - 200: - description: project - schema: - $ref: "#/definitions/project" - - delete: - summary: remove a project from ID - tags: [Project] - operationId: backend.controller.projects.delete_project - parameters: - - name: dataProviderId - in: path - type: string - required: true - - name: projectId - in: path - type: string - required: true - responses: - 200: - description: project deleted - 404: - description: project doesn't exist - - /data-providers/{dataProviderId}/projects/{projectId}/dataIdList: - post: - summary: Get the project data id list - tags: [Project] - operationId: backend.controller.projects.get_data_id_list - parameters: - - name: dataProviderId - in: path - type: string - required: true - - name: projectId - in: path - type: string - required: true - - name: requestParameters - in: body - required: true - schema: - type: object - required: - - analysis - - from - - to - properties: - from: - type: integer - description: The index of the first data to return - x-nullable: true - to: - type: integer - description: The index of the last data to return - x-nullable: true - analysis: - type: object - required: - - id - properties: - id: - type: string - description: Id of the analysis - start: - type: boolean - description: If true, this is the first request of the analysis - end: - type: boolean - description: If true, this is the last request of the analysis - - responses: - 200: - description: project - schema: - $ref: "#/definitions/project" - - # BlockLevels - /data-providers/{dataProviderId}/projects/{projectId}/blocklevels: - post: - summary: add a new data blocks level structure - tags: [Project] - operationId: backend.controller.pythonModuleDp.post_block_levels - parameters: - - name: dataProviderId - in: path - type: string - required: true - - name: projectId - in: path - type: string - required: true - - name: block_levels - in: body - schema: - type: array - items: - type: object - properties: - name: - type: string - groundTruth: - type: array - items: - type: object - properties: - name: - type: string - type: - type: string - inputs: - type: array - items: - type: object - properties: - name: - type: string - type: - type: string - contexts: - type: array - items: - type: object - properties: - name: - type: string - type: - type: string - minItems: 1 - required: true - responses: - 200: - schema: - type: object - description: Block tructure added, the block structure is returned - - /data-providers/{dataProviderId}/projects/{projectId}/resultsStructure: - post: - summary: add a new expected results structure - tags: [Project] - operationId: backend.controller.pythonModuleDp.post_resultsStructure - parameters: - - name: dataProviderId - in: path - type: string - required: true - - name: projectId - in: path - type: string - required: true - - name: resultStructure - in: body - schema: - type: array - items: - type: object - required: - - name - - type - properties: - name: - type: - - number - - string - type: - type: string - description: text, number or bool - default: - type: - - number - - string - - boolean - group: - type: string - description: Optional group name, used to group the results columns in the UI - required: true - responses: - 200: - schema: - type: object - description: Results structure added, the resultStructure is returned - 404: - description: The project does not exist - 403: - description: The result 
structure already exist - - # Models - /data-providers/{dataProviderId}/projects/{projectId}/models: - post: - summary: add a model - tags: [Model] - operationId: backend.controller.models.post_model - parameters: - - name: dataProviderId - in: path - type: string - required: true - - name: projectId - in: path - type: string - required: true - - name: data - in: body - schema: - type: object - required: - - name - properties: - name: - type: string - metadata: - type: object - description: Model metadata - key value list - required: true - responses: - 200: - description: Model added - 409: - description: Warning - Model already exist - 402: - description: Model name contain invalid characters - 404: - description: Project not found - - /data-providers/{dataProviderId}/projects/{projectId}/models/{modelId}: - get: - summary: Get a model results id list - tags: [Model] - operationId: backend.controller.models.get_model_id_list - parameters: - - name: dataProviderId - in: path - type: string - required: true - - name: projectId - in: path - type: string - required: true - - name: modelId - in: path - type: string - required: true - responses: - 200: - description: model id list - schema: - type: array - items: - type: string - 404: - description: model or project doesn't exist - - delete: - summary: remove a model - tags: [Model] - operationId: backend.controller.models.delete_model - parameters: - - name: dataProviderId - in: path - type: string - required: true - - name: projectId - in: path - type: string - required: true - - name: modelId - in: path - type: string - required: true - responses: - 200: - description: model deleted - 404: - description: model or project doesn't exist - - ? /data-providers/{dataProviderId}/projects/{projectId}/models/{modelId}/resultsDict - : post: - summary: Add a results to a model - tags: [Model] - operationId: backend.controller.pythonModuleDp.add_results_dict - parameters: - - name: dataProviderId - in: path - type: string - required: true - - name: projectId - in: path - type: string - required: true - - name: modelId - in: path - type: string - required: true - - name: data - description: Tree object with existing block references, the end of the tree need to include the expected results in the block structure - in: body - required: true - schema: - type: object - required: - - results - properties: - results: - type: object - expected_results_order: - description: order of the given results array, by default is the project result structure - type: array - - responses: - 200: - description: model results added - 403: - description: Block not found - 404: - description: model or project doesn't exist - - ? 
/data-providers/{dataProviderId}/projects/{projectId}/models/{modelId}/getModelResults - : post: - summary: Get the model results from a sample list - tags: [Model] - operationId: backend.controller.models.get_results - parameters: - - name: dataProviderId - in: path - type: string - required: true - - name: projectId - in: path - type: string - required: true - - name: modelId - in: path - type: string - required: true - - name: data - in: body - required: true - schema: - type: object - required: - - sampleIds - properties: - sampleIds: - description: List of sample ID - items: - type: [string, integer, number] - responses: - 200: - description: model results - schema: - type: object - additionalProperties: - type: array - description: List of results ordered the same way as the project expected results - 404: - description: model or project doesn't exist - - # Blocks - /data-providers/{dataProviderId}/projects/{projectId}/blocks: - post: - summary: add a tree to an existing project block tree - tags: [Block] - operationId: backend.controller.pythonModuleDp.post_block_tree - parameters: - - name: dataProviderId - in: path - type: string - required: true - - name: projectId - in: path - type: string - description: project ID - required: true - - - name: data - in: body - schema: - type: object - required: - - blockTree - properties: - blockTree: - $ref: "#/definitions/blockTree" - required: true - - responses: - 200: - description: Block tree added - 403: - description: Invalid parameters - 404: - description: Project not found - - /data-providers/{dataProviderId}/projects/{projectId}/blocksFromSampleIds: - post: - summary: get a project tree form a sample list - tags: [Block] - operationId: backend.controller.data.get_data - parameters: - - name: dataProviderId - in: path - type: string - required: true - - name: projectId - in: path - type: string - description: project ID - required: true - - - name: data - in: body - required: true - schema: - type: object - required: - - sampleIds - properties: - sampleIds: - type: array - items: - type: [string, integer, number] - - analysis: - description: Informations about the analysis to help data-providers with data management - $ref: "#/definitions/analysis" - - responses: - 200: - description: Block tree with sample - schema: - type: object - 404: - description: Project or one of the models not found - - # Selections - /data-providers/{dataProviderId}/projects/{projectId}/selections/: - get: - summary: Get the project selections - tags: [Selection] - operationId: backend.controller.selection.get_selections - parameters: - - name: dataProviderId - in: path - type: string - required: true - - name: projectId - in: path - type: string - required: true - responses: - 200: - description: Project selections - schema: - type: array - items: - $ref: "#/definitions/selection" - - post: - summary: add a selection - tags: [Selection] - operationId: backend.controller.selection.post_selection - parameters: - - name: dataProviderId - in: path - type: string - required: true - - name: projectId - in: path - type: string - required: true - - name: data - in: body - schema: - type: object - required: - - sampleHashList - - selectionName - properties: - sampleHashList: - type: array - items: - type: string - description: List of the selection sample id (hash) - selectionName: - type: string - requestDescription: - type: string - x-nullable: true - requestId: - type: string - description: Id of the request that has created the selection. 
- x-nullable: true - responses: - 200: - description: selection added - schema: - type: object - - ? /data-providers/{dataProviderId}/projects/{projectId}/selections/{selectionId} - : get: - summary: Get a project selection id list - tags: [Selection] - operationId: backend.controller.selection.get_selection_id_list - parameters: - - name: dataProviderId - in: path - type: string - required: true - - name: projectId - in: path - type: string - required: true - - name: selectionId - in: path - type: string - required: true - responses: - 200: - description: Project selection id list - schema: - type: array - items: - type: string - 404: - description: Selection, project or data provider not found - - delete: - summary: delete a selection - tags: [Selection] - operationId: backend.controller.selection.delete_selection - parameters: - - name: dataProviderId - in: path - type: string - required: true - - name: projectId - in: path - type: string - required: true - - name: selectionId - in: path - type: string - required: true - responses: - 200: - description: selection deleted - - # Analysis layouts - /app/layouts/: - get: - summary: Get all layouts - tags: [Layouts] - operationId: backend.controller.layouts.get_layouts - responses: - 200: - description: Layouts for all projects - schema: - type: array - items: - $ref: "#/definitions/projectLayout" - - post: - summary: Add a layout - tags: [Layouts] - operationId: backend.controller.layouts.post_layout - parameters: - - name: data - in: body - required: true - schema: - type: object - required: - - name - - projectId - - dataProviderId - - layout - properties: - name: - type: string - description: Name of the configuration - maxLength: 100 - minLength: 1 - projectId: - type: string - description: Id of the project linked to the configuration - dataProviderId: - type: string - description: Id of the data provider linked to the project id - description: - type: string - layout: - $ref: "#/definitions/dashboardLayout" - lastLayoutSaved: - type: boolean - description: If true, the layout will be loaded by default - If a layout with lastLayoutSaved=true already exists, it will - be replaced by the new one - default: false - selectedColorColumn: - type: string - description: Column selected to be used as color - x-nullable: true - - responses: - 204: - description: Dashboard layout saved - - /app/layouts/{id}: - delete: - summary: Delete a layout - tags: [Layouts] - operationId: backend.controller.layouts.delete_layout - parameters: - - name: id - in: path - type: string - required: true - description: Id of the layout to delete - minLength: 1 - responses: - 204: - description: Layout deleted - 404: - description: Layout not found - - # Widget configuration - /app/widget-configurations/: - get: - summary: Get all widget configurations overview, - return the number of configurations for each widget - tags: [Widget configurations] - operationId: backend.controller.widgetConfigurations.get_all_configurations - responses: - 200: - description: Widget configurations number for each widget - schema: - type: object - description: Object with widget title as key and number of - configurations as value - additionalProperties: - type: integer - minimum: 0 - - /app/widgets/{widgetKey}/configurations: - get: - summary: Get the widget configurations - tags: [Widget configurations] - operationId: backend.controller.widgetConfigurations.get_widget_configurations - parameters: - - name: widgetKey - in: path - description: Title of the wigdet linked to the 
configuration - type: string - required: true - minLength: 1 - responses: - 200: - description: Widget configurations list - schema: - type: array - items: - type: object - required: - - id - - name - - projectId - - dataProviderId - - configuration - properties: - id: - type: string - name: - type: string - description: - type: string - projectId: - type: string - description: Id of the project linked to the configuration - dataProviderId: - type: string - description: Id of the data provider linked to the project id - creationDate: - type: string - configuration: - type: object - description: Key value list with the configuration, specific to the widget - post: - summary: Add a widget configuration - tags: [Widget configurations] - operationId: backend.controller.widgetConfigurations.post_configuration - parameters: - - name: widgetKey - in: path - required: true - type: string - minLength: 1 - - - name: data - in: body - required: true - schema: - type: object - required: - - name - - projectId - - dataProviderId - - configuration - properties: - name: - type: string - description: Name of the configuration - maxLength: 100 - minLength: 1 - projectId: - type: string - description: Id of the project linked to the configuration - dataProviderId: - type: string - description: Id of the data provider linked to the project id - description: - type: string - configuration: - type: object - description: Key value list with the configuration, specific to the widget - maxProperties: 15 - - responses: - 204: - description: Widget configuration saved - - /app/widgets/{widgetKey}/configurations/{id}: - delete: - summary: Delete a widget configuration - tags: [Widget configurations] - operationId: backend.controller.widgetConfigurations.delete_configuration - parameters: - - name: widgetKey - in: path - type: string - required: true - minLength: 1 - - name: id - in: path - type: string - required: true - description: Id of the configuration to delete - minLength: 1 - responses: - 204: - description: Widget configuration saved - 404: - description: Widget title or configuration not found - - # Data export - /app/exportMethods: - get: - summary: Get the application export methods - tags: [Export] - operationId: backend.controller.exportMethods.get_export_methods - responses: - 200: - description: Export method list - schema: - type: array - items: - type: object - required: - - type - - name - - parameters - - parameterNames - properties: - type: - type: string - description: Export method type - name: - type: string - description: Export method name - minLength: 1 - parameters: - type: array - description: Export method parameters - parametersNames: - type: array - description: Name of the export method parameters according to the export type - - post: - summary: Create an export method for the app - tags: [Export] - operationId: backend.controller.exportMethods.post_export_method - parameters: - - name: data - in: body - schema: - type: object - required: - - type - - name - - parameters - properties: - type: - type: string - description: Export method type - name: - type: string - description: Export method name - minLength: 1 - parameters: - type: array - description: Export method parameters, can be anything as long as the export method type can read it - required: true - responses: - 200: - description: Export method added - 400: - description: Wrong type or parameters - - /app/exportMethods/{exportMethodId}: - delete: - summary: Remove an export method for the app - tags: [Export] - 
operationId: backend.controller.exportMethods.delete_export_method - parameters: - - name: exportMethodId - in: path - type: string - required: true - responses: - 200: - description: Export method removed - 404: - description: Unknown export method - - /app/exportMethods/{exportMethodId}/exportData: - post: - summary: Export data with an export method - tags: [Export] - operationId: backend.controller.exportMethods.exportData - parameters: - - name: exportMethodId - in: path - type: string - required: true - - name: data - in: body - schema: - type: object - responses: - 200: - description: Data exported - - /data-providers/{dataProviderId}/projects/{projectId}/exportSelection: - post: - summary: Export a selected sample id list from an export method - tags: [Export] - operationId: backend.controller.exportMethods.exportSelection - parameters: - - name: dataProviderId - in: path - type: string - required: true - - name: projectId - in: path - type: string - required: true - - name: data - in: body - schema: - type: object - required: - - sampleHashList - - selectionName - - exportMethodId - properties: - sampleHashList: - type: array - items: - type: string - description: List of the selected sample id (hash) - selectionName: - type: string - exportMethodId: - type: string - annotationValue: - type: string - description: Any value set by the user - responses: - 200: - description: Selection exported - - # Algo providers - /app/algo-providers: - get: - summary: Get all Algo providers and their algorithms - tags: [AlgoProviders] - operationId: backend.controller.algoProviders.get_algo_providers - responses: - 200: - description: Algorithms list - schema: - type: array - items: - type: object - $ref: "#/definitions/algoProvider" - - post: - summary: Add an Algo provider - tags: [AlgoProviders] - operationId: backend.controller.algoProviders.post_algo_provider - parameters: - - name: data - in: body - required: true - schema: - type: object - required: - - name - - url - properties: - name: - type: string - description: Name of the Algo provider - maxLength: 100 - minLength: 1 - url: - type: string - description: Url of the Algo provider - - responses: - 204: - description: Algorithm saved - - /app/algo-providers/{name}: - delete: - summary: Delete an Algo provider - tags: [AlgoProviders] - operationId: backend.controller.algoProviders.delete_algo_provider - parameters: - - name: name - in: path - type: string - required: true - description: Name of the Algo provider to delete - minLength: 1 - responses: - 204: - description: Algo provider deleted - 404: - description: Algo provider not found - - /app/algo-providers/{algoProviderName}/algorithms/use/{algoId}: - post: - summary: Use an algorithm of an Algo provider - tags: [AlgoProviders] - operationId: backend.controller.algoProviders.use_algo - parameters: - - name: algoProviderName - in: path - type: string - required: true - description: Name of the Algo provider to use - minLength: 1 - - name: algoId - in: path - type: string - required: true - description: Id of the algorithm to use - minLength: 1 - - name: data - in: body - required: true - schema: - type: object - required: - - inputs - properties: - inputs: - type: array - description: Inputs of the algorithm - items: - type: object - required: - - name - - value - properties: - name: - type: string - description: Name of the input, - must be the same as the one defined in the input list - value: - description: Value of the input, depending on the input type - - responses: - 200: - 
description: Algorithm result - schema: - type: object - required: - - outputs - properties: - outputs: - type: array - description: Outputs of the algorithm - items: - type: object - required: - - name - - value - properties: - name: - type: string - description: Name of the output, - must be the same as the one defined in the output list - value: - description: Value of the output, depending on the output type - - # Statistical operations - /statisticalOperations/pearsonCorrelation: - post: - summary: Calculate pearson correlation between rows - tags: [Statistical operations] - operationId: backend.controller.statisticalOperations.pearsonCorrelation - parameters: - - name: data - in: body - description: Array of rows with the same sizes (discrete & continuous) - schema: - type: array - items: - type: array - items: - type: number - - responses: - 200: - description: pearson correlation matrix - schema: - type: array - items: - type: array - items: - type: array - items: - type: number - - 403: - description: Invalid Input array, not the same size - - /statisticalOperations/spearmanCorrelation: - post: - summary: Calculate spearman correlation between rows - tags: [Statistical operations] - operationId: backend.controller.statisticalOperations.spearmanCorrelation - parameters: - - name: data - in: body - description: Array of rows with the same sizes (discrete & continuous) - schema: - type: array - items: - type: array - items: - type: number - - responses: - 200: - description: spearman correlation matrix - schema: - type: array - items: - type: array - items: - type: array - items: - type: number - - 403: - description: Invalid Input array, not the same size - - /statisticalOperations/continuousMutualInformation: - post: - summary: Calculate mutual informations - tags: [Statistical operations] - operationId: backend.controller.statisticalOperations.mutualInformation - parameters: - - name: data - in: body - description: mutual information matrix - schema: - type: object - required: - - list_continuous - - list_discrete - properties: - k: - type: integer - description: number of neighbors - base: - type: integer - description: takes 2 (unit =bits), 10 (unit= nats) - list_continuous: - type: array - items: - type: array - items: - type: number - description: Array of rows with the same sizes (continuous only) - list_discrete: - type: array - items: - type: array - items: - type: number - description: Array of rows with the same sizes (dicrete only) - normalise: - type: string - default: "max" - description: This parameter is used to normalise the mutual information coefficient, it takes either 'max' or 'min' or 'square root' or 'mean' or 'none' - - responses: - 200: - description: mutual information matrix - schema: - type: array - items: - type: array - items: - type: number - 403: - description: Invalid Input array, not the same size - - /statisticalOperations/higherDimensionMutualInformation: - post: - summary: Calculate the mutual information between variables - tags: [Statistical operations] - operationId: backend.controller.statisticalOperations.higherDimensionMutualInformation - parameters: - - name: data - in: body - schema: - type: object - required: - - X - - k - properties: - k: - type: integer - description: number of neighbors (must be < to len(X)) - base: - type: integer - description: takes 2 (unit =bits), 10 (unit= nats) - default: 2 - X: - type: array - items: - type: array - items: - type: number - description: list of list of the variables, it can take more than 2 
variables - - responses: - 200: - description: mutual information between the variables - schema: - type: number - 403: - description: Invalid Input - - /statisticalOperations/continuousAndHigherDimensionMutualInformation: - post: - summary: Calculate matrix mutual informations and the higher Dimension - tags: [Statistical operations] - operationId: backend.controller.statisticalOperations.mutualAndHigherInformation - parameters: - - name: data - in: body - description: mutual information matrix - schema: - type: object - required: - - list_continuous - - list_discrete - - k - properties: - k: - type: integer - description: number of neighbors - base: - type: integer - description: takes 2 (unit =bits), 10 (unit= nats) - list_continuous: - type: array - items: - type: array - items: - type: number - description: Array of rows with the same sizes (continuous only) - list_discrete: - type: array - items: - type: array - items: - type: number - description: Array of rows with the same sizes (dicrete only) - normalise: - type: string - default: "max" - description: This parameter is used to normalise the mutual information coefficient, it takes either 'max' or 'min' or 'square root' or 'mean' or 'none' - - responses: - 200: - description: mutual information matrix - schema: - type: array - items: - type: array - items: - type: number - 403: - description: Invalid Input array, not the same size - -definitions: - # Projects - projectOverview: - type: object - required: - - id - - dataProviderId - - name - properties: - id: - type: string - description: project ID - - dataProviderId: - type: string - description: project data provider ID - - name: - type: string - description: project name - - nbModels: - type: integer - description: number of Models - - nbSelections: - type: integer - description: number of selections - - creationDate: - type: string - format: date-time - description: creation date - - updateDate: - type: string - format: date-time - description: last update date - - project: - allOf: - - $ref: "#/definitions/projectOverview" - - type: object - required: - - columns - - resultsStructure - properties: - columns: - description: list of the projects columns that will be used to display the data - type: array - items: - $ref: "#/definitions/column" - - resultsStructure: - description: list of the projects columns that will be used to display the model results - type: array - items: - $ref: "#/definitions/column" - - column: - description: column information, for data or model results - type: object - required: - - name - - type - properties: - name: - type: string - description: column name - category: - type: string - description: column category, by default it is 'other' or 'result' for model results - enum: - - other - - context - - input - - groundtruth - type: - type: string - description: column type - enum: - - auto - - text - - number - - boolean - - group: - type: string - description: column group, used to group columns in the UI - - blockLevelInfo: - type: object - required: - - name - properties: - name: - type: string - description: block level name - - # Artefact - artefact: - type: object - required: - - name - properties: - name: - type: string - creationDate: - type: string - format: date-time - updateDate: - type: string - format: date-time - version: - type: string - metadata: - type: object - additionalProperties: - type: string - example: - meteo: soleil - temperature: 50 - - # Models - modelOverview: - type: object - required: - - name - - id - properties: - 
name: - type: string - id: - type: string - nbEvaluatedSamples: - type: integer - updateDate: - type: string - format: date-time - creationDate: - type: string - format: date-time - metadata: - type: object - - model: - allOf: - - $ref: "#/definitions/artefact" - - type: object - properties: - hyperParameters: - type: array - items: - type: string - trainingLogs: - type: array - items: - type: string - - # Selections and requests - selection: - allOf: - - $ref: "#/definitions/artefact" - - type: object - properties: - nbSamples: - type: integer - - request: - allOf: - - $ref: "#/definitions/artefact" - - type: object - required: - - filters - properties: - filters: - type: array - items: - type: object - required: - - type - - columnLabel - properties: - type: - type: string - columnLabel: - type: string - description: Can be of type 'values' of 'intervals', 'values' - filters have a 'values' key that is a list of text or numbers - , 'intervals' filters have an 'interval' key that is a list of - {min, max} intervals - - # DataTypes - dataType: - type: object - properties: - gdtList: - type: array - items: - type: object - description: list of ground thruth (key - value) - - inputList: - type: array - items: - type: object - description: list of inputs (key - value) - - contextList: - type: array - items: - type: object - description: list of context (key - value) - - # block & sample - block: - allOf: - - $ref: "#/definitions/dataType" - - type: object - required: - - name - properties: - name: - type: string - - blockOverview: - type: object - required: - - name - properties: - name: - type: string - description: name of the block - - sample: - allOf: - - $ref: "#/definitions/artefact" - - type: object - - blockTree: - type: array - items: - $ref: "#/definitions/block" - - # Data providers - dataProvider: - type: object - properties: - name: - type: string - url: - type: string - status: - type: boolean - description: True if the data provider is up and running - type: - type: string - - # Analysis - analysis: - type: object - description: Data for the current analysis - required: - - id - properties: - id: - type: string - description: Unique ID generated for the analysis, it will be the same in this analysis requests - start: - type: boolean - description: True if this is the first request of the analysis - end: - type: boolean - description: True if this is the last request of the analysis - - # Layout - projectLayout: - description: Project layout, information about a dashboard layout - type: object - required: - - id - - name - - projectId - - dataProviderId - - layout - properties: - id: - type: string - name: - type: string - description: - type: string - projectId: - type: string - description: Id of the project linked to the layout - dataProviderId: - type: string - description: Id of the data provider linked to the project id - creationDate: - type: string - format: date-time - layout: - $ref: "#/definitions/dashboardLayout" - lastLayoutSaved: - type: boolean - description: True if this is the last layout saved - selectedColorColumn: - type: string - description: Column selected to be used as color - x-nullable: true - - dashboardLayout: - description: Dashboard layout, list of widgets with their - position and configuration - - type: array - items: - type: object - required: - - widgetKey - - x - - y - - width - - height - properties: - widgetKey: - type: string - description: Key of the widget - x: - type: integer - description: x position of the widget - y: - type: integer - 
description: y position of the widget - width: - type: integer - description: width of the widget - height: - type: integer - description: height of the widget - config: - type: object - description: Configuration of the widget - x-nullable: true - localFilters: - type: array - description: Filters applied to the widget - x-nullable: true - items: - type: object - - # Algo providers - algoProvider: - description: Informations about an AlgoProvider - required: - - url - - name - - status - - algorithms - properties: - name: - type: string - description: Name of the AlgoProvider - minLength: 1 - url: - type: string - description: Url of the AlgoProvider - minLength: 1 - status: - type: boolean - description: True if the algo provider is up and running - algorithms: - type: array - description: List of algorithms provided by the AlgoProvider - items: - type: object - description: Informations about an algorithm - required: - - id - - inputs - - outputs - properties: - id: - type: string - description: The id of the algorithm, must be unique, will be used to identify the algorithm - example: "my-algorithm-01" - name: - type: string - description: The name of the algorithm - example: "My algorithm 01" - description: - type: string - description: The description of the algorithm - example: "This algorithm is used to do something" - tags: - type: array - description: The list of tags of the algorithm - items: - type: string - example: ["tag1", "tag2"] - author: - type: string - description: The author of the algorithm - example: "Ada Lovelace" - creationDate: - type: string - description: The creation date of the algorithm, ISO 8601 format, YYYY-MM-DD - example: "2023-01-01" - format: date - x-nullable: true - updateDate: - type: string - description: The last update date of the algorithm, ISO 8601 format, YYYY-MM-DD - example: "2023-03-20" - format: date - x-nullable: true - version: - type: string - description: The version of the algorithm - example: "0.1.0" - inputs: - type: array - description: The list of inputs of the algorithm - items: - type: object - $ref: "#/definitions/algoInputOutput" - outputs: - type: array - description: The list of inputs of the algorithm - items: - type: object - $ref: "#/definitions/algoInputOutput" - - algoInputOutput: - type: object - description: Informations about an input or an output of an algorithm - required: - - name - - type - properties: - name: - type: string - description: The name of the input or output - example: "Input_A" - type: - type: string - enum: - - string - - number - - boolean - - array - - # Next is useless for outputs - availableValues: - type: array - description: The list of available values for this input - example: ["my value", "my other value"] - default: - description: The default value for this input - example: "my value" - min: - type: number - description: The minimum value for number inputs - example: 0 - max: - type: number - description: The maximum value for number inputs - example: 10 - arrayType: - type: string - description: For array inputs, specify type of the array - enum: - - string - - number - - boolean - lengthMin: - type: number - description: The minimum length of the array for array inputs - example: 0 - lengthMax: - type: number - description: The maximum length of the array for array inputs - example: 10 diff --git a/build/lib/backend/tests/__init__.py b/build/lib/backend/tests/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/build/lib/backend/tests/test_algo_providers.py 
b/build/lib/backend/tests/test_algo_providers.py deleted file mode 100644 index b13addc5b..000000000 --- a/build/lib/backend/tests/test_algo_providers.py +++ /dev/null @@ -1,95 +0,0 @@ -import requests -import ujson as json - -appUrl = "http://localhost:3000/" -test_algo_provider_name = "test_create_algo_provider" - -algo_provider_list = [] - - -def test_get_algorithms(): - global algo_provider_list - url = appUrl + "app/algo-providers" - resp = requests.get(url=url, headers={}) - assert resp.status_code == 200 - print(resp.text) - algo_providers = json.loads(resp.text) - assert type(algo_providers) is list - - for algo_provider in algo_providers: - assert type(algo_provider) is dict - assert "name" in algo_provider - assert "url" in algo_provider - assert "status" in algo_provider - assert "algorithms" in algo_provider - - assert type(algo_provider["name"]) is str - assert type(algo_provider["url"]) is str - assert type(algo_provider["status"]) is bool - assert type(algo_provider["algorithms"]) is list - - for algo in algo_provider["algorithms"]: - assert type(algo) is dict - assert "id" in algo - assert "inputs" in algo - assert "outputs" in algo - - assert type(algo["id"]) is str - assert type(algo["inputs"]) is list - assert type(algo["outputs"]) is list - - for input in algo["inputs"]: - assert type(input) is dict - assert "name" in input - assert "type" in input - - assert type(input["name"]) is str - assert type(input["type"]) is str - - for output in algo["outputs"]: - assert type(output) is dict - assert "name" in output - assert "type" in output - - assert type(output["name"]) is str - assert type(output["type"]) is str - - algo_provider_list = algo_providers - - -def test_add_algo_provider(): - url = appUrl + "app/algo-providers" - data = {"name": test_algo_provider_name, "url": "http://localhost:4000"} - resp = requests.post(url=url, headers={}, json=data) - assert resp.status_code == 204 - - # Test that it exists - resp = requests.get(url=url, headers={}) - assert resp.status_code == 200 - print(resp.text) - algo_providers = json.loads(resp.text) - assert type(algo_providers) is list - assert len(algo_providers) == len(algo_provider_list) + 1 - assert any( - algo_provider["name"] == test_algo_provider_name - for algo_provider in algo_providers - ) - - -def test_delete_algo_provider(): - url = appUrl + "app/algo-providers/" + test_algo_provider_name - resp = requests.delete(url=url, headers={}) - assert resp.status_code == 204 - - # Test that it was removed from the list - url = appUrl + "app/algo-providers" - resp = requests.get(url=url, headers={}) - assert resp.status_code == 200 - print(resp.text) - algo_providers = json.loads(resp.text) - assert type(algo_providers) is list - assert len(algo_providers) == len(algo_provider_list) - assert not any( - algo_provider["name"] == test_algo_provider_name - for algo_provider in algo_providers - ) diff --git a/build/lib/backend/tests/test_data_providers.py b/build/lib/backend/tests/test_data_providers.py deleted file mode 100644 index 41563f4dd..000000000 --- a/build/lib/backend/tests/test_data_providers.py +++ /dev/null @@ -1,96 +0,0 @@ -import requests -import ujson as json - -appUrl = "http://localhost:3000/" -test_data_provider_name = "test_create_data_provider" - -data_providers = [] - - -def test_get_data_providers(): - global data_providers - url = appUrl + "data-providers" - resp = requests.get(url=url, headers={}) - assert resp.status_code == 200 - print(resp.text) - assert type(json.loads(resp.text)) is list - - for dp in 
json.loads(resp.text): - assert type(dp) is dict - assert "name" in dp - assert "type" in dp - - data_providers = json.loads(resp.text) - - -def test_get_data_provider(): - for dp in data_providers: - url = appUrl + "data-providers/" + dp["name"] - resp = requests.get(url=url, headers={}) - assert resp.status_code == 200 - - -def test_add_data_provider(): - url = appUrl + "data-providers" - data = {"name": test_data_provider_name, "type": "I DONT EXIST"} - resp = requests.post(url=url, headers={}, json=data) - assert resp.status_code == 400 - - -def test_add_web_data_provider(): - # Test no url - url = appUrl + "data-providers" - data = {"name": test_data_provider_name, "type": "Web"} - resp = requests.post(url=url, headers={}, json=data) - assert resp.status_code == 400 - - # Test bad url - data = {"name": test_data_provider_name, "type": "Web", "url": ""} - resp = requests.post(url=url, headers={}, json=data) - assert resp.status_code == 400 - data = {"name": test_data_provider_name, "type": "Web", "url": "I DONT EXIST"} - resp = requests.post(url=url, headers={}, json=data) - assert resp.status_code == 400 - - # Test good url - data = { - "name": test_data_provider_name, - "type": "Web", - "url": "http://localhost:4000", - } - resp = requests.post(url=url, headers={}, json=data) - assert resp.status_code == 204 - - # Test that it exists - url = appUrl + "data-providers/" + test_data_provider_name - resp = requests.get(url=url, headers={}) - assert resp.status_code == 200 - - # Test that it was added to the list - url = appUrl + "data-providers" - resp = requests.get(url=url, headers={}) - assert resp.status_code == 200 - assert type(json.loads(resp.text)) is list - assert len(json.loads(resp.text)) == len(data_providers) + 1 - assert any(dp["name"] == test_data_provider_name for dp in json.loads(resp.text)) - - -def test_delete_data_provider(): - url = appUrl + "data-providers/" + test_data_provider_name - resp = requests.delete(url=url, headers={}) - assert resp.status_code == 204 - - # Test that it was removed from the list - url = appUrl + "data-providers" - resp = requests.get(url=url, headers={}) - assert resp.status_code == 200 - assert type(json.loads(resp.text)) is list - assert len(json.loads(resp.text)) == len(data_providers) - assert not any( - dp["name"] == test_data_provider_name for dp in json.loads(resp.text) - ) - - # Test that it no longer exists - url = appUrl + "data-providers/" + test_data_provider_name - resp = requests.get(url=url, headers={}) - assert resp.status_code == 404 diff --git a/build/lib/backend/tests/test_layouts.py b/build/lib/backend/tests/test_layouts.py deleted file mode 100644 index a34b0e5a5..000000000 --- a/build/lib/backend/tests/test_layouts.py +++ /dev/null @@ -1,131 +0,0 @@ -import requests -import ujson as json - -appUrl = "http://localhost:3000/" -testLayoutId = None - - -def delete_layout(id): - url = appUrl + "app/layouts/" + id - resp = requests.request("DELETE", url, headers={}, data={}) - assert resp.status_code == 204 - - -def test_get_layouts(): - url = appUrl + "app/layouts" - resp = requests.request("GET", url, headers={}, data={}) - layouts = json.loads(resp.text) - print(layouts) - assert resp.status_code == 200 - assert type(layouts) is list - - # Remove all layouts - for layout in layouts: - assert type(layout) is dict - assert "id" in layout - delete_layout(layout["id"]) - - -def test_add_layout(): - global testLayoutId - url = appUrl + "app/layouts/" - data = { - "name": "testName", - "description": "testDescription", - 
"projectId": "testProjectId", - "dataProviderId": "testDataProviderId", - "layout": [ - { - "x": 0, - "y": 0, - "width": 1, - "height": 1, - "widgetKey": "testWidgetKey", - "config": {}, - "name": "testName", - } - ], - "selectedColorColumn": "TestColorColumn", - } - resp = requests.request("POST", url, headers={}, json=data) - assert resp.status_code == 204 - - # Check if the layout was added - resp = requests.request("GET", url, headers={}, data={}) - layouts = json.loads(resp.text) - assert resp.status_code == 200 - assert type(layouts) is list - assert len(layouts) == 1 - assert layouts[0]["name"] == data["name"] - assert layouts[0]["description"] == data["description"] - assert layouts[0]["projectId"] == data["projectId"] - assert layouts[0]["dataProviderId"] == data["dataProviderId"] - assert type(layouts[0]["layout"]) is list - assert len(layouts[0]["layout"]) == 1 - assert layouts[0]["layout"][0]["x"] == data["layout"][0]["x"] - assert layouts[0]["layout"][0]["y"] == data["layout"][0]["y"] - assert layouts[0]["selectedColorColumn"] == data["selectedColorColumn"] - - assert "id" in layouts[0] - assert type(layouts[0]["id"]) is str - assert len(layouts[0]["id"]) > 0 - testLayoutId = layouts[0]["id"] - - -def test_delete_layout(): - # Remove the layout - delete_layout(testLayoutId) - - # Check if the layout was removed - url = appUrl + "app/layouts/" - resp = requests.request("GET", url, headers={}, data={}) - layouts = json.loads(resp.text) - assert resp.status_code == 200 - assert type(layouts) is list - assert len(layouts) == 0 - - -def test_last_layout_saved(): - # if lastLayoutSaved is true, the previous layout with lastLayoutSaved = true - # should be deleted - - # Add the first layout - url = appUrl + "app/layouts/" - data = { - "name": "testName", - "description": "testDescription", - "projectId": "testProjectId", - "dataProviderId": "testDataProviderId", - "lastLayoutSaved": True, - "layout": [], - } - resp = requests.request("POST", url, headers={}, json=data) - assert resp.status_code == 204 - - # Add the second layout - data = { - "name": "testName2", - "description": "testDescription", - "projectId": "testProjectId", - "dataProviderId": "testDataProviderId", - "lastLayoutSaved": True, - "layout": [], - } - resp = requests.request("POST", url, headers={}, json=data) - assert resp.status_code == 204 - - # Check if the first layout was removed - url = appUrl + "app/layouts/" - resp = requests.request("GET", url, headers={}, data={}) - layouts = json.loads(resp.text) - assert resp.status_code == 200 - assert type(layouts) is list - assert len(layouts) == 1 - assert layouts[0]["lastLayoutSaved"] is True - assert layouts[0]["name"] == data["name"] - assert layouts[0]["description"] == data["description"] - assert layouts[0]["projectId"] == data["projectId"] - assert layouts[0]["dataProviderId"] == data["dataProviderId"] - - # Remove the layout - delete_layout(layouts[0]["id"]) diff --git a/build/lib/backend/tests/test_pythonModuleDataProvider.py b/build/lib/backend/tests/test_pythonModuleDataProvider.py deleted file mode 100644 index b81567895..000000000 --- a/build/lib/backend/tests/test_pythonModuleDataProvider.py +++ /dev/null @@ -1,137 +0,0 @@ -import requests -import ujson as json - -PYTHON_DATA_PROVIDER_ID = "Python module Data Provider" -appUrl = "http://localhost:3000/" -test_project_name = "test_create_project" -test_project_id = None - -# ============== PROJECTS ================= - - -def test_get_projects(): - url = appUrl + "projects" - resp = requests.get(url=url, 
headers={}) - assert resp.status_code == 200 - assert type(json.loads(resp.text)) is list - - -def test_get_bad_project(): - projectId = "I_DO_NOT_EXIST" - url = ( - appUrl + "data-providers/" + PYTHON_DATA_PROVIDER_ID + "/projects/" + projectId - ) - resp = requests.request("GET", url, headers={}, data={}) - assert resp.status_code == 404 - - resp = requests.request("DELETE", url, headers={}, data={}) - assert resp.status_code == 404 - - -def test_create_project_noName(): - # create - url = appUrl + "projects" - resp = requests.post(url=url, headers={}, json={}) - assert resp.status_code == 400 - - -def test_create_project(): - global test_project_id - # delete if exists - projectId = test_project_name - url = ( - appUrl + "data-providers/" + PYTHON_DATA_PROVIDER_ID + "/projects/" + projectId - ) - resp = requests.request("DELETE", url, headers={}, data={}) - assert resp.status_code == 200 or resp.status_code == 404 - - # create - url = appUrl + "projects" - resp = requests.post(url=url, headers={}, json={"projectName": test_project_name}) - assert resp.status_code == 200 - - # Get Id - data = json.loads(resp.text) - test_project_id = data["id"] - assert test_project_id is not None - assert len(test_project_id) > 0 - assert type(test_project_id) is str - - # Test can't create same project - resp = requests.post(url=url, headers={}, json={"projectName": test_project_name}) - assert resp.status_code == 400 - assert "already exists" in resp.text - - -def test_get_project(): - # Find back - url = ( - appUrl - + "data-providers/" - + PYTHON_DATA_PROVIDER_ID - + "/projects/" - + test_project_id - ) - resp = requests.request("GET", url, headers={}, json={}) - assert resp.status_code == 200 - proj = json.loads(resp.text) - assert type(proj) is dict - assert type(proj["columns"]) is list - assert proj["models"] == [] - assert len(proj["name"]) > 0 - assert proj["name"] == test_project_name - - -def test_remove_project(): - # Project exists back - url = ( - appUrl - + "data-providers/" - + PYTHON_DATA_PROVIDER_ID - + "/projects/" - + test_project_id - ) - resp = requests.request("GET", url, headers={}, json={}) - assert resp.status_code == 200 - - # remove - url = ( - appUrl - + "data-providers/" - + PYTHON_DATA_PROVIDER_ID - + "/projects/" - + test_project_id - ) - resp = requests.request("DELETE", url, headers={}, data={}) - assert resp.status_code == 200 - - # Dont Find back - url = ( - appUrl - + "data-providers/" - + PYTHON_DATA_PROVIDER_ID - + "/projects/" - + test_project_id - ) - resp = requests.request("GET", url, headers={}, json={}) - assert resp.status_code == 404 - - # Cant remove again - url = ( - appUrl - + "data-providers/" - + PYTHON_DATA_PROVIDER_ID - + "/projects/" - + test_project_id - ) - resp = requests.request("DELETE", url, headers={}, data={}) - assert resp.status_code == 404 - - -def test_project_nameTooLong(): - testProjectName = "a" * 256 - url = appUrl + "projects" - payload = {"projectName": testProjectName, "blockLevelInfo": []} - headers = {"content-type": "application/json"} - resp = requests.post(url=url, headers=headers, json=payload) - assert resp.status_code == 400 diff --git a/build/lib/backend/tests/test_widget_configurations.py b/build/lib/backend/tests/test_widget_configurations.py deleted file mode 100644 index 34c2a7613..000000000 --- a/build/lib/backend/tests/test_widget_configurations.py +++ /dev/null @@ -1,85 +0,0 @@ -import requests -import ujson as json - -appUrl = "http://localhost:3000/" -testWidgetKey = "testWidgetKey" -testConfigurationId = None - 
- -def delete_configuration(id): - url = appUrl + "app/widgets/" + testWidgetKey + "/configurations/" + id - resp = requests.request("DELETE", url, headers={}, data={}) - assert resp.status_code == 204 - - -def test_get_configurations(): - url = appUrl + "app/widgets/" + testWidgetKey + "/configurations" - resp = requests.request("GET", url, headers={}, data={}) - configurations = json.loads(resp.text) - print(configurations) - assert resp.status_code == 200 - assert type(configurations) is list - - # Remove all configurations - for conf in configurations: - assert type(conf) is dict - assert "id" in conf - delete_configuration(conf["id"]) - - -def test_add_configuration(): - global testConfigurationId - url = appUrl + "app/widgets/" + testWidgetKey + "/configurations" - data = { - "name": "testName", - "description": "testDescription", - "projectId": "testProjectId", - "dataProviderId": "testDataProviderId", - "configuration": {"testKey": "testValue"}, - } - resp = requests.request("POST", url, headers={}, json=data) - assert resp.status_code == 204 - - # Check if the configuration was added - resp = requests.request("GET", url, headers={}, data={}) - configurations = json.loads(resp.text) - assert resp.status_code == 200 - assert type(configurations) is list - assert len(configurations) == 1 - assert configurations[0]["name"] == data["name"] - assert configurations[0]["description"] == data["description"] - assert configurations[0]["projectId"] == data["projectId"] - assert configurations[0]["dataProviderId"] == data["dataProviderId"] - assert type(configurations[0]["configuration"]) is dict - assert ( - configurations[0]["configuration"]["testKey"] - == data["configuration"]["testKey"] - ) - - assert "id" in configurations[0] - assert type(configurations[0]["id"]) is str - assert len(configurations[0]["id"]) > 0 - testConfigurationId = configurations[0]["id"] - - -def test_get_configurations_overview(): - url = appUrl + "app/widget-configurations" - resp = requests.request("GET", url, headers={}, data={}) - configurations = json.loads(resp.text) - assert resp.status_code == 200 - assert type(configurations) is dict - assert testWidgetKey in configurations - assert configurations[testWidgetKey] == 1 - - -def test_delete_configuration(): - # Remove the configuration - delete_configuration(testConfigurationId) - - # Check if the configuration was removed - url = appUrl + "app/widgets/" + testWidgetKey + "/configurations" - resp = requests.request("GET", url, headers={}, data={}) - configurations = json.loads(resp.text) - assert resp.status_code == 200 - assert type(configurations) is list - assert len(configurations) == 0 diff --git a/build/lib/backend/utils/__init__.py b/build/lib/backend/utils/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/build/lib/backend/utils/layouts/__init__.py b/build/lib/backend/utils/layouts/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/build/lib/backend/utils/layouts/layouts.py b/build/lib/backend/utils/layouts/layouts.py deleted file mode 100644 index 0fdc654e3..000000000 --- a/build/lib/backend/utils/layouts/layouts.py +++ /dev/null @@ -1,149 +0,0 @@ -import os -import json -import backend.utils.utils as utils -import uuid - - -LAYOUTS_PATH = "data/layouts.json" - - -# Layouts file structure -# [ -# { -# "id": "" -# "name": "" -# "description": "", -# "projectId": "" -# "dataProviderId": "" -# "creationDate": 0 -# "layout": [ -# # Widget position -# { -# "widgetKey": "parallelCoordinate", -# "x": 0, 
-# "y": 0, -# "width": 0, -# "height": 0, -# "config": {}, # Widget config (optional) -# "name": "", # Name given to the widget (optional) -# "localFilters" : [{}], -# }, -# ], -# "selectedColorColumn": "col", # (optional) -# }, -# ... -# ] - - -def setup_layouts(): - # Create the folder if it does not exist - if not os.path.exists("data"): - os.mkdir("data") - - # Create the file if it does not exist - if not os.path.exists(LAYOUTS_PATH): - with open(LAYOUTS_PATH, "w") as json_file: - json.dump([], json_file) - - -def get_layouts(): - # Return the layouts list - try: - with open(LAYOUTS_PATH) as json_file: - return json.load(json_file) - - except FileNotFoundError: - setup_layouts() - return [] - - except json.decoder.JSONDecodeError as e: - print("Error while reading the layouts file") - print(e) - print("The file will be reset") - _save_layouts([]) - return [] - - -def add_layout(data): - # project_id, data_provider_id, conf_description, conf_name, conf - # Add a new widget layout - # Generate id - id = str(uuid.uuid1()) - - layout_to_add = [] - - for widget in data["layout"]: - widget_position = { - "x": widget["x"], - "y": widget["y"], - "width": widget["width"], - "height": widget["height"], - "widgetKey": widget["widgetKey"], - } - - keys = ["config", "name", "localFilters"] - - for key in keys: - if key in widget: - widget_position[key] = widget[key] - - layout_to_add.append(widget_position) - - file_to_add = { - "id": id, - "name": data["name"], - "description": data["description"], - "projectId": data["projectId"], - "dataProviderId": data["dataProviderId"], - "creationDate": utils.timeNow(), - "layout": layout_to_add, - "lastLayoutSaved": False, - } - - if "selectedColorColumn" in data: - file_to_add["selectedColorColumn"] = data["selectedColorColumn"] - - layouts = get_layouts() - - # Check if their is already a "last saved" layout - if "lastLayoutSaved" in data and data["lastLayoutSaved"]: - file_to_add["lastLayoutSaved"] = True - - for layout in layouts: - if ( - layout["projectId"] == data["projectId"] - and layout["dataProviderId"] == data["dataProviderId"] - and "lastLayoutSaved" in layout - and layout["lastLayoutSaved"] - ): - # Remove the "last saved" layout - layouts.remove(layout) - - # Save layout - layouts.append(file_to_add) - _save_layouts(layouts) - - -def delete_layout(id): - # Delete the widget layout by its name - layouts = get_layouts() - - for layout in layouts: - if layout["id"] == id: - layouts.remove(layout) - - _save_layouts(layouts) - - -def _save_layouts(layouts, retry=False): - # Update the json file - try: - with open(LAYOUTS_PATH, "w") as json_file: - json.dump(layouts, json_file) - except FileNotFoundError: - if not retry: - setup_layouts() - _save_layouts(layouts, True) - else: - print("Error while saving the layouts file") - print("The file will not be saved") diff --git a/build/lib/backend/utils/utils.py b/build/lib/backend/utils/utils.py deleted file mode 100644 index a131b89de..000000000 --- a/build/lib/backend/utils/utils.py +++ /dev/null @@ -1,46 +0,0 @@ -import time -import yaml -from yaml.loader import SafeLoader -from urllib.parse import urlparse - - -def get_app_version(): - # Read the version from the API YAML file - try: - with open("swagger.yaml") as f: - data = yaml.load(f, Loader=SafeLoader) - return data["info"]["version"] - except Exception as e: - print(e) - return "?.?.?" 
- - -# Date -def timeNow(): - return time.time() * 1000 - - -# Url -def is_url_valid(url): - try: - result = urlparse(url) - return all([result.scheme, result.netloc]) - except Exception: - return False - - -# Name -def is_valid_name(name): - # /, &, | are not allowed in data-providers & algo-providers names - if ( - "/" in name - or "&" in name - or "|" in name - or len(name) == 0 - or len(name) > 50 - or name[0] == " " - or name[-1] == " " - ): - return False - - return True diff --git a/build/lib/backend/utils/widgetConfigurations/__init__.py b/build/lib/backend/utils/widgetConfigurations/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/build/lib/backend/utils/widgetConfigurations/widgetConfigurations.py b/build/lib/backend/utils/widgetConfigurations/widgetConfigurations.py deleted file mode 100644 index 8fddf1d59..000000000 --- a/build/lib/backend/utils/widgetConfigurations/widgetConfigurations.py +++ /dev/null @@ -1,123 +0,0 @@ -import os -import json -import backend.utils.utils as utils -import uuid - -CONF_PATH = "data/widgetConfigurations.json" - -# Configuration file structure -# { -# "widgetKey": [ -# { -# "id": "" -# "name": "" -# "description": "", -# "projectId": "" -# "dataProviderId": "" -# "creationDate": 0 -# -# "configuration": {}, -# }, -# ... -# ] -# } - - -def setup_widget_configurations(): - # Create the folder if it does not exist - if not os.path.exists("data"): - os.mkdir("data") - - # Create the file if it does not exist - if not os.path.exists(CONF_PATH): - _save_configurations({}) - - -def get_configurations_overview(): - # Return the number of configurations for each widget - all_configurations = _get_all_configurations() - - configurations_overview = {} - for widget_key in all_configurations: - configurations_overview[widget_key] = len(all_configurations[widget_key]) - - return configurations_overview - - -def get_configurations(widget_key): - # Return the configurations list of the widget - all_configurations = _get_all_configurations() - - if widget_key in all_configurations: - return all_configurations[widget_key] - else: - return [] - - -def add_configuration(widget_key, data): - # project_id, data_provider_id, conf_description, conf_name, conf - # Add a new widget configuration - configurations = _get_all_configurations() - - if widget_key not in configurations: - configurations[widget_key] = [] - - # Generate id - id = str(uuid.uuid1()) - - configuration_to_add = { - "id": id, - "name": data["name"], - "description": data["description"], - "projectId": data["projectId"], - "dataProviderId": data["dataProviderId"], - "creationDate": utils.timeNow(), - "configuration": data["configuration"], - } - - # Save configuration - configurations[widget_key].append(configuration_to_add) - _save_configurations(configurations) - - -def delete_configuration(widget_key, id): - # Delete the widget configuration by its name - configurations = _get_all_configurations() - - if widget_key in configurations: - for configuration in configurations[widget_key]: - if configuration["id"] == id: - configurations[widget_key].remove(configuration) - - _save_configurations(configurations) - - -def _get_all_configurations(): - # Return the configurations list of all widgets - try: - with open(CONF_PATH) as json_file: - return json.load(json_file) - - except FileNotFoundError: - setup_widget_configurations() - return {} - except json.decoder.JSONDecodeError as e: - print("Error while reading the widget configurations file") - print(e) - print("The file will 
be reset") - _save_configurations({}) - return {} - - -def _save_configurations(conf, retry=False): - # Update the json file - try: - with open(CONF_PATH, "w") as json_file: - json.dump(conf, json_file) - except FileNotFoundError: - if not retry: - setup_widget_configurations() - _save_configurations(conf, True) - else: - print("Error while saving the widget configurations file") - print("The file will not be saved") diff --git a/build/lib/backend/websrv.py b/build/lib/backend/websrv.py deleted file mode 100644 index 22db667d4..000000000 --- a/build/lib/backend/websrv.py +++ /dev/null @@ -1,78 +0,0 @@ -import connexion -import os -import requests -from termcolor import colored -from flask_cors import CORS -from flask import send_from_directory, request, Response -from init import init -from backend.utils.utils import get_app_version -from backend.config.init_config import DEBUG_COLOR - -DEV_FRONTEND_URL = "http://localhost:8080/" -PORT = 3000 - - -app = connexion.App(__name__) -app.add_api("swagger.yaml", strict_validation=True) -CORS(app.app) - - -def send_frontend(path): - if path == "/": - path = "index.html" - - # If production, use the index.html from the dist folder - if os.getenv("FLASK_ENV") == "production": - return send_from_directory("dist", path) - - # In development, redirect to the DEV_FRONTEND_URL - else: - if request.method == "GET": - try: - resp = requests.get(f"{DEV_FRONTEND_URL}{path}") - excluded_headers = [ - "content-encoding", - "content-length", - "transfer-encoding", - "connection", - ] - headers = [ - (name, value) - for (name, value) in resp.raw.headers.items() - if name.lower() not in excluded_headers - ] - response = Response(resp.content, resp.status_code, headers) - return response - except (requests.exceptions.ConnectionError, requests.exceptions.Timeout): - return ( - "You are in a development environment and the DebAI frontend" - + "is not available at the url : " - + DEV_FRONTEND_URL, - 503, - ) - else: - print("Unexpected request method") - - -# For serving the dashboard -@app.route("/") -def send_index(): - return send_frontend("/") - - -# For serving the dashboard assets -@app.route("/") -def send_supporting_elements(path): - return send_frontend(path) - - -if __name__ == "__main__": - # Run DebiAI init - print("================= DebiAI " + get_app_version() + " ====================") - init() - print("======================== RUN =======================") - print( - " DebiAI is available at " - + colored("http://localhost:" + str(PORT), DEBUG_COLOR) - ) - app.run(port=PORT, debug=True) From 551bb0ad864d2be0d57fcd8493fe6392cfda7603 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Fri, 28 Jun 2024 15:51:31 +0200 Subject: [PATCH 17/97] cspell --- .../widgets/WidgetTemplateFull/WidgetTemplateFull.vue | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/src/components/debiai/dataAnalysis/widgets/WidgetTemplateFull/WidgetTemplateFull.vue b/frontend/src/components/debiai/dataAnalysis/widgets/WidgetTemplateFull/WidgetTemplateFull.vue index ef033264b..2d10337b8 100644 --- a/frontend/src/components/debiai/dataAnalysis/widgets/WidgetTemplateFull/WidgetTemplateFull.vue +++ b/frontend/src/components/debiai/dataAnalysis/widgets/WidgetTemplateFull/WidgetTemplateFull.vue @@ -443,7 +443,7 @@ export default { // that can watched and used to redraw a plot (see created()) }, redrawRequired: function () { - // The colored colum has changed + // The colored column has changed // We cat tell the parent widget that an update is required 
       this.$parent.colorWarning = true;
       // A redraw btn will be displayed, pressing it will send redraw

From 509ca760e851417c28ce86532032b7d61da02496 Mon Sep 17 00:00:00 2001
From: FadyCoding
Date: Fri, 28 Jun 2024 16:05:02 +0200
Subject: [PATCH 18/97] cspell column

---
 .../widgets/WidgetTemplateFull/WidgetTemplateFull.vue | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/frontend/src/components/debiai/dataAnalysis/widgets/WidgetTemplateFull/WidgetTemplateFull.vue b/frontend/src/components/debiai/dataAnalysis/widgets/WidgetTemplateFull/WidgetTemplateFull.vue
index ef033264b..2d10337b8 100644
--- a/frontend/src/components/debiai/dataAnalysis/widgets/WidgetTemplateFull/WidgetTemplateFull.vue
+++ b/frontend/src/components/debiai/dataAnalysis/widgets/WidgetTemplateFull/WidgetTemplateFull.vue
@@ -443,7 +443,7 @@ export default {
       // that can watched and used to redraw a plot (see created())
     },
     redrawRequired: function () {
-      // The colored colum has changed
+      // The colored column has changed
       // We cat tell the parent widget that an update is required
       this.$parent.colorWarning = true;
       // A redraw btn will be displayed, pressing it will send redraw

From ffa0095e83eb5596bbbb98f4696da3db53612d0c Mon Sep 17 00:00:00 2001
From: FadyCoding
Date: Mon, 1 Jul 2024 10:09:32 +0200
Subject: [PATCH 19/97] fix browser opening

---
 backend/server.py | 4 +++-
 backend/websrv.py | 1 -
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/backend/server.py b/backend/server.py
index 10b6beaeb..22884de8a 100644
--- a/backend/server.py
+++ b/backend/server.py
@@ -1,5 +1,7 @@
-from backend.websrv import start_server
+from threading import Timer
+from backend.websrv import start_server, open_browser


 def run():
+    Timer(1, open_browser).start()
     start_server(reloader=False)
diff --git a/backend/websrv.py b/backend/websrv.py
index df51dd280..c53a53b6b 100644
--- a/backend/websrv.py
+++ b/backend/websrv.py
@@ -99,5 +99,4 @@ def start_server(reloader=True):
         + colored("http://localhost:" + str(PORT), DEBUG_COLOR)
     )
     app = create_app()
-    Timer(1, open_browser).start()
     app.run(port=PORT, debug=True, use_reloader=reloader)

From 64f167d554f29e30322790c6d4777eb2363f3d6a Mon Sep 17 00:00:00 2001
From: FadyCoding
Date: Mon, 1 Jul 2024 10:10:28 +0200
Subject: [PATCH 20/97] enhanced setup file

---
 setup.py | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/setup.py b/setup.py
index 8a6aa463c..58cca091f 100644
--- a/setup.py
+++ b/setup.py
@@ -27,10 +27,9 @@
             "debiai-start=backend.server:run",
         ],
     },
-    author="Fady Bekkar",
-    author_email="fady.bekkar@irt-systemx.fr",
-    description="Python module that allows users to have a standalone DebiAI"
-    "version.",
+    author="IRT-Systemx",
+    author_email="debiai@irt-systemx.fr",
+    description="DebiAI easy start module, the standalone version of DebiAI",
     long_description=open("README.md").read(),
     long_description_content_type="text/markdown",
     url="https://github.com/debiai/DebiAI",

From 30685c93f48a341fd2f9d991062afecae1f6d9bc Mon Sep 17 00:00:00 2001
From: FadyCoding
Date: Mon, 1 Jul 2024 11:35:16 +0200
Subject: [PATCH 21/97] comments

---
 backend/websrv.py | 2 +-
 build_and_run.sh  | 4 ++++
 2 files changed, 5 insertions(+), 1 deletion(-)

diff --git a/backend/websrv.py b/backend/websrv.py
index c53a53b6b..b90720625 100644
--- a/backend/websrv.py
+++ b/backend/websrv.py
@@ -48,7 +48,7 @@ def send_frontend(path):
                 return response
         except (requests.exceptions.ConnectionError, requests.exceptions.Timeout):
             return (
-                "You are in a development environment and the DebAI
frontend" + "You are in a development environment and the DebiAI frontend" + "is not available at the url : " + DEV_FRONTEND_URL, 503, diff --git a/build_and_run.sh b/build_and_run.sh index 0c97d5b48..213813a2b 100755 --- a/build_and_run.sh +++ b/build_and_run.sh @@ -1,9 +1,13 @@ #!/bin/bash +# Remove previous build and dist directories rm -rf build dist backend.egg-info +# Generated source distribution and wheel distribution python3 setup.py sdist bdist_wheel +# Install the package pip install . +# Run the package debiai-start From 43a4a7838ec142be68a132fc9498ca78bde30ba0 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Thu, 4 Jul 2024 10:27:42 +0200 Subject: [PATCH 22/97] rename backend to debiaiServer --- MANIFEST.in | 4 +- backend/__init__.py | 1 - backend/init.py | 45 --------- cspell.json | 4 +- debiaiServer/__init__.py | 1 + {backend => debiaiServer}/config/__init__.py | 0 {backend => debiaiServer}/config/config.env | 0 {backend => debiaiServer}/config/config.ini | 0 .../config/init_config.py | 2 +- .../controller/__init__.py | 0 .../controller/algoProviders.py | 10 +- {backend => debiaiServer}/controller/data.py | 4 +- .../controller/dataProviders.py | 10 +- .../controller/exportMethods.py | 4 +- .../controller/layouts.py | 2 +- .../controller/models.py | 4 +- .../controller/projects.py | 4 +- .../controller/pythonModuleDp.py | 4 +- .../controller/selection.py | 4 +- .../controller/statisticalOperations.py | 0 .../controller/widgetConfigurations.py | 2 +- debiaiServer/init.py | 38 ++++++++ {backend => debiaiServer}/modules/__init__.py | 0 .../modules/algoProviders/AlgoProvider.py | 2 +- .../algoProviders/AlgoProviderException.py | 0 .../modules/algoProviders/__init__.py | 0 .../algoProviders/algoProvidersManager.py | 8 +- .../integratedAlgoProvider/__init__.py | 0 .../algorithms/__init__.py | 0 .../algorithms/classificationErrorMetric.py | 2 +- .../algorithms/regressionErrorMetric.py | 2 +- .../integratedAlgoProvider.py | 8 +- .../integratedAlgoProvider/utils.py | 0 .../modules/dataProviders/DataProvider.py | 0 .../dataProviders/DataProviderException.py | 0 .../modules/dataProviders/__init__.py | 0 .../dataProviders/dataProviderManager.py | 8 +- .../pythonDataProvider/PythonDataProvider.py | 10 +- .../pythonDataProvider/__init__.py | 0 .../pythonDataProvider/dataUtils/__init__.py | 0 .../pythonDataProvider/dataUtils/hash.py | 2 +- .../pythonDataProvider/dataUtils/models.py | 4 +- .../pythonDataProvider/dataUtils/projects.py | 2 +- .../dataUtils/pythonModuleUtils.py | 0 .../pythonDataProvider/dataUtils/samples.py | 2 +- .../dataUtils/selections.py | 2 +- .../pythonDataProvider/dataUtils/tags.py | 2 +- .../pythonDataProvider/dataUtils/tree.py | 2 +- .../webDataProvider/WebDataProvider.py | 14 +-- .../dataProviders/webDataProvider/__init__.py | 0 .../webDataProvider/cache/__init__.py | 0 .../webDataProvider/cache/cache.py | 2 +- .../webDataProvider/http/__init__.py | 0 .../dataProviders/webDataProvider/http/api.py | 2 +- .../webDataProvider/useCases/__init__.py | 0 .../webDataProvider/useCases/data.py | 2 +- .../webDataProvider/useCases/models.py | 4 +- .../webDataProvider/useCases/projects.py | 6 +- .../webDataProvider/useCases/selections.py | 4 +- .../modules/exportMethods/__init__.py | 0 .../modules/exportMethods/exportClass.py | 0 .../modules/exportMethods/exportUtils.py | 10 +- .../modules/exportMethods/methods/__init__.py | 0 .../exportMethods/methods/kafkaUtils.py | 2 +- .../exportMethods/methods/postUtils.py | 2 +- {backend => debiaiServer}/requirements.txt | 0 {backend => 
debiaiServer}/server.py | 2 +- {backend => debiaiServer}/setup.cfg | 0 {backend => debiaiServer}/swagger.yaml | 94 +++++++++---------- {backend => debiaiServer}/tests/README.md | 0 {backend => debiaiServer}/tests/__init__.py | 0 .../tests/test_algo_providers.py | 0 .../tests/test_data_providers.py | 0 .../tests/test_layouts.py | 0 .../tests/test_pythonModuleDataProvider.py | 0 .../tests/test_widget_configurations.py | 0 {backend => debiaiServer}/utils/__init__.py | 0 .../utils/layouts/__init__.py | 0 .../utils/layouts/layouts.py | 2 +- {backend => debiaiServer}/utils/utils.py | 2 +- .../utils/widgetConfigurations/__init__.py | 0 .../widgetConfigurations.py | 2 +- {backend => debiaiServer}/websrv.py | 6 +- docker-compose-build.yml | 2 +- makefile | 16 ++-- setup.py | 6 +- 86 files changed, 186 insertions(+), 193 deletions(-) delete mode 100644 backend/__init__.py delete mode 100644 backend/init.py create mode 100644 debiaiServer/__init__.py rename {backend => debiaiServer}/config/__init__.py (100%) rename {backend => debiaiServer}/config/config.env (100%) rename {backend => debiaiServer}/config/config.ini (100%) rename {backend => debiaiServer}/config/init_config.py (99%) rename {backend => debiaiServer}/controller/__init__.py (100%) rename {backend => debiaiServer}/controller/algoProviders.py (85%) rename {backend => debiaiServer}/controller/data.py (86%) rename {backend => debiaiServer}/controller/dataProviders.py (88%) rename {backend => debiaiServer}/controller/exportMethods.py (92%) rename {backend => debiaiServer}/controller/layouts.py (91%) rename {backend => debiaiServer}/controller/models.py (91%) rename {backend => debiaiServer}/controller/projects.py (94%) rename {backend => debiaiServer}/controller/pythonModuleDp.py (92%) rename {backend => debiaiServer}/controller/selection.py (90%) rename {backend => debiaiServer}/controller/statisticalOperations.py (100%) rename {backend => debiaiServer}/controller/widgetConfigurations.py (91%) create mode 100644 debiaiServer/init.py rename {backend => debiaiServer}/modules/__init__.py (100%) rename {backend => debiaiServer}/modules/algoProviders/AlgoProvider.py (97%) rename {backend => debiaiServer}/modules/algoProviders/AlgoProviderException.py (100%) rename {backend => debiaiServer}/modules/algoProviders/__init__.py (100%) rename {backend => debiaiServer}/modules/algoProviders/algoProvidersManager.py (91%) rename {backend => debiaiServer}/modules/algoProviders/integratedAlgoProvider/__init__.py (100%) rename {backend => debiaiServer}/modules/algoProviders/integratedAlgoProvider/algorithms/__init__.py (100%) rename {backend => debiaiServer}/modules/algoProviders/integratedAlgoProvider/algorithms/classificationErrorMetric.py (97%) rename {backend => debiaiServer}/modules/algoProviders/integratedAlgoProvider/algorithms/regressionErrorMetric.py (98%) rename {backend => debiaiServer}/modules/algoProviders/integratedAlgoProvider/integratedAlgoProvider.py (91%) rename {backend => debiaiServer}/modules/algoProviders/integratedAlgoProvider/utils.py (100%) rename {backend => debiaiServer}/modules/dataProviders/DataProvider.py (100%) rename {backend => debiaiServer}/modules/dataProviders/DataProviderException.py (100%) rename {backend => debiaiServer}/modules/dataProviders/__init__.py (100%) rename {backend => debiaiServer}/modules/dataProviders/dataProviderManager.py (91%) rename {backend => debiaiServer}/modules/dataProviders/pythonDataProvider/PythonDataProvider.py (96%) rename {backend => 
debiaiServer}/modules/dataProviders/pythonDataProvider/__init__.py (100%) rename {backend => debiaiServer}/modules/dataProviders/pythonDataProvider/dataUtils/__init__.py (100%) rename {backend => debiaiServer}/modules/dataProviders/pythonDataProvider/dataUtils/hash.py (94%) rename {backend => debiaiServer}/modules/dataProviders/pythonDataProvider/dataUtils/models.py (98%) rename {backend => debiaiServer}/modules/dataProviders/pythonDataProvider/dataUtils/projects.py (99%) rename {backend => debiaiServer}/modules/dataProviders/pythonDataProvider/dataUtils/pythonModuleUtils.py (100%) rename {backend => debiaiServer}/modules/dataProviders/pythonDataProvider/dataUtils/samples.py (98%) rename {backend => debiaiServer}/modules/dataProviders/pythonDataProvider/dataUtils/selections.py (97%) rename {backend => debiaiServer}/modules/dataProviders/pythonDataProvider/dataUtils/tags.py (97%) rename {backend => debiaiServer}/modules/dataProviders/pythonDataProvider/dataUtils/tree.py (99%) rename {backend => debiaiServer}/modules/dataProviders/webDataProvider/WebDataProvider.py (84%) rename {backend => debiaiServer}/modules/dataProviders/webDataProvider/__init__.py (100%) rename {backend => debiaiServer}/modules/dataProviders/webDataProvider/cache/__init__.py (100%) rename {backend => debiaiServer}/modules/dataProviders/webDataProvider/cache/cache.py (98%) rename {backend => debiaiServer}/modules/dataProviders/webDataProvider/http/__init__.py (100%) rename {backend => debiaiServer}/modules/dataProviders/webDataProvider/http/api.py (98%) rename {backend => debiaiServer}/modules/dataProviders/webDataProvider/useCases/__init__.py (100%) rename {backend => debiaiServer}/modules/dataProviders/webDataProvider/useCases/data.py (89%) rename {backend => debiaiServer}/modules/dataProviders/webDataProvider/useCases/models.py (91%) rename {backend => debiaiServer}/modules/dataProviders/webDataProvider/useCases/projects.py (94%) rename {backend => debiaiServer}/modules/dataProviders/webDataProvider/useCases/selections.py (93%) rename {backend => debiaiServer}/modules/exportMethods/__init__.py (100%) rename {backend => debiaiServer}/modules/exportMethods/exportClass.py (100%) rename {backend => debiaiServer}/modules/exportMethods/exportUtils.py (93%) rename {backend => debiaiServer}/modules/exportMethods/methods/__init__.py (100%) rename {backend => debiaiServer}/modules/exportMethods/methods/kafkaUtils.py (96%) rename {backend => debiaiServer}/modules/exportMethods/methods/postUtils.py (95%) rename {backend => debiaiServer}/requirements.txt (100%) rename {backend => debiaiServer}/server.py (64%) rename {backend => debiaiServer}/setup.cfg (100%) rename {backend => debiaiServer}/swagger.yaml (93%) rename {backend => debiaiServer}/tests/README.md (100%) rename {backend => debiaiServer}/tests/__init__.py (100%) rename {backend => debiaiServer}/tests/test_algo_providers.py (100%) rename {backend => debiaiServer}/tests/test_data_providers.py (100%) rename {backend => debiaiServer}/tests/test_layouts.py (100%) rename {backend => debiaiServer}/tests/test_pythonModuleDataProvider.py (100%) rename {backend => debiaiServer}/tests/test_widget_configurations.py (100%) rename {backend => debiaiServer}/utils/__init__.py (100%) rename {backend => debiaiServer}/utils/layouts/__init__.py (100%) rename {backend => debiaiServer}/utils/layouts/layouts.py (98%) rename {backend => debiaiServer}/utils/utils.py (94%) rename {backend => debiaiServer}/utils/widgetConfigurations/__init__.py (100%) rename {backend => 
debiaiServer}/utils/widgetConfigurations/widgetConfigurations.py (98%) rename {backend => debiaiServer}/websrv.py (95%) diff --git a/MANIFEST.in b/MANIFEST.in index 4a5a96589..7b9fe797a 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,3 +1,3 @@ -include backend/swagger.yaml -recursive-include backend/dist * +include debiaiServer/swagger.yaml +recursive-include debiaiServer/dist * diff --git a/backend/__init__.py b/backend/__init__.py deleted file mode 100644 index afd319a5f..000000000 --- a/backend/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from backend.websrv import send_frontend, create_app, start_server diff --git a/backend/init.py b/backend/init.py deleted file mode 100644 index d0f645742..000000000 --- a/backend/init.py +++ /dev/null @@ -1,45 +0,0 @@ -# import backend.modules.dataProviders.dataProviderManager as dataProviderManager -# import backend.modules.exportMethods.exportUtils as exportUtils -# import backend.modules.algoProviders.algoProvidersManager as algoProvidersManager -# import backend.utils.widgetConfigurations.widgetConfigurations as widgetConfUtils -# import backend.utils.layouts.layouts as layoutsUtils -# import config.init_config as config - -from backend.modules.dataProviders import ( - dataProviderManager, -) -from backend.modules.exportMethods import ( - exportUtils, -) -from backend.modules.algoProviders import ( - algoProvidersManager, -) -from backend.utils.widgetConfigurations import ( - widgetConfigurations as widgetConfUtils, -) -from backend.utils.layouts import ( - layouts as layoutsUtils, -) -from backend.config import ( - init_config as config, -) - - -def init(): - # Init config file - config.init_config() - - # Init data providers - dataProviderManager.setup_data_providers() - - # Init AlgoProviders - algoProvidersManager.setup_algo_providers() - - # Init export methods - exportUtils.load_export_methods() - - # Init widget configurations - widgetConfUtils.setup_widget_configurations() - - # Init layouts - layoutsUtils.setup_layouts() diff --git a/cspell.json b/cspell.json index 79eca028d..a9e00879c 100644 --- a/cspell.json +++ b/cspell.json @@ -66,8 +66,8 @@ "*.png", "*.yaml", ".vscode/*", - "backend/requirements.txt", - "backend/data/*", + "debiaiServer/requirements.txt", + "debiaiServer/data/*", "__pycache__/", "frontend/dist", "frontend/node_modules", diff --git a/debiaiServer/__init__.py b/debiaiServer/__init__.py new file mode 100644 index 000000000..39480a93c --- /dev/null +++ b/debiaiServer/__init__.py @@ -0,0 +1 @@ +from debiaiServer.websrv import send_frontend, create_app, start_server diff --git a/backend/config/__init__.py b/debiaiServer/config/__init__.py similarity index 100% rename from backend/config/__init__.py rename to debiaiServer/config/__init__.py diff --git a/backend/config/config.env b/debiaiServer/config/config.env similarity index 100% rename from backend/config/config.env rename to debiaiServer/config/config.env diff --git a/backend/config/config.ini b/debiaiServer/config/config.ini similarity index 100% rename from backend/config/config.ini rename to debiaiServer/config/config.ini diff --git a/backend/config/init_config.py b/debiaiServer/config/init_config.py similarity index 99% rename from backend/config/init_config.py rename to debiaiServer/config/init_config.py index bbb7e5959..2079dcc08 100644 --- a/backend/config/init_config.py +++ b/debiaiServer/config/init_config.py @@ -3,7 +3,7 @@ import os -config_path = "backend/config/config.ini" +config_path = "debiaiServer/config/config.ini" config_parser = ConfigParser() 
DEBUG_COLOR = "light_blue" diff --git a/backend/controller/__init__.py b/debiaiServer/controller/__init__.py similarity index 100% rename from backend/controller/__init__.py rename to debiaiServer/controller/__init__.py diff --git a/backend/controller/algoProviders.py b/debiaiServer/controller/algoProviders.py similarity index 85% rename from backend/controller/algoProviders.py rename to debiaiServer/controller/algoProviders.py index 3116cf164..b3075854a 100644 --- a/backend/controller/algoProviders.py +++ b/debiaiServer/controller/algoProviders.py @@ -1,11 +1,11 @@ ############################################################################# # Imports ############################################################################# -from backend.config.init_config import get_config -from backend.utils.utils import is_url_valid, is_valid_name -import backend.modules.algoProviders.algoProvidersManager as algo_provider_manager -from backend.modules.algoProviders.AlgoProviderException import AlgoProviderException -from backend.modules.algoProviders.AlgoProvider import AlgoProvider +from debiaiServer.config.init_config import get_config +from debiaiServer.utils.utils import is_url_valid, is_valid_name +import debiaiServer.modules.algoProviders.algoProvidersManager as algo_provider_manager +from debiaiServer.modules.algoProviders.AlgoProviderException import AlgoProviderException +from debiaiServer.modules.algoProviders.AlgoProvider import AlgoProvider ############################################################################# # Algo providers Management diff --git a/backend/controller/data.py b/debiaiServer/controller/data.py similarity index 86% rename from backend/controller/data.py rename to debiaiServer/controller/data.py index 420cb5d6e..12437a088 100644 --- a/backend/controller/data.py +++ b/debiaiServer/controller/data.py @@ -1,8 +1,8 @@ ############################################################################# # Imports ############################################################################# -import backend.modules.dataProviders.dataProviderManager as data_provider_manager -from backend.modules.dataProviders.DataProviderException import DataProviderException +import debiaiServer.modules.dataProviders.dataProviderManager as data_provider_manager +from debiaiServer.modules.dataProviders.DataProviderException import DataProviderException ############################################################################# # Data Management diff --git a/backend/controller/dataProviders.py b/debiaiServer/controller/dataProviders.py similarity index 88% rename from backend/controller/dataProviders.py rename to debiaiServer/controller/dataProviders.py index ae3cbaeff..0ceeceadb 100644 --- a/backend/controller/dataProviders.py +++ b/debiaiServer/controller/dataProviders.py @@ -1,13 +1,13 @@ ############################################################################# # Imports ############################################################################# -from backend.config.init_config import get_config -from backend.modules.dataProviders.webDataProvider.WebDataProvider import ( +from debiaiServer.config.init_config import get_config +from debiaiServer.modules.dataProviders.webDataProvider.WebDataProvider import ( WebDataProvider, ) -from backend.utils.utils import is_url_valid -import backend.modules.dataProviders.dataProviderManager as data_provider_manager -from backend.modules.dataProviders.DataProviderException import DataProviderException +from debiaiServer.utils.utils import is_url_valid 
+import debiaiServer.modules.dataProviders.dataProviderManager as data_provider_manager +from debiaiServer.modules.dataProviders.DataProviderException import DataProviderException ############################################################################# # Data Providers Management diff --git a/backend/controller/exportMethods.py b/debiaiServer/controller/exportMethods.py similarity index 92% rename from backend/controller/exportMethods.py rename to debiaiServer/controller/exportMethods.py index 227c40767..966dad4e0 100644 --- a/backend/controller/exportMethods.py +++ b/debiaiServer/controller/exportMethods.py @@ -1,5 +1,5 @@ -from backend.config.init_config import get_config -import backend.modules.exportMethods.exportUtils as exportUtils +from debiaiServer.config.init_config import get_config +import debiaiServer.modules.exportMethods.exportUtils as exportUtils ############################################################################# # Export API Management diff --git a/backend/controller/layouts.py b/debiaiServer/controller/layouts.py similarity index 91% rename from backend/controller/layouts.py rename to debiaiServer/controller/layouts.py index da3962a15..0c2531014 100644 --- a/backend/controller/layouts.py +++ b/debiaiServer/controller/layouts.py @@ -1,7 +1,7 @@ ############################################################################# # Imports ############################################################################# -import backend.utils.layouts.layouts as layoutsUtils +import debiaiServer.utils.layouts.layouts as layoutsUtils ############################################################################# # Analysis dashboard layout Management diff --git a/backend/controller/models.py b/debiaiServer/controller/models.py similarity index 91% rename from backend/controller/models.py rename to debiaiServer/controller/models.py index 4137e33ea..160fd7170 100644 --- a/backend/controller/models.py +++ b/debiaiServer/controller/models.py @@ -2,8 +2,8 @@ # Imports ############################################################################# -import backend.modules.dataProviders.dataProviderManager as data_provider_manager -from backend.modules.dataProviders.DataProviderException import DataProviderException +import debiaiServer.modules.dataProviders.dataProviderManager as data_provider_manager +from debiaiServer.modules.dataProviders.DataProviderException import DataProviderException ############################################################################# # MODELS Management diff --git a/backend/controller/projects.py b/debiaiServer/controller/projects.py similarity index 94% rename from backend/controller/projects.py rename to debiaiServer/controller/projects.py index fdfd26ee3..06bbb801d 100644 --- a/backend/controller/projects.py +++ b/debiaiServer/controller/projects.py @@ -1,8 +1,8 @@ ############################################################################# # Imports ############################################################################# -from backend.modules.dataProviders.DataProviderException import DataProviderException -import backend.modules.dataProviders.dataProviderManager as data_provider_manager +from debiaiServer.modules.dataProviders.DataProviderException import DataProviderException +import debiaiServer.modules.dataProviders.dataProviderManager as data_provider_manager ############################################################################# # PROJECTS Management diff --git a/backend/controller/pythonModuleDp.py 
b/debiaiServer/controller/pythonModuleDp.py similarity index 92% rename from backend/controller/pythonModuleDp.py rename to debiaiServer/controller/pythonModuleDp.py index 432b57944..b9c8cae9b 100644 --- a/backend/controller/pythonModuleDp.py +++ b/debiaiServer/controller/pythonModuleDp.py @@ -1,5 +1,5 @@ -from backend.modules.dataProviders.DataProviderException import DataProviderException -import backend.modules.dataProviders.dataProviderManager as data_provider_manager +from debiaiServer.modules.dataProviders.DataProviderException import DataProviderException +import debiaiServer.modules.dataProviders.dataProviderManager as data_provider_manager # Project diff --git a/backend/controller/selection.py b/debiaiServer/controller/selection.py similarity index 90% rename from backend/controller/selection.py rename to debiaiServer/controller/selection.py index 2e5ddc0e1..9b22ca709 100644 --- a/backend/controller/selection.py +++ b/debiaiServer/controller/selection.py @@ -2,8 +2,8 @@ # Imports ############################################################################# -import backend.modules.dataProviders.dataProviderManager as data_provider_manager -from backend.modules.dataProviders.DataProviderException import DataProviderException +import debiaiServer.modules.dataProviders.dataProviderManager as data_provider_manager +from debiaiServer.modules.dataProviders.DataProviderException import DataProviderException ############################################################################# # Selections Management diff --git a/backend/controller/statisticalOperations.py b/debiaiServer/controller/statisticalOperations.py similarity index 100% rename from backend/controller/statisticalOperations.py rename to debiaiServer/controller/statisticalOperations.py diff --git a/backend/controller/widgetConfigurations.py b/debiaiServer/controller/widgetConfigurations.py similarity index 91% rename from backend/controller/widgetConfigurations.py rename to debiaiServer/controller/widgetConfigurations.py index 90b3e3764..f552afb3f 100644 --- a/backend/controller/widgetConfigurations.py +++ b/debiaiServer/controller/widgetConfigurations.py @@ -1,7 +1,7 @@ ############################################################################# # Imports ############################################################################# -import backend.utils.widgetConfigurations.widgetConfigurations as widgetConfUtils +import debiaiServer.utils.widgetConfigurations.widgetConfigurations as widgetConfUtils ############################################################################# # Widget configuration Management diff --git a/debiaiServer/init.py b/debiaiServer/init.py new file mode 100644 index 000000000..16305aba6 --- /dev/null +++ b/debiaiServer/init.py @@ -0,0 +1,38 @@ +from debiaiServer.modules.dataProviders import ( + dataProviderManager, +) +from debiaiServer.modules.exportMethods import ( + exportUtils, +) +from debiaiServer.modules.algoProviders import ( + algoProvidersManager, +) +from debiaiServer.utils.widgetConfigurations import ( + widgetConfigurations as widgetConfUtils, +) +from debiaiServer.utils.layouts import ( + layouts as layoutsUtils, +) +from debiaiServer.config import ( + init_config as config, +) + + +def init(): + # Init config file + config.init_config() + + # Init data providers + dataProviderManager.setup_data_providers() + + # Init AlgoProviders + algoProvidersManager.setup_algo_providers() + + # Init export methods + exportUtils.load_export_methods() + + # Init widget configurations + 
widgetConfUtils.setup_widget_configurations() + + # Init layouts + layoutsUtils.setup_layouts() diff --git a/backend/modules/__init__.py b/debiaiServer/modules/__init__.py similarity index 100% rename from backend/modules/__init__.py rename to debiaiServer/modules/__init__.py diff --git a/backend/modules/algoProviders/AlgoProvider.py b/debiaiServer/modules/algoProviders/AlgoProvider.py similarity index 97% rename from backend/modules/algoProviders/AlgoProvider.py rename to debiaiServer/modules/algoProviders/AlgoProvider.py index d446c655a..e48944d1d 100644 --- a/backend/modules/algoProviders/AlgoProvider.py +++ b/debiaiServer/modules/algoProviders/AlgoProvider.py @@ -1,7 +1,7 @@ # Class for AlgoProvider import requests import json -from backend.modules.algoProviders.AlgoProviderException import AlgoProviderException +from debiaiServer.modules.algoProviders.AlgoProviderException import AlgoProviderException class AlgoProvider: diff --git a/backend/modules/algoProviders/AlgoProviderException.py b/debiaiServer/modules/algoProviders/AlgoProviderException.py similarity index 100% rename from backend/modules/algoProviders/AlgoProviderException.py rename to debiaiServer/modules/algoProviders/AlgoProviderException.py diff --git a/backend/modules/algoProviders/__init__.py b/debiaiServer/modules/algoProviders/__init__.py similarity index 100% rename from backend/modules/algoProviders/__init__.py rename to debiaiServer/modules/algoProviders/__init__.py diff --git a/backend/modules/algoProviders/algoProvidersManager.py b/debiaiServer/modules/algoProviders/algoProvidersManager.py similarity index 91% rename from backend/modules/algoProviders/algoProvidersManager.py rename to debiaiServer/modules/algoProviders/algoProvidersManager.py index 282023606..3d362a2b3 100644 --- a/backend/modules/algoProviders/algoProvidersManager.py +++ b/debiaiServer/modules/algoProviders/algoProvidersManager.py @@ -1,16 +1,16 @@ from termcolor import colored -from backend.config.init_config import ( +from debiaiServer.config.init_config import ( get_config, DEBUG_COLOR, ERROR_COLOR, SUCCESS_COLOR, ) -from backend.modules.algoProviders.AlgoProviderException import ( +from debiaiServer.modules.algoProviders.AlgoProviderException import ( AlgoProviderException, ) # noqa -from backend.modules.algoProviders.AlgoProvider import AlgoProvider -from backend.modules.algoProviders.integratedAlgoProvider.integratedAlgoProvider import ( # noqa +from debiaiServer.modules.algoProviders.AlgoProvider import AlgoProvider +from debiaiServer.modules.algoProviders.integratedAlgoProvider.integratedAlgoProvider import ( # noqa IntegratedAlgoProvider, ) diff --git a/backend/modules/algoProviders/integratedAlgoProvider/__init__.py b/debiaiServer/modules/algoProviders/integratedAlgoProvider/__init__.py similarity index 100% rename from backend/modules/algoProviders/integratedAlgoProvider/__init__.py rename to debiaiServer/modules/algoProviders/integratedAlgoProvider/__init__.py diff --git a/backend/modules/algoProviders/integratedAlgoProvider/algorithms/__init__.py b/debiaiServer/modules/algoProviders/integratedAlgoProvider/algorithms/__init__.py similarity index 100% rename from backend/modules/algoProviders/integratedAlgoProvider/algorithms/__init__.py rename to debiaiServer/modules/algoProviders/integratedAlgoProvider/algorithms/__init__.py diff --git a/backend/modules/algoProviders/integratedAlgoProvider/algorithms/classificationErrorMetric.py b/debiaiServer/modules/algoProviders/integratedAlgoProvider/algorithms/classificationErrorMetric.py 
similarity index 97% rename from backend/modules/algoProviders/integratedAlgoProvider/algorithms/classificationErrorMetric.py rename to debiaiServer/modules/algoProviders/integratedAlgoProvider/algorithms/classificationErrorMetric.py index db6e675ec..f3b83c3d3 100644 --- a/backend/modules/algoProviders/integratedAlgoProvider/algorithms/classificationErrorMetric.py +++ b/debiaiServer/modules/algoProviders/integratedAlgoProvider/algorithms/classificationErrorMetric.py @@ -1,4 +1,4 @@ -from backend.modules.algoProviders.integratedAlgoProvider.utils import ( +from debiaiServer.modules.algoProviders.integratedAlgoProvider.utils import ( get_input_from_inputs, ) diff --git a/backend/modules/algoProviders/integratedAlgoProvider/algorithms/regressionErrorMetric.py b/debiaiServer/modules/algoProviders/integratedAlgoProvider/algorithms/regressionErrorMetric.py similarity index 98% rename from backend/modules/algoProviders/integratedAlgoProvider/algorithms/regressionErrorMetric.py rename to debiaiServer/modules/algoProviders/integratedAlgoProvider/algorithms/regressionErrorMetric.py index 035ad8a59..dca073db1 100644 --- a/backend/modules/algoProviders/integratedAlgoProvider/algorithms/regressionErrorMetric.py +++ b/debiaiServer/modules/algoProviders/integratedAlgoProvider/algorithms/regressionErrorMetric.py @@ -1,4 +1,4 @@ -from backend.modules.algoProviders.integratedAlgoProvider.utils import ( +from debiaiServer.modules.algoProviders.integratedAlgoProvider.utils import ( get_input_from_inputs, ) diff --git a/backend/modules/algoProviders/integratedAlgoProvider/integratedAlgoProvider.py b/debiaiServer/modules/algoProviders/integratedAlgoProvider/integratedAlgoProvider.py similarity index 91% rename from backend/modules/algoProviders/integratedAlgoProvider/integratedAlgoProvider.py rename to debiaiServer/modules/algoProviders/integratedAlgoProvider/integratedAlgoProvider.py index 0fbc713ab..40483e69c 100644 --- a/backend/modules/algoProviders/integratedAlgoProvider/integratedAlgoProvider.py +++ b/debiaiServer/modules/algoProviders/integratedAlgoProvider/integratedAlgoProvider.py @@ -1,9 +1,9 @@ import os from termcolor import colored -from backend.config.init_config import DEBUG_COLOR -from backend.modules.algoProviders.AlgoProvider import AlgoProvider -from backend.modules.algoProviders.AlgoProviderException import AlgoProviderException +from debiaiServer.config.init_config import DEBUG_COLOR +from debiaiServer.modules.algoProviders.AlgoProvider import AlgoProvider +from debiaiServer.modules.algoProviders.AlgoProviderException import AlgoProviderException def _get_algorithm_python(algorithm_name): @@ -29,7 +29,7 @@ def _get_algorithm_python(algorithm_name): # Import the algorithm algorithm_python = __import__( - "backend.modules.algoProviders.integratedAlgoProvider.algorithms." + "debiaiServer.modules.algoProviders.integratedAlgoProvider.algorithms." 
+ algorithm_file, fromlist=["*"], ) diff --git a/backend/modules/algoProviders/integratedAlgoProvider/utils.py b/debiaiServer/modules/algoProviders/integratedAlgoProvider/utils.py similarity index 100% rename from backend/modules/algoProviders/integratedAlgoProvider/utils.py rename to debiaiServer/modules/algoProviders/integratedAlgoProvider/utils.py diff --git a/backend/modules/dataProviders/DataProvider.py b/debiaiServer/modules/dataProviders/DataProvider.py similarity index 100% rename from backend/modules/dataProviders/DataProvider.py rename to debiaiServer/modules/dataProviders/DataProvider.py diff --git a/backend/modules/dataProviders/DataProviderException.py b/debiaiServer/modules/dataProviders/DataProviderException.py similarity index 100% rename from backend/modules/dataProviders/DataProviderException.py rename to debiaiServer/modules/dataProviders/DataProviderException.py diff --git a/backend/modules/dataProviders/__init__.py b/debiaiServer/modules/dataProviders/__init__.py similarity index 100% rename from backend/modules/dataProviders/__init__.py rename to debiaiServer/modules/dataProviders/__init__.py diff --git a/backend/modules/dataProviders/dataProviderManager.py b/debiaiServer/modules/dataProviders/dataProviderManager.py similarity index 91% rename from backend/modules/dataProviders/dataProviderManager.py rename to debiaiServer/modules/dataProviders/dataProviderManager.py index 2b651dbb7..67ddbdc59 100644 --- a/backend/modules/dataProviders/dataProviderManager.py +++ b/debiaiServer/modules/dataProviders/dataProviderManager.py @@ -1,19 +1,19 @@ from termcolor import colored -from backend.config.init_config import ( +from debiaiServer.config.init_config import ( get_config, DEBUG_COLOR, ERROR_COLOR, SUCCESS_COLOR, ) -from backend.modules.dataProviders.webDataProvider.WebDataProvider import ( +from debiaiServer.modules.dataProviders.webDataProvider.WebDataProvider import ( WebDataProvider, ) -from backend.modules.dataProviders.pythonDataProvider.PythonDataProvider import ( +from debiaiServer.modules.dataProviders.pythonDataProvider.PythonDataProvider import ( PythonDataProvider, PYTHON_DATA_PROVIDER_ID, ) -from backend.modules.dataProviders.DataProviderException import DataProviderException +from debiaiServer.modules.dataProviders.DataProviderException import DataProviderException data_providers_list = [] python_data_provider_disabled = True diff --git a/backend/modules/dataProviders/pythonDataProvider/PythonDataProvider.py b/debiaiServer/modules/dataProviders/pythonDataProvider/PythonDataProvider.py similarity index 96% rename from backend/modules/dataProviders/pythonDataProvider/PythonDataProvider.py rename to debiaiServer/modules/dataProviders/pythonDataProvider/PythonDataProvider.py index c143d7a76..ef017dd53 100644 --- a/backend/modules/dataProviders/pythonDataProvider/PythonDataProvider.py +++ b/debiaiServer/modules/dataProviders/pythonDataProvider/PythonDataProvider.py @@ -1,7 +1,7 @@ -from backend.config.init_config import get_config -from backend.modules.dataProviders.DataProvider import DataProvider -from backend.modules.dataProviders.DataProviderException import DataProviderException -from backend.modules.dataProviders.pythonDataProvider.dataUtils import ( +from debiaiServer.config.init_config import get_config +from debiaiServer.modules.dataProviders.DataProvider import DataProvider +from debiaiServer.modules.dataProviders.DataProviderException import DataProviderException +from debiaiServer.modules.dataProviders.pythonDataProvider.dataUtils import ( 
pythonModuleUtils, projects, samples, @@ -10,7 +10,7 @@ tree, ) -from backend.utils.utils import get_app_version +from debiaiServer.utils.utils import get_app_version PYTHON_DATA_PROVIDER_ID = "Python module Data Provider" diff --git a/backend/modules/dataProviders/pythonDataProvider/__init__.py b/debiaiServer/modules/dataProviders/pythonDataProvider/__init__.py similarity index 100% rename from backend/modules/dataProviders/pythonDataProvider/__init__.py rename to debiaiServer/modules/dataProviders/pythonDataProvider/__init__.py diff --git a/backend/modules/dataProviders/pythonDataProvider/dataUtils/__init__.py b/debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/__init__.py similarity index 100% rename from backend/modules/dataProviders/pythonDataProvider/dataUtils/__init__.py rename to debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/__init__.py diff --git a/backend/modules/dataProviders/pythonDataProvider/dataUtils/hash.py b/debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/hash.py similarity index 94% rename from backend/modules/dataProviders/pythonDataProvider/dataUtils/hash.py rename to debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/hash.py index 844a309fa..8459eb6bc 100644 --- a/backend/modules/dataProviders/pythonDataProvider/dataUtils/hash.py +++ b/debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/hash.py @@ -1,7 +1,7 @@ import hashlib import ujson as json -from backend.modules.dataProviders.pythonDataProvider.dataUtils import pythonModuleUtils +from debiaiServer.modules.dataProviders.pythonDataProvider.dataUtils import pythonModuleUtils DATA_PATH = pythonModuleUtils.DATA_PATH diff --git a/backend/modules/dataProviders/pythonDataProvider/dataUtils/models.py b/debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/models.py similarity index 98% rename from backend/modules/dataProviders/pythonDataProvider/dataUtils/models.py rename to debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/models.py index 075df2087..d61210a8f 100644 --- a/backend/modules/dataProviders/pythonDataProvider/dataUtils/models.py +++ b/debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/models.py @@ -1,11 +1,11 @@ import os import ujson as json -from backend.modules.dataProviders.pythonDataProvider.dataUtils import ( +from debiaiServer.modules.dataProviders.pythonDataProvider.dataUtils import ( pythonModuleUtils, projects, tree, ) -from backend.modules.dataProviders.DataProviderException import DataProviderException +from debiaiServer.modules.dataProviders.DataProviderException import DataProviderException DATA_PATH = pythonModuleUtils.DATA_PATH diff --git a/backend/modules/dataProviders/pythonDataProvider/dataUtils/projects.py b/debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/projects.py similarity index 99% rename from backend/modules/dataProviders/pythonDataProvider/dataUtils/projects.py rename to debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/projects.py index a5ffa511f..b1185dd68 100644 --- a/backend/modules/dataProviders/pythonDataProvider/dataUtils/projects.py +++ b/debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/projects.py @@ -2,7 +2,7 @@ import shutil import ujson as json -from backend.modules.dataProviders.pythonDataProvider.dataUtils import ( +from debiaiServer.modules.dataProviders.pythonDataProvider.dataUtils import ( pythonModuleUtils, hash, ) diff --git a/backend/modules/dataProviders/pythonDataProvider/dataUtils/pythonModuleUtils.py 
b/debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/pythonModuleUtils.py similarity index 100% rename from backend/modules/dataProviders/pythonDataProvider/dataUtils/pythonModuleUtils.py rename to debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/pythonModuleUtils.py diff --git a/backend/modules/dataProviders/pythonDataProvider/dataUtils/samples.py b/debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/samples.py similarity index 98% rename from backend/modules/dataProviders/pythonDataProvider/dataUtils/samples.py rename to debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/samples.py index 6ee3676ef..9a47ec3a5 100644 --- a/backend/modules/dataProviders/pythonDataProvider/dataUtils/samples.py +++ b/debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/samples.py @@ -1,4 +1,4 @@ -from backend.modules.dataProviders.pythonDataProvider.dataUtils import ( +from debiaiServer.modules.dataProviders.pythonDataProvider.dataUtils import ( pythonModuleUtils, tree, hash, diff --git a/backend/modules/dataProviders/pythonDataProvider/dataUtils/selections.py b/debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/selections.py similarity index 97% rename from backend/modules/dataProviders/pythonDataProvider/dataUtils/selections.py rename to debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/selections.py index 8281886ef..433a2735d 100644 --- a/backend/modules/dataProviders/pythonDataProvider/dataUtils/selections.py +++ b/debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/selections.py @@ -1,7 +1,7 @@ import os import ujson as json -from backend.modules.dataProviders.pythonDataProvider.dataUtils import ( +from debiaiServer.modules.dataProviders.pythonDataProvider.dataUtils import ( pythonModuleUtils, projects, ) diff --git a/backend/modules/dataProviders/pythonDataProvider/dataUtils/tags.py b/debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/tags.py similarity index 97% rename from backend/modules/dataProviders/pythonDataProvider/dataUtils/tags.py rename to debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/tags.py index 425313aba..b32a559c5 100644 --- a/backend/modules/dataProviders/pythonDataProvider/dataUtils/tags.py +++ b/debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/tags.py @@ -1,5 +1,5 @@ import os -from backend.modules.dataProviders.pythonDataProvider.dataUtils import ( +from debiaiServer.modules.dataProviders.pythonDataProvider.dataUtils import ( pythonModuleUtils, hash, ) diff --git a/backend/modules/dataProviders/pythonDataProvider/dataUtils/tree.py b/debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/tree.py similarity index 99% rename from backend/modules/dataProviders/pythonDataProvider/dataUtils/tree.py rename to debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/tree.py index 08fec8241..41ac9a9c6 100644 --- a/backend/modules/dataProviders/pythonDataProvider/dataUtils/tree.py +++ b/debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/tree.py @@ -1,7 +1,7 @@ import ujson as json import os -from backend.modules.dataProviders.pythonDataProvider.dataUtils import ( +from debiaiServer.modules.dataProviders.pythonDataProvider.dataUtils import ( pythonModuleUtils, projects, models, diff --git a/backend/modules/dataProviders/webDataProvider/WebDataProvider.py b/debiaiServer/modules/dataProviders/webDataProvider/WebDataProvider.py similarity index 84% rename from 
backend/modules/dataProviders/webDataProvider/WebDataProvider.py rename to debiaiServer/modules/dataProviders/webDataProvider/WebDataProvider.py index 0f2d10678..353b9dfa9 100644 --- a/backend/modules/dataProviders/webDataProvider/WebDataProvider.py +++ b/debiaiServer/modules/dataProviders/webDataProvider/WebDataProvider.py @@ -1,22 +1,22 @@ -from backend.modules.dataProviders.DataProvider import DataProvider -from backend.modules.dataProviders.webDataProvider.useCases.data import ( +from debiaiServer.modules.dataProviders.DataProvider import DataProvider +from debiaiServer.modules.dataProviders.webDataProvider.useCases.data import ( get_project_id_list, get_project_samples, ) -from backend.modules.dataProviders.webDataProvider.useCases.projects import ( +from debiaiServer.modules.dataProviders.webDataProvider.useCases.projects import ( get_all_projects_from_data_provider, get_single_project_from_data_provider, delete_project, ) -from backend.modules.dataProviders.webDataProvider.useCases.models import ( +from debiaiServer.modules.dataProviders.webDataProvider.useCases.models import ( get_model_results, get_models_info, get_model_result_id, delete_model, ) -import backend.modules.dataProviders.webDataProvider.useCases.selections as useCaseSelections # noqa -from backend.modules.dataProviders.webDataProvider.http.api import get_info, get_status -from backend.modules.dataProviders.webDataProvider.cache.cache import Cache +import debiaiServer.modules.dataProviders.webDataProvider.useCases.selections as useCaseSelections # noqa +from debiaiServer.modules.dataProviders.webDataProvider.http.api import get_info, get_status +from debiaiServer.modules.dataProviders.webDataProvider.cache.cache import Cache # WebDataProvider class, allow to get data from a web data-provider diff --git a/backend/modules/dataProviders/webDataProvider/__init__.py b/debiaiServer/modules/dataProviders/webDataProvider/__init__.py similarity index 100% rename from backend/modules/dataProviders/webDataProvider/__init__.py rename to debiaiServer/modules/dataProviders/webDataProvider/__init__.py diff --git a/backend/modules/dataProviders/webDataProvider/cache/__init__.py b/debiaiServer/modules/dataProviders/webDataProvider/cache/__init__.py similarity index 100% rename from backend/modules/dataProviders/webDataProvider/cache/__init__.py rename to debiaiServer/modules/dataProviders/webDataProvider/cache/__init__.py diff --git a/backend/modules/dataProviders/webDataProvider/cache/cache.py b/debiaiServer/modules/dataProviders/webDataProvider/cache/cache.py similarity index 98% rename from backend/modules/dataProviders/webDataProvider/cache/cache.py rename to debiaiServer/modules/dataProviders/webDataProvider/cache/cache.py index 68b2c05f7..3673ea5e4 100644 --- a/backend/modules/dataProviders/webDataProvider/cache/cache.py +++ b/debiaiServer/modules/dataProviders/webDataProvider/cache/cache.py @@ -3,7 +3,7 @@ # It will mainly save the id list of samples, selections and models results # The ability to cache and the time to live are configurable in the config file -from backend.config.init_config import get_config +from debiaiServer.config.init_config import get_config from cacheout import Cache as CacheoutCache diff --git a/backend/modules/dataProviders/webDataProvider/http/__init__.py b/debiaiServer/modules/dataProviders/webDataProvider/http/__init__.py similarity index 100% rename from backend/modules/dataProviders/webDataProvider/http/__init__.py rename to debiaiServer/modules/dataProviders/webDataProvider/http/__init__.py diff 
--git a/backend/modules/dataProviders/webDataProvider/http/api.py b/debiaiServer/modules/dataProviders/webDataProvider/http/api.py similarity index 98% rename from backend/modules/dataProviders/webDataProvider/http/api.py rename to debiaiServer/modules/dataProviders/webDataProvider/http/api.py index 1b32e7fa4..3aee3a731 100644 --- a/backend/modules/dataProviders/webDataProvider/http/api.py +++ b/debiaiServer/modules/dataProviders/webDataProvider/http/api.py @@ -1,6 +1,6 @@ import requests import json -from backend.modules.dataProviders.DataProviderException import DataProviderException +from debiaiServer.modules.dataProviders.DataProviderException import DataProviderException # Todo : change info if in not alive anymore diff --git a/backend/modules/dataProviders/webDataProvider/useCases/__init__.py b/debiaiServer/modules/dataProviders/webDataProvider/useCases/__init__.py similarity index 100% rename from backend/modules/dataProviders/webDataProvider/useCases/__init__.py rename to debiaiServer/modules/dataProviders/webDataProvider/useCases/__init__.py diff --git a/backend/modules/dataProviders/webDataProvider/useCases/data.py b/debiaiServer/modules/dataProviders/webDataProvider/useCases/data.py similarity index 89% rename from backend/modules/dataProviders/webDataProvider/useCases/data.py rename to debiaiServer/modules/dataProviders/webDataProvider/useCases/data.py index eb3ba8d63..05feef21d 100644 --- a/backend/modules/dataProviders/webDataProvider/useCases/data.py +++ b/debiaiServer/modules/dataProviders/webDataProvider/useCases/data.py @@ -1,4 +1,4 @@ -import backend.modules.dataProviders.webDataProvider.http.api as api +import debiaiServer.modules.dataProviders.webDataProvider.http.api as api # # UseCase folder role is the middleware between class methods and http requests diff --git a/backend/modules/dataProviders/webDataProvider/useCases/models.py b/debiaiServer/modules/dataProviders/webDataProvider/useCases/models.py similarity index 91% rename from backend/modules/dataProviders/webDataProvider/useCases/models.py rename to debiaiServer/modules/dataProviders/webDataProvider/useCases/models.py index 6b646b261..655cf1fc2 100644 --- a/backend/modules/dataProviders/webDataProvider/useCases/models.py +++ b/debiaiServer/modules/dataProviders/webDataProvider/useCases/models.py @@ -1,5 +1,5 @@ -import backend.modules.dataProviders.webDataProvider.http.api as api -from backend.modules.dataProviders.DataProviderException import DataProviderException +import debiaiServer.modules.dataProviders.webDataProvider.http.api as api +from debiaiServer.modules.dataProviders.DataProviderException import DataProviderException def get_models_info(url, project_id): diff --git a/backend/modules/dataProviders/webDataProvider/useCases/projects.py b/debiaiServer/modules/dataProviders/webDataProvider/useCases/projects.py similarity index 94% rename from backend/modules/dataProviders/webDataProvider/useCases/projects.py rename to debiaiServer/modules/dataProviders/webDataProvider/useCases/projects.py index 03f5dcae2..0a610803e 100644 --- a/backend/modules/dataProviders/webDataProvider/useCases/projects.py +++ b/debiaiServer/modules/dataProviders/webDataProvider/useCases/projects.py @@ -1,9 +1,9 @@ -import backend.modules.dataProviders.webDataProvider.http.api as api +import debiaiServer.modules.dataProviders.webDataProvider.http.api as api -from backend.modules.dataProviders.webDataProvider.useCases.models import ( +from debiaiServer.modules.dataProviders.webDataProvider.useCases.models import ( get_models_info, ) 
-from backend.modules.dataProviders.webDataProvider.useCases.selections import ( +from debiaiServer.modules.dataProviders.webDataProvider.useCases.selections import ( get_project_selections, ) diff --git a/backend/modules/dataProviders/webDataProvider/useCases/selections.py b/debiaiServer/modules/dataProviders/webDataProvider/useCases/selections.py similarity index 93% rename from backend/modules/dataProviders/webDataProvider/useCases/selections.py rename to debiaiServer/modules/dataProviders/webDataProvider/useCases/selections.py index 6caa2f74d..6d3bd8723 100644 --- a/backend/modules/dataProviders/webDataProvider/useCases/selections.py +++ b/debiaiServer/modules/dataProviders/webDataProvider/useCases/selections.py @@ -1,5 +1,5 @@ -import backend.modules.dataProviders.webDataProvider.http.api as api -from backend.modules.dataProviders.DataProviderException import DataProviderException +import debiaiServer.modules.dataProviders.webDataProvider.http.api as api +from debiaiServer.modules.dataProviders.DataProviderException import DataProviderException def get_project_selections(url, project_id): diff --git a/backend/modules/exportMethods/__init__.py b/debiaiServer/modules/exportMethods/__init__.py similarity index 100% rename from backend/modules/exportMethods/__init__.py rename to debiaiServer/modules/exportMethods/__init__.py diff --git a/backend/modules/exportMethods/exportClass.py b/debiaiServer/modules/exportMethods/exportClass.py similarity index 100% rename from backend/modules/exportMethods/exportClass.py rename to debiaiServer/modules/exportMethods/exportClass.py diff --git a/backend/modules/exportMethods/exportUtils.py b/debiaiServer/modules/exportMethods/exportUtils.py similarity index 93% rename from backend/modules/exportMethods/exportUtils.py rename to debiaiServer/modules/exportMethods/exportUtils.py index d7502bcd6..4f136d567 100644 --- a/backend/modules/exportMethods/exportUtils.py +++ b/debiaiServer/modules/exportMethods/exportUtils.py @@ -1,10 +1,10 @@ -from backend.config.init_config import get_config -import backend.modules.dataProviders.dataProviderManager as data_provider_manager -from backend.modules.dataProviders.DataProviderException import DataProviderException +from debiaiServer.config.init_config import get_config +import debiaiServer.modules.dataProviders.dataProviderManager as data_provider_manager +from debiaiServer.modules.dataProviders.DataProviderException import DataProviderException import time -from backend.modules.exportMethods.methods.kafkaUtils import KafkaExportType -from backend.modules.exportMethods.methods.postUtils import PostExportType +from debiaiServer.modules.exportMethods.methods.kafkaUtils import KafkaExportType +from debiaiServer.modules.exportMethods.methods.postUtils import PostExportType ############################################################################# # diff --git a/backend/modules/exportMethods/methods/__init__.py b/debiaiServer/modules/exportMethods/methods/__init__.py similarity index 100% rename from backend/modules/exportMethods/methods/__init__.py rename to debiaiServer/modules/exportMethods/methods/__init__.py diff --git a/backend/modules/exportMethods/methods/kafkaUtils.py b/debiaiServer/modules/exportMethods/methods/kafkaUtils.py similarity index 96% rename from backend/modules/exportMethods/methods/kafkaUtils.py rename to debiaiServer/modules/exportMethods/methods/kafkaUtils.py index 09c708df1..67e1b3f71 100644 --- a/backend/modules/exportMethods/methods/kafkaUtils.py +++ 
b/debiaiServer/modules/exportMethods/methods/kafkaUtils.py @@ -1,5 +1,5 @@ from kafka import KafkaProducer -from backend.modules.exportMethods.exportClass import ExportType, ExportMethod +from debiaiServer.modules.exportMethods.exportClass import ExportType, ExportMethod import json ############################################################################# diff --git a/backend/modules/exportMethods/methods/postUtils.py b/debiaiServer/modules/exportMethods/methods/postUtils.py similarity index 95% rename from backend/modules/exportMethods/methods/postUtils.py rename to debiaiServer/modules/exportMethods/methods/postUtils.py index a37d18aff..0e80287a3 100644 --- a/backend/modules/exportMethods/methods/postUtils.py +++ b/debiaiServer/modules/exportMethods/methods/postUtils.py @@ -1,4 +1,4 @@ -from backend.modules.exportMethods.exportClass import ExportType, ExportMethod +from debiaiServer.modules.exportMethods.exportClass import ExportType, ExportMethod import requests ############################################################################# diff --git a/backend/requirements.txt b/debiaiServer/requirements.txt similarity index 100% rename from backend/requirements.txt rename to debiaiServer/requirements.txt diff --git a/backend/server.py b/debiaiServer/server.py similarity index 64% rename from backend/server.py rename to debiaiServer/server.py index 22884de8a..a06135426 100644 --- a/backend/server.py +++ b/debiaiServer/server.py @@ -1,5 +1,5 @@ from threading import Timer -from backend.websrv import start_server, open_browser +from debiaiServer.websrv import start_server, open_browser def run(): diff --git a/backend/setup.cfg b/debiaiServer/setup.cfg similarity index 100% rename from backend/setup.cfg rename to debiaiServer/setup.cfg diff --git a/backend/swagger.yaml b/debiaiServer/swagger.yaml similarity index 93% rename from backend/swagger.yaml rename to debiaiServer/swagger.yaml index 212489b8f..eebd8b772 100644 --- a/backend/swagger.yaml +++ b/debiaiServer/swagger.yaml @@ -1,7 +1,7 @@ swagger: "2.0" info: version: 0.27.1 - title: DebiAI_BACKEND_API + title: DebiAI_debiaiServer_API description: DebiAI backend api contact: email: debiai@irt-systemx.fr @@ -11,8 +11,8 @@ info: paths: /version: get: - summary: Ping to check if the backend is running - operationId: backend.controller.projects.ping + summary: Ping to check if the debiaiServer is running + operationId: debiaiServer.controller.projects.ping responses: 200: description: The server is online @@ -22,7 +22,7 @@ paths: get: summary: Get data providers list and status tags: [Data Providers] - operationId: backend.controller.dataProviders.get_data_providers + operationId: debiaiServer.controller.dataProviders.get_data_providers responses: 200: description: List of data providers @@ -34,7 +34,7 @@ paths: post: summary: Add data provider to data providers list tags: [Data Providers] - operationId: backend.controller.dataProviders.post_data_providers + operationId: debiaiServer.controller.dataProviders.post_data_providers parameters: - name: data in: body @@ -63,7 +63,7 @@ paths: delete: summary: Delete data providers from the list tags: [Data Providers] - operationId: backend.controller.dataProviders.delete_data_providers + operationId: debiaiServer.controller.dataProviders.delete_data_providers parameters: - name: dataProviderId in: path @@ -80,7 +80,7 @@ paths: get: summary: Get general informations about a data provider, like his version or the max number sample for each type of request tags: [Data Providers] - operationId: 
backend.controller.dataProviders.get_data_provider_info + operationId: debiaiServer.controller.dataProviders.get_data_provider_info parameters: - name: dataProviderId in: path @@ -126,7 +126,7 @@ paths: get: summary: Get the projects overview tags: [Project] - operationId: backend.controller.projects.get_projects + operationId: debiaiServer.controller.projects.get_projects responses: 200: description: List of project overviews @@ -138,7 +138,7 @@ paths: post: summary: Post a new project tags: [Project] - operationId: backend.controller.pythonModuleDp.post_project + operationId: debiaiServer.controller.pythonModuleDp.post_project parameters: - name: data in: body @@ -179,7 +179,7 @@ paths: get: summary: Get the projects overview for a data provider tags: [Project] - operationId: backend.controller.projects.get_data_providers_project + operationId: debiaiServer.controller.projects.get_data_providers_project parameters: - name: dataProviderId in: path @@ -197,7 +197,7 @@ paths: get: summary: Get project name, nb of models & nb of selections (overviews of a project) tags: [Project] - operationId: backend.controller.projects.get_project + operationId: debiaiServer.controller.projects.get_project parameters: - name: dataProviderId in: path @@ -216,7 +216,7 @@ paths: delete: summary: remove a project from ID tags: [Project] - operationId: backend.controller.projects.delete_project + operationId: debiaiServer.controller.projects.delete_project parameters: - name: dataProviderId in: path @@ -236,7 +236,7 @@ paths: post: summary: Get the project data id list tags: [Project] - operationId: backend.controller.projects.get_data_id_list + operationId: debiaiServer.controller.projects.get_data_id_list parameters: - name: dataProviderId in: path @@ -290,7 +290,7 @@ paths: post: summary: add a new data blocks level structure tags: [Project] - operationId: backend.controller.pythonModuleDp.post_block_levels + operationId: debiaiServer.controller.pythonModuleDp.post_block_levels parameters: - name: dataProviderId in: path @@ -348,7 +348,7 @@ paths: post: summary: add a new expected results structure tags: [Project] - operationId: backend.controller.pythonModuleDp.post_resultsStructure + operationId: debiaiServer.controller.pythonModuleDp.post_resultsStructure parameters: - name: dataProviderId in: path @@ -399,7 +399,7 @@ paths: post: summary: add a model tags: [Model] - operationId: backend.controller.models.post_model + operationId: debiaiServer.controller.models.post_model parameters: - name: dataProviderId in: path @@ -436,7 +436,7 @@ paths: get: summary: Get a model results id list tags: [Model] - operationId: backend.controller.models.get_model_id_list + operationId: debiaiServer.controller.models.get_model_id_list parameters: - name: dataProviderId in: path @@ -463,7 +463,7 @@ paths: delete: summary: remove a model tags: [Model] - operationId: backend.controller.models.delete_model + operationId: debiaiServer.controller.models.delete_model parameters: - name: dataProviderId in: path @@ -487,7 +487,7 @@ paths: : post: summary: Add a results to a model tags: [Model] - operationId: backend.controller.pythonModuleDp.add_results_dict + operationId: debiaiServer.controller.pythonModuleDp.add_results_dict parameters: - name: dataProviderId in: path @@ -528,7 +528,7 @@ paths: : post: summary: Get the model results from a sample list tags: [Model] - operationId: backend.controller.models.get_results + operationId: debiaiServer.controller.models.get_results parameters: - name: dataProviderId in: path @@ -570,7 
+570,7 @@ paths: post: summary: add a tree to an existing project block tree tags: [Block] - operationId: backend.controller.pythonModuleDp.post_block_tree + operationId: debiaiServer.controller.pythonModuleDp.post_block_tree parameters: - name: dataProviderId in: path @@ -605,7 +605,7 @@ paths: post: summary: get a project tree form a sample list tags: [Block] - operationId: backend.controller.data.get_data + operationId: debiaiServer.controller.data.get_data parameters: - name: dataProviderId in: path @@ -647,7 +647,7 @@ paths: get: summary: Get the project selections tags: [Selection] - operationId: backend.controller.selection.get_selections + operationId: debiaiServer.controller.selection.get_selections parameters: - name: dataProviderId in: path @@ -668,7 +668,7 @@ paths: post: summary: add a selection tags: [Selection] - operationId: backend.controller.selection.post_selection + operationId: debiaiServer.controller.selection.post_selection parameters: - name: dataProviderId in: path @@ -710,7 +710,7 @@ paths: : get: summary: Get a project selection id list tags: [Selection] - operationId: backend.controller.selection.get_selection_id_list + operationId: debiaiServer.controller.selection.get_selection_id_list parameters: - name: dataProviderId in: path @@ -737,7 +737,7 @@ paths: delete: summary: delete a selection tags: [Selection] - operationId: backend.controller.selection.delete_selection + operationId: debiaiServer.controller.selection.delete_selection parameters: - name: dataProviderId in: path @@ -760,7 +760,7 @@ paths: get: summary: Get all layouts tags: [Layouts] - operationId: backend.controller.layouts.get_layouts + operationId: debiaiServer.controller.layouts.get_layouts responses: 200: description: Layouts for all projects @@ -772,7 +772,7 @@ paths: post: summary: Add a layout tags: [Layouts] - operationId: backend.controller.layouts.post_layout + operationId: debiaiServer.controller.layouts.post_layout parameters: - name: data in: body @@ -819,7 +819,7 @@ paths: delete: summary: Delete a layout tags: [Layouts] - operationId: backend.controller.layouts.delete_layout + operationId: debiaiServer.controller.layouts.delete_layout parameters: - name: id in: path @@ -839,7 +839,7 @@ paths: summary: Get all widget configurations overview, return the number of configurations for each widget tags: [Widget configurations] - operationId: backend.controller.widgetConfigurations.get_all_configurations + operationId: debiaiServer.controller.widgetConfigurations.get_all_configurations responses: 200: description: Widget configurations number for each widget @@ -855,7 +855,7 @@ paths: get: summary: Get the widget configurations tags: [Widget configurations] - operationId: backend.controller.widgetConfigurations.get_widget_configurations + operationId: debiaiServer.controller.widgetConfigurations.get_widget_configurations parameters: - name: widgetKey in: path @@ -897,7 +897,7 @@ paths: post: summary: Add a widget configuration tags: [Widget configurations] - operationId: backend.controller.widgetConfigurations.post_configuration + operationId: debiaiServer.controller.widgetConfigurations.post_configuration parameters: - name: widgetKey in: path @@ -942,7 +942,7 @@ paths: delete: summary: Delete a widget configuration tags: [Widget configurations] - operationId: backend.controller.widgetConfigurations.delete_configuration + operationId: debiaiServer.controller.widgetConfigurations.delete_configuration parameters: - name: widgetKey in: path @@ -966,7 +966,7 @@ paths: get: summary: Get the 
application export methods tags: [Export] - operationId: backend.controller.exportMethods.get_export_methods + operationId: debiaiServer.controller.exportMethods.get_export_methods responses: 200: description: Export method list @@ -997,7 +997,7 @@ paths: post: summary: Create an export method for the app tags: [Export] - operationId: backend.controller.exportMethods.post_export_method + operationId: debiaiServer.controller.exportMethods.post_export_method parameters: - name: data in: body @@ -1029,7 +1029,7 @@ paths: delete: summary: Remove an export method for the app tags: [Export] - operationId: backend.controller.exportMethods.delete_export_method + operationId: debiaiServer.controller.exportMethods.delete_export_method parameters: - name: exportMethodId in: path @@ -1045,7 +1045,7 @@ paths: post: summary: Export data with an export method tags: [Export] - operationId: backend.controller.exportMethods.exportData + operationId: debiaiServer.controller.exportMethods.exportData parameters: - name: exportMethodId in: path @@ -1063,7 +1063,7 @@ paths: post: summary: Export a selected sample id list from an export method tags: [Export] - operationId: backend.controller.exportMethods.exportSelection + operationId: debiaiServer.controller.exportMethods.exportSelection parameters: - name: dataProviderId in: path @@ -1103,7 +1103,7 @@ paths: get: summary: Get all Algo providers and their algorithms tags: [AlgoProviders] - operationId: backend.controller.algoProviders.get_algo_providers + operationId: debiaiServer.controller.algoProviders.get_algo_providers responses: 200: description: Algorithms list @@ -1116,7 +1116,7 @@ paths: post: summary: Add an Algo provider tags: [AlgoProviders] - operationId: backend.controller.algoProviders.post_algo_provider + operationId: debiaiServer.controller.algoProviders.post_algo_provider parameters: - name: data in: body @@ -1144,7 +1144,7 @@ paths: delete: summary: Delete an Algo provider tags: [AlgoProviders] - operationId: backend.controller.algoProviders.delete_algo_provider + operationId: debiaiServer.controller.algoProviders.delete_algo_provider parameters: - name: name in: path @@ -1162,7 +1162,7 @@ paths: post: summary: Use an algorithm of an Algo provider tags: [AlgoProviders] - operationId: backend.controller.algoProviders.use_algo + operationId: debiaiServer.controller.algoProviders.use_algo parameters: - name: algoProviderName in: path @@ -1229,7 +1229,7 @@ paths: post: summary: Calculate pearson correlation between rows tags: [Statistical operations] - operationId: backend.controller.statisticalOperations.pearsonCorrelation + operationId: debiaiServer.controller.statisticalOperations.pearsonCorrelation parameters: - name: data in: body @@ -1260,7 +1260,7 @@ paths: post: summary: Calculate spearman correlation between rows tags: [Statistical operations] - operationId: backend.controller.statisticalOperations.spearmanCorrelation + operationId: debiaiServer.controller.statisticalOperations.spearmanCorrelation parameters: - name: data in: body @@ -1291,7 +1291,7 @@ paths: post: summary: Calculate mutual informations tags: [Statistical operations] - operationId: backend.controller.statisticalOperations.mutualInformation + operationId: debiaiServer.controller.statisticalOperations.mutualInformation parameters: - name: data in: body @@ -1343,7 +1343,7 @@ paths: post: summary: Calculate the mutual information between variables tags: [Statistical operations] - operationId: backend.controller.statisticalOperations.higherDimensionMutualInformation + 
operationId: debiaiServer.controller.statisticalOperations.higherDimensionMutualInformation parameters: - name: data in: body @@ -1380,7 +1380,7 @@ paths: post: summary: Calculate matrix mutual informations and the higher Dimension tags: [Statistical operations] - operationId: backend.controller.statisticalOperations.mutualAndHigherInformation + operationId: debiaiServer.controller.statisticalOperations.mutualAndHigherInformation parameters: - name: data in: body diff --git a/backend/tests/README.md b/debiaiServer/tests/README.md similarity index 100% rename from backend/tests/README.md rename to debiaiServer/tests/README.md diff --git a/backend/tests/__init__.py b/debiaiServer/tests/__init__.py similarity index 100% rename from backend/tests/__init__.py rename to debiaiServer/tests/__init__.py diff --git a/backend/tests/test_algo_providers.py b/debiaiServer/tests/test_algo_providers.py similarity index 100% rename from backend/tests/test_algo_providers.py rename to debiaiServer/tests/test_algo_providers.py diff --git a/backend/tests/test_data_providers.py b/debiaiServer/tests/test_data_providers.py similarity index 100% rename from backend/tests/test_data_providers.py rename to debiaiServer/tests/test_data_providers.py diff --git a/backend/tests/test_layouts.py b/debiaiServer/tests/test_layouts.py similarity index 100% rename from backend/tests/test_layouts.py rename to debiaiServer/tests/test_layouts.py diff --git a/backend/tests/test_pythonModuleDataProvider.py b/debiaiServer/tests/test_pythonModuleDataProvider.py similarity index 100% rename from backend/tests/test_pythonModuleDataProvider.py rename to debiaiServer/tests/test_pythonModuleDataProvider.py diff --git a/backend/tests/test_widget_configurations.py b/debiaiServer/tests/test_widget_configurations.py similarity index 100% rename from backend/tests/test_widget_configurations.py rename to debiaiServer/tests/test_widget_configurations.py diff --git a/backend/utils/__init__.py b/debiaiServer/utils/__init__.py similarity index 100% rename from backend/utils/__init__.py rename to debiaiServer/utils/__init__.py diff --git a/backend/utils/layouts/__init__.py b/debiaiServer/utils/layouts/__init__.py similarity index 100% rename from backend/utils/layouts/__init__.py rename to debiaiServer/utils/layouts/__init__.py diff --git a/backend/utils/layouts/layouts.py b/debiaiServer/utils/layouts/layouts.py similarity index 98% rename from backend/utils/layouts/layouts.py rename to debiaiServer/utils/layouts/layouts.py index 0fdc654e3..dcce4155b 100644 --- a/backend/utils/layouts/layouts.py +++ b/debiaiServer/utils/layouts/layouts.py @@ -1,6 +1,6 @@ import os import json -import backend.utils.utils as utils +import debiaiServer.utils.utils as utils import uuid diff --git a/backend/utils/utils.py b/debiaiServer/utils/utils.py similarity index 94% rename from backend/utils/utils.py rename to debiaiServer/utils/utils.py index 2849502f3..01840372a 100644 --- a/backend/utils/utils.py +++ b/debiaiServer/utils/utils.py @@ -7,7 +7,7 @@ def get_app_version(): # Read the version from the API YAML file try: - with open("backend/swagger.yaml") as f: + with open("debiaiServer/swagger.yaml") as f: data = yaml.load(f, Loader=SafeLoader) return data["info"]["version"] except Exception as e: diff --git a/backend/utils/widgetConfigurations/__init__.py b/debiaiServer/utils/widgetConfigurations/__init__.py similarity index 100% rename from backend/utils/widgetConfigurations/__init__.py rename to debiaiServer/utils/widgetConfigurations/__init__.py diff --git 
a/backend/utils/widgetConfigurations/widgetConfigurations.py b/debiaiServer/utils/widgetConfigurations/widgetConfigurations.py similarity index 98% rename from backend/utils/widgetConfigurations/widgetConfigurations.py rename to debiaiServer/utils/widgetConfigurations/widgetConfigurations.py index 8fddf1d59..5330e50bb 100644 --- a/backend/utils/widgetConfigurations/widgetConfigurations.py +++ b/debiaiServer/utils/widgetConfigurations/widgetConfigurations.py @@ -1,6 +1,6 @@ import os import json -import backend.utils.utils as utils +import debiaiServer.utils.utils as utils import uuid CONF_PATH = "data/widgetConfigurations.json" diff --git a/backend/websrv.py b/debiaiServer/websrv.py similarity index 95% rename from backend/websrv.py rename to debiaiServer/websrv.py index b90720625..2b0485209 100644 --- a/backend/websrv.py +++ b/debiaiServer/websrv.py @@ -7,9 +7,9 @@ from threading import Timer from flask_cors import CORS from flask import send_from_directory, request, Response -from backend.init import init -from backend.utils.utils import get_app_version -from backend.config.init_config import DEBUG_COLOR +from debiaiServer.init import init +from debiaiServer.utils.utils import get_app_version +from debiaiServer.config.init_config import DEBUG_COLOR DEV_FRONTEND_URL = "http://localhost:8080/" PORT = 3000 diff --git a/docker-compose-build.yml b/docker-compose-build.yml index 9766d97aa..88bb570c5 100644 --- a/docker-compose-build.yml +++ b/docker-compose-build.yml @@ -6,7 +6,7 @@ services: ports: - "3000:3000" volumes: - - ./debiai_data:/backend/data + - ./debiai_data:/debiaiServer/data environment: # Data providers (DEBIAI_DATA_PROVIDER_=) Ref: https://debiai.irt-systemx.fr/dataInsertion/dataProviders/ # - DEBIAI_WEB_DATA_PROVIDER_my-web-provider1=http://localhost:3010/debiai diff --git a/makefile b/makefile index 548102a98..b95e5c699 100644 --- a/makefile +++ b/makefile @@ -2,38 +2,38 @@ # Install dependencies install: - cd backend && pip install -r requirements.txt + cd debiaiServer && pip install -r requirements.txt cd frontend && npm install # Run the application in development mode -run_backend: - cd backend && python websrv.py +run_debiaiServer: + cd debiaiServer && python websrv.py run_frontend: cd frontend && npm run serve start: - make run_backend & make run_frontend + make run_debiaiServer & make run_frontend code: - code backend + code debiaiServer code frontend # Code quality format: # ----- Formatting Python code with Black - cd backend && black . + cd debiaiServer && black . # ----- Formatting JavaScript code with Prettier cd frontend && npm run prettier check: # ----- Validating Black code style - cd backend && black --check --diff . + cd debiaiServer && black --check --diff . # ----- Validating Flake8 code style - cd backend && flake8 . + cd debiaiServer && flake8 . 
# ----- Validating Prettier code style cd frontend && npm run prettier:check diff --git a/setup.py b/setup.py index 58cca091f..4b2d0fb5d 100644 --- a/setup.py +++ b/setup.py @@ -1,9 +1,9 @@ from setuptools import setup, find_packages setup( - name="backend", + name="debiai server", version="0.1.0", - packages=find_packages(include=["backend", "backend.*"]), + packages=find_packages(include=["debiaiServer", "debiaiServer.*"]), include_package_data=True, install_requires=[ "Flask==2.0.3", @@ -24,7 +24,7 @@ ], entry_points={ "console_scripts": [ - "debiai-start=backend.server:run", + "debiai-start=debiaiServer.server:run", ], }, author="IRT-Systemx", From 274d91eff4146cdcb30a5fcd2166fbd636a1f811 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Thu, 4 Jul 2024 10:58:34 +0200 Subject: [PATCH 23/97] new workflow pipeline for debiai gui --- .github/workflows/debiai-server-publish.yml | 38 +++++++++++++++++++++ 1 file changed, 38 insertions(+) create mode 100644 .github/workflows/debiai-server-publish.yml diff --git a/.github/workflows/debiai-server-publish.yml b/.github/workflows/debiai-server-publish.yml new file mode 100644 index 000000000..8eeb740dc --- /dev/null +++ b/.github/workflows/debiai-server-publish.yml @@ -0,0 +1,38 @@ +# This workflow will upload a Debiai-server Package using Twine when a release is created +# For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries + +# This workflow uses actions that are not certified by GitHub. +# They are provided by a third-party and are governed by +# separate terms of service, privacy policy, and support +# documentation. + +name: build + +on: + push: + branches: + - main + - 205-debiai-easy-start-module-to-launch-debiai-standlalone +jobs: + deploy: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + - name: Set up Python + uses: actions/setup-python@v3 + with: + python-version: "3.x" + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install build wheel + - name: Build + run: | + python setup.py sdist bdist_wheel + - name: Publish package + uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29 + with: + user: __token__ + password: ${{ secrets.PYPI_API_TOKEN }} + packages_dir: dist From b0735c573e8c16c06cc29788277bb58fe3d1b606 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Thu, 4 Jul 2024 11:27:33 +0200 Subject: [PATCH 24/97] setuptools added to pipeline --- .github/workflows/debiai-server-publish.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/debiai-server-publish.yml b/.github/workflows/debiai-server-publish.yml index 8eeb740dc..9999118f5 100644 --- a/.github/workflows/debiai-server-publish.yml +++ b/.github/workflows/debiai-server-publish.yml @@ -26,6 +26,7 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip + pip install setuptools wheel pip install build wheel - name: Build run: | From 7ad561975cb5ba83c1262dc1f2a41664f8fcc06d Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Thu, 4 Jul 2024 13:20:50 +0200 Subject: [PATCH 25/97] new name --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 4b2d0fb5d..bdc6b276d 100644 --- a/setup.py +++ b/setup.py @@ -1,7 +1,7 @@ from setuptools import setup, find_packages setup( - name="debiai server", + name="debiai-gui", version="0.1.0", packages=find_packages(include=["debiaiServer", "debiaiServer.*"]), include_package_data=True, 
From a923319309090c1eb200272812e129614541ca74 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Thu, 4 Jul 2024 14:19:27 +0200 Subject: [PATCH 26/97] version check for back and front and rename --- .github/workflows/debiai-gui-publish.yml | 95 +++++++++++++++++++++ .github/workflows/debiai-server-publish.yml | 39 --------- 2 files changed, 95 insertions(+), 39 deletions(-) create mode 100644 .github/workflows/debiai-gui-publish.yml delete mode 100644 .github/workflows/debiai-server-publish.yml diff --git a/.github/workflows/debiai-gui-publish.yml b/.github/workflows/debiai-gui-publish.yml new file mode 100644 index 000000000..d4c032867 --- /dev/null +++ b/.github/workflows/debiai-gui-publish.yml @@ -0,0 +1,95 @@ +# This workflow will upload a Debiai-gui Package using Twine when a release is created +# For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries + +# This workflow uses actions that are not certified by GitHub. +# They are provided by a third-party and are governed by +# separate terms of service, privacy policy, and support +# documentation. + +name: build + +on: + push: + branches: + - main + - 205-debiai-easy-start-module-to-launch-debiai-standlalone +jobs: + version-upgrade-check: # Check that the version is greater than the previous commit version + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + + - name: Check that the version in backend and frontend are the same + id: version-check + run: | + cd frontend + FRONTEND_VERSION=$(cat package.json | grep -m1 version | cut -d '"' -f 4) + cd ../backend + BACKEND_VERSION=$(cat swagger.yaml | grep -m1 version | cut -d ':' -f 2 | sed 's/ //g') + if [ "$FRONTEND_VERSION" != "$BACKEND_VERSION" ]; then + echo "Version mismatch: frontend/package.json version '$FRONTEND_VERSION' != backend/swagger.yaml version '$BACKEND_VERSION'." + exit 1 + fi + echo "Version match: frontend/package.json version '$FRONTEND_VERSION' == backend/swagger.yaml version '$BACKEND_VERSION'." + echo "BRANCH_VERSION=$FRONTEND_VERSION" >> $GITHUB_OUTPUT + + - uses: actions/checkout@v3 + with: + ref: main + + - name: Check that the version is greater than the previous commit version + run: | + BRANCH_VERSION=${{ steps.version-check.outputs.BRANCH_VERSION }} + cd frontend + PREVIOUS_VERSION=$(cat package.json | grep -m1 version | cut -d '"' -f 4) + + echo "PREVIOUS_VERSION=$PREVIOUS_VERSION" + echo "BRANCH_VERSION=$BRANCH_VERSION" + + # If pervious version is beta, ignore test + if [[ $PREVIOUS_VERSION == *"beta"* ]]; then + echo "Beta version detected, skipping version upgrade check." + exit 0 + fi + + if [ "$BRANCH_VERSION" == "" ]; then + echo "No version found in current branch." + exit 1 + fi + + if [ "$PREVIOUS_VERSION" == "" ]; then + echo "No version found in main branch." + exit 1 + fi + + if [[ $PREVIOUS_VERSION == $BRANCH_VERSION ]]; then + echo "Version not upgraded: frontend/package.json version '$PREVIOUS_VERSION' == branch version '$BRANCH_VERSION'." 
+ exit 1 + fi + + if [[ $PREVIOUS_VERSION > $BRANCH_VERSION ]]; then + echo "Version not upgraded: frontend/package.json version '$PREVIOUS + deploy: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + - name: Set up Python + uses: actions/setup-python@v3 + with: + python-version: "3.x" + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install setuptools wheel + pip install build wheel + - name: Build + run: | + python setup.py sdist bdist_wheel + - name: Publish package + uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29 + with: + user: __token__ + password: ${{ secrets.PYPI_API_TOKEN }} + packages_dir: dist diff --git a/.github/workflows/debiai-server-publish.yml b/.github/workflows/debiai-server-publish.yml deleted file mode 100644 index 9999118f5..000000000 --- a/.github/workflows/debiai-server-publish.yml +++ /dev/null @@ -1,39 +0,0 @@ -# This workflow will upload a Debiai-server Package using Twine when a release is created -# For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries - -# This workflow uses actions that are not certified by GitHub. -# They are provided by a third-party and are governed by -# separate terms of service, privacy policy, and support -# documentation. - -name: build - -on: - push: - branches: - - main - - 205-debiai-easy-start-module-to-launch-debiai-standlalone -jobs: - deploy: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v3 - - name: Set up Python - uses: actions/setup-python@v3 - with: - python-version: "3.x" - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install setuptools wheel - pip install build wheel - - name: Build - run: | - python setup.py sdist bdist_wheel - - name: Publish package - uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29 - with: - user: __token__ - password: ${{ secrets.PYPI_API_TOKEN }} - packages_dir: dist From 6738b61d0d8e1077de59789d04dec388c679213e Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Thu, 4 Jul 2024 14:24:20 +0200 Subject: [PATCH 27/97] rename backend dir in run section --- .github/workflows/debiai-gui-publish.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/debiai-gui-publish.yml b/.github/workflows/debiai-gui-publish.yml index d4c032867..e4ad3b7ea 100644 --- a/.github/workflows/debiai-gui-publish.yml +++ b/.github/workflows/debiai-gui-publish.yml @@ -25,13 +25,13 @@ jobs: run: | cd frontend FRONTEND_VERSION=$(cat package.json | grep -m1 version | cut -d '"' -f 4) - cd ../backend + cd ../debiaiServer BACKEND_VERSION=$(cat swagger.yaml | grep -m1 version | cut -d ':' -f 2 | sed 's/ //g') if [ "$FRONTEND_VERSION" != "$BACKEND_VERSION" ]; then - echo "Version mismatch: frontend/package.json version '$FRONTEND_VERSION' != backend/swagger.yaml version '$BACKEND_VERSION'." + echo "Version mismatch: frontend/package.json version '$FRONTEND_VERSION' != debiaiServer/swagger.yaml version '$BACKEND_VERSION'." exit 1 fi - echo "Version match: frontend/package.json version '$FRONTEND_VERSION' == backend/swagger.yaml version '$BACKEND_VERSION'." + echo "Version match: frontend/package.json version '$FRONTEND_VERSION' == debiaiServer/swagger.yaml version '$BACKEND_VERSION'." 
echo "BRANCH_VERSION=$FRONTEND_VERSION" >> $GITHUB_OUTPUT - uses: actions/checkout@v3 From c075c1e8fa2985a9083bafba6b69dee02055f380 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Thu, 4 Jul 2024 15:56:58 +0200 Subject: [PATCH 28/97] version update --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index bdc6b276d..ced2bb5cb 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ setup( name="debiai-gui", - version="0.1.0", + version="0.1.2", packages=find_packages(include=["debiaiServer", "debiaiServer.*"]), include_package_data=True, install_requires=[ From 07d47ed651d599eb9119d0b93ee6811ac92e0de7 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Fri, 5 Jul 2024 14:52:06 +0200 Subject: [PATCH 29/97] ignore files updated --- .dockerignore | 12 ++++++------ .gitignore | 38 +++++++++++++++++--------------------- 2 files changed, 23 insertions(+), 27 deletions(-) diff --git a/.dockerignore b/.dockerignore index 51c8338f4..1be3c1adf 100644 --- a/.dockerignore +++ b/.dockerignore @@ -7,12 +7,12 @@ kubernetes.yaml LICENSE # Back -backend/__pycache__/ -backend/data/ -backend/tests/ -backend/dist/ -backend/.gitignore -backend/README.md +debiaiServer/__pycache__/ +debiaiServer/data/ +debiaiServer/tests/ +debiaiServer/dist/ +debiaiServer/.gitignore +debiaiServer/README.md # Front frontend/node_modules/ diff --git a/.gitignore b/.gitignore index 2cedd5e05..4fc80f53c 100644 --- a/.gitignore +++ b/.gitignore @@ -1,32 +1,28 @@ -dist/ -deployment/ -.vscode/ - -# Back -backend/data -.coverage +# Byte-compiled / optimized / DLL files __pycache__/ -# Backend.egg-info -backend.egg-info/ +# Build and packaging +dist/ +build/ +*data/ +*.egg-info/ -# Front +# Dependency directories frontend/node_modules/ -# Build -build/ - -# Data -data/ +# coverage +.coverage -# Ignore the notebookDebiai.ipynb file +# Tests and utils notebookDebiai.ipynb - -# Ignore the test_imports.py file test_imports.py - -# Ignore the tree_project.md file tree_project.md -# Ignore the build and run file +# Scripts build_and_run.sh + +# Deployement +deployment/ + +# VSCODE +.vscode/ From aafff63cd2b584f86f22011511793cc71df4dd3a Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Fri, 5 Jul 2024 14:58:23 +0200 Subject: [PATCH 30/97] rename and version var --- Dockerfile | 6 +++--- debiaiServer/websrv.py | 1 - setup.py | 6 ++++-- 3 files changed, 7 insertions(+), 6 deletions(-) diff --git a/Dockerfile b/Dockerfile index d15099766..75cd98f4f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -5,10 +5,10 @@ COPY frontend/ . RUN npm install RUN npm run build -# DebiAI Python Backend +# DebiAI Python debiaiServer (old backend dir) FROM python:3.10.12-slim-bullseye -WORKDIR /backend -COPY backend/ . +WORKDIR /debiaiServer +COPY debiaiServer/ . 
RUN pip install --trusted-host pypi.python.org -r requirements.txt COPY --from=build-stage /frontend/dist dist ENV FLASK_ENV production diff --git a/debiaiServer/websrv.py b/debiaiServer/websrv.py index 2b0485209..2f89d9945 100644 --- a/debiaiServer/websrv.py +++ b/debiaiServer/websrv.py @@ -4,7 +4,6 @@ import webbrowser import psutil from termcolor import colored -from threading import Timer from flask_cors import CORS from flask import send_from_directory, request, Response from debiaiServer.init import init diff --git a/setup.py b/setup.py index ced2bb5cb..4e61d64ca 100644 --- a/setup.py +++ b/setup.py @@ -1,8 +1,10 @@ from setuptools import setup, find_packages +VERSION = "0.27.1" # Same as DebiAI app (c.f swagger.yml) + setup( name="debiai-gui", - version="0.1.2", + version=VERSION, packages=find_packages(include=["debiaiServer", "debiaiServer.*"]), include_package_data=True, install_requires=[ @@ -24,7 +26,7 @@ ], entry_points={ "console_scripts": [ - "debiai-start=debiaiServer.server:run", + "debiai =debiaiServer.server:run", ], }, author="IRT-Systemx", From 593447b2d95a007e2589bced715bcb2efd680bc8 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Fri, 5 Jul 2024 15:32:54 +0200 Subject: [PATCH 31/97] pipeline actions for build and frontend and command to start debiai --- .github/workflows/debiai-gui-publish.yml | 13 ++++++++++++- setup.py | 2 +- 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/.github/workflows/debiai-gui-publish.yml b/.github/workflows/debiai-gui-publish.yml index e4ad3b7ea..d14a1bca2 100644 --- a/.github/workflows/debiai-gui-publish.yml +++ b/.github/workflows/debiai-gui-publish.yml @@ -79,14 +79,25 @@ jobs: uses: actions/setup-python@v3 with: python-version: "3.x" + - name: Set up Node.js + uses: actions/setup-node@v3 + with: + node-version: "16" + - name: Build frontend + run: | + cd frontdend + npm install + npm run build - name: Install dependencies run: | python -m pip install --upgrade pip pip install setuptools wheel - pip install build wheel - name: Build run: | python setup.py sdist bdist_wheel + - name: Use Frontend build + run: | + cp -r frontend/dist debiaiServer/dist - name: Publish package uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29 with: diff --git a/setup.py b/setup.py index 4e61d64ca..56bbdcd56 100644 --- a/setup.py +++ b/setup.py @@ -26,7 +26,7 @@ ], entry_points={ "console_scripts": [ - "debiai =debiaiServer.server:run", + "debiai-start=debiaiServer.server:run", ], }, author="IRT-Systemx", From 03a4bc33967b24c2d8175e7d57683a3733ca1a21 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Fri, 5 Jul 2024 15:36:25 +0200 Subject: [PATCH 32/97] frontend dir --- .github/workflows/debiai-gui-publish.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/debiai-gui-publish.yml b/.github/workflows/debiai-gui-publish.yml index d14a1bca2..f8bafdd71 100644 --- a/.github/workflows/debiai-gui-publish.yml +++ b/.github/workflows/debiai-gui-publish.yml @@ -85,6 +85,7 @@ jobs: node-version: "16" - name: Build frontend run: | + mkdir frontend cd frontdend npm install npm run build From fd734d19ce19cf0b125dfe5be5ca92eee221dff1 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Fri, 5 Jul 2024 15:39:28 +0200 Subject: [PATCH 33/97] frontend test --- .github/workflows/debiai-gui-publish.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/debiai-gui-publish.yml b/.github/workflows/debiai-gui-publish.yml index f8bafdd71..af284169e 100644 --- 
a/.github/workflows/debiai-gui-publish.yml +++ b/.github/workflows/debiai-gui-publish.yml @@ -85,8 +85,7 @@ jobs: node-version: "16" - name: Build frontend run: | - mkdir frontend - cd frontdend + cd frontend npm install npm run build - name: Install dependencies From 57f58f67cfeb585d609eeb5628d896421f33ae89 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Fri, 5 Jul 2024 15:47:12 +0200 Subject: [PATCH 34/97] version updated --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 56bbdcd56..768198efb 100644 --- a/setup.py +++ b/setup.py @@ -1,6 +1,6 @@ from setuptools import setup, find_packages -VERSION = "0.27.1" # Same as DebiAI app (c.f swagger.yml) +VERSION = "0.27.2" # Same as DebiAI app (c.f swagger.yml) setup( name="debiai-gui", From 61626a23dc421300227254a87503b2b5f73aec09 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Fri, 5 Jul 2024 15:55:08 +0200 Subject: [PATCH 35/97] debiai command --- build_and_run.sh | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/build_and_run.sh b/build_and_run.sh index 213813a2b..5bf773ecc 100755 --- a/build_and_run.sh +++ b/build_and_run.sh @@ -1,7 +1,5 @@ -#!/bin/bash - # Remove previous build and dist directories -rm -rf build dist backend.egg-info +rm -rf build dist debiaiServer.egg-info # Generated source distribution and wheel distribution python3 setup.py sdist bdist_wheel From 2f934b4c7fd187cb0754d56aa9e4727a79c3f15f Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Mon, 8 Jul 2024 15:12:29 +0200 Subject: [PATCH 36/97] new version and erased beta version --- .github/workflows/debiai-gui-publish.yml | 6 ------ setup.py | 2 +- 2 files changed, 1 insertion(+), 7 deletions(-) diff --git a/.github/workflows/debiai-gui-publish.yml b/.github/workflows/debiai-gui-publish.yml index af284169e..f2f60ee7b 100644 --- a/.github/workflows/debiai-gui-publish.yml +++ b/.github/workflows/debiai-gui-publish.yml @@ -47,12 +47,6 @@ jobs: echo "PREVIOUS_VERSION=$PREVIOUS_VERSION" echo "BRANCH_VERSION=$BRANCH_VERSION" - # If pervious version is beta, ignore test - if [[ $PREVIOUS_VERSION == *"beta"* ]]; then - echo "Beta version detected, skipping version upgrade check." - exit 0 - fi - if [ "$BRANCH_VERSION" == "" ]; then echo "No version found in current branch." 
exit 1 diff --git a/setup.py b/setup.py index 768198efb..914ba4c32 100644 --- a/setup.py +++ b/setup.py @@ -1,6 +1,6 @@ from setuptools import setup, find_packages -VERSION = "0.27.2" # Same as DebiAI app (c.f swagger.yml) +VERSION = "0.27.3" setup( name="debiai-gui", From aff0e2b11b0797c28fd6ed71d3c9e561be627246 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Mon, 8 Jul 2024 15:15:34 +0200 Subject: [PATCH 37/97] update version --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 914ba4c32..0cdcc7bcc 100644 --- a/setup.py +++ b/setup.py @@ -1,6 +1,6 @@ from setuptools import setup, find_packages -VERSION = "0.27.3" +VERSION = "0.28.0" setup( name="debiai-gui", From ed78f9f27cebbb295537c28a950e583f71d37b40 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Mon, 8 Jul 2024 15:36:18 +0200 Subject: [PATCH 38/97] build then setup --- .github/workflows/debiai-gui-publish.yml | 56 ++---------------------- 1 file changed, 3 insertions(+), 53 deletions(-) diff --git a/.github/workflows/debiai-gui-publish.yml b/.github/workflows/debiai-gui-publish.yml index f2f60ee7b..1ab453318 100644 --- a/.github/workflows/debiai-gui-publish.yml +++ b/.github/workflows/debiai-gui-publish.yml @@ -14,56 +14,6 @@ on: - main - 205-debiai-easy-start-module-to-launch-debiai-standlalone jobs: - version-upgrade-check: # Check that the version is greater than the previous commit version - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v2 - - - name: Check that the version in backend and frontend are the same - id: version-check - run: | - cd frontend - FRONTEND_VERSION=$(cat package.json | grep -m1 version | cut -d '"' -f 4) - cd ../debiaiServer - BACKEND_VERSION=$(cat swagger.yaml | grep -m1 version | cut -d ':' -f 2 | sed 's/ //g') - if [ "$FRONTEND_VERSION" != "$BACKEND_VERSION" ]; then - echo "Version mismatch: frontend/package.json version '$FRONTEND_VERSION' != debiaiServer/swagger.yaml version '$BACKEND_VERSION'." - exit 1 - fi - echo "Version match: frontend/package.json version '$FRONTEND_VERSION' == debiaiServer/swagger.yaml version '$BACKEND_VERSION'." - echo "BRANCH_VERSION=$FRONTEND_VERSION" >> $GITHUB_OUTPUT - - - uses: actions/checkout@v3 - with: - ref: main - - - name: Check that the version is greater than the previous commit version - run: | - BRANCH_VERSION=${{ steps.version-check.outputs.BRANCH_VERSION }} - cd frontend - PREVIOUS_VERSION=$(cat package.json | grep -m1 version | cut -d '"' -f 4) - - echo "PREVIOUS_VERSION=$PREVIOUS_VERSION" - echo "BRANCH_VERSION=$BRANCH_VERSION" - - if [ "$BRANCH_VERSION" == "" ]; then - echo "No version found in current branch." - exit 1 - fi - - if [ "$PREVIOUS_VERSION" == "" ]; then - echo "No version found in main branch." - exit 1 - fi - - if [[ $PREVIOUS_VERSION == $BRANCH_VERSION ]]; then - echo "Version not upgraded: frontend/package.json version '$PREVIOUS_VERSION' == branch version '$BRANCH_VERSION'." 
- exit 1 - fi - - if [[ $PREVIOUS_VERSION > $BRANCH_VERSION ]]; then - echo "Version not upgraded: frontend/package.json version '$PREVIOUS deploy: runs-on: ubuntu-latest @@ -86,12 +36,12 @@ jobs: run: | python -m pip install --upgrade pip pip install setuptools wheel + - name: Use Frontend build + run: | + cp -r frontend/dist debiaiServer/dist - name: Build run: | python setup.py sdist bdist_wheel - - name: Use Frontend build - run: | - cp -r frontend/dist debiaiServer/dist - name: Publish package uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29 with: From 7b5aca8d62693a309c0fff732a2ddbd724ecb174 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Mon, 8 Jul 2024 15:41:10 +0200 Subject: [PATCH 39/97] moved frontend copy after build --- .github/workflows/debiai-gui-publish.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/debiai-gui-publish.yml b/.github/workflows/debiai-gui-publish.yml index 1ab453318..5c72f9695 100644 --- a/.github/workflows/debiai-gui-publish.yml +++ b/.github/workflows/debiai-gui-publish.yml @@ -32,13 +32,13 @@ jobs: cd frontend npm install npm run build + - name: Use Frontend build + run: | + cp -r /dist debiaiServer/dist - name: Install dependencies run: | python -m pip install --upgrade pip pip install setuptools wheel - - name: Use Frontend build - run: | - cp -r frontend/dist debiaiServer/dist - name: Build run: | python setup.py sdist bdist_wheel From 98608776906ac823fc274045065bc09a158f2b40 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Mon, 8 Jul 2024 15:44:15 +0200 Subject: [PATCH 40/97] frontend paths --- .github/workflows/debiai-gui-publish.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/debiai-gui-publish.yml b/.github/workflows/debiai-gui-publish.yml index 5c72f9695..a043f38a2 100644 --- a/.github/workflows/debiai-gui-publish.yml +++ b/.github/workflows/debiai-gui-publish.yml @@ -34,7 +34,7 @@ jobs: npm run build - name: Use Frontend build run: | - cp -r /dist debiaiServer/dist + cp -r dist ../debiaiServer/dist - name: Install dependencies run: | python -m pip install --upgrade pip From 2e0db8f3405825dc959c89bcb20466d6c3523e28 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Mon, 8 Jul 2024 15:48:01 +0200 Subject: [PATCH 41/97] frontend build action --- .github/workflows/debiai-gui-publish.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/debiai-gui-publish.yml b/.github/workflows/debiai-gui-publish.yml index a043f38a2..2b02845cb 100644 --- a/.github/workflows/debiai-gui-publish.yml +++ b/.github/workflows/debiai-gui-publish.yml @@ -32,8 +32,6 @@ jobs: cd frontend npm install npm run build - - name: Use Frontend build - run: | cp -r dist ../debiaiServer/dist - name: Install dependencies run: | From 289700edd7a641c593047aae49b596da8d7f6da6 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Mon, 8 Jul 2024 15:50:57 +0200 Subject: [PATCH 42/97] version updated --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 0cdcc7bcc..9c7fa1dc2 100644 --- a/setup.py +++ b/setup.py @@ -1,6 +1,6 @@ from setuptools import setup, find_packages -VERSION = "0.28.0" +VERSION = "0.28.1" setup( name="debiai-gui", From f5a4ee776a07167f4926ecf8273d23237e973c87 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Mon, 8 Jul 2024 16:06:36 +0200 Subject: [PATCH 43/97] version fetched from swagger --- setup.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 
9c7fa1dc2..982ed2664 100644 --- a/setup.py +++ b/setup.py @@ -1,6 +1,14 @@ from setuptools import setup, find_packages +import yaml -VERSION = "0.28.1" + +def get_version_from_swagger(): + with open("debiaiServer/swagger.yaml", "r") as f: + swagger_data = yaml.safe_load(f) + return swagger_data["info"]["version"] + + +VERSION = get_version_from_swagger() setup( name="debiai-gui", From 34e4d0b4dc378253853c3051ce5222ec0376bb08 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Tue, 9 Jul 2024 14:53:39 +0200 Subject: [PATCH 44/97] new command and yaml installation --- .github/workflows/debiai-gui-publish.yml | 1 + build_and_run.sh | 2 +- debiaiServer/server.py | 6 +++++- setup.py | 2 +- 4 files changed, 8 insertions(+), 3 deletions(-) diff --git a/.github/workflows/debiai-gui-publish.yml b/.github/workflows/debiai-gui-publish.yml index 2b02845cb..a87033bbd 100644 --- a/.github/workflows/debiai-gui-publish.yml +++ b/.github/workflows/debiai-gui-publish.yml @@ -36,6 +36,7 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip + pip install pyyaml pip install setuptools wheel - name: Build run: | diff --git a/build_and_run.sh b/build_and_run.sh index 5bf773ecc..fd985743c 100755 --- a/build_and_run.sh +++ b/build_and_run.sh @@ -8,4 +8,4 @@ python3 setup.py sdist bdist_wheel pip install . # Run the package -debiai-start +debiai-gui start diff --git a/debiaiServer/server.py b/debiaiServer/server.py index a06135426..32668170d 100644 --- a/debiaiServer/server.py +++ b/debiaiServer/server.py @@ -1,7 +1,11 @@ from threading import Timer +import sys from debiaiServer.websrv import start_server, open_browser def run(): Timer(1, open_browser).start() - start_server(reloader=False) + if len(sys.argv) > 1 and sys.argv[1] == "start": + start_server(reloader=False) + else: + print("Invalid command. Use 'debiai-gui start'") diff --git a/setup.py b/setup.py index 982ed2664..01d5e3405 100644 --- a/setup.py +++ b/setup.py @@ -34,7 +34,7 @@ def get_version_from_swagger(): ], entry_points={ "console_scripts": [ - "debiai-start=debiaiServer.server:run", + "debiai-gui=debiaiServer.server:run", ], }, author="IRT-Systemx", From e17419493ecb883192ae364e83eaada8f2f085d5 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Tue, 9 Jul 2024 14:57:06 +0200 Subject: [PATCH 45/97] update swagger version --- debiaiServer/swagger.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/debiaiServer/swagger.yaml b/debiaiServer/swagger.yaml index eebd8b772..8ade42421 100644 --- a/debiaiServer/swagger.yaml +++ b/debiaiServer/swagger.yaml @@ -1,6 +1,6 @@ swagger: "2.0" info: - version: 0.27.1 + version: 0.28.2 title: DebiAI_debiaiServer_API description: DebiAI backend api contact: From e187ccb42c52f1363a4943ceb7b3f78d7b86068a Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Wed, 10 Jul 2024 17:29:09 +0200 Subject: [PATCH 46/97] doc and command options --- README.md | 4 ++-- debiaiServer/server.py | 37 +++++++++++++++++++++++++++++++++---- setup.py | 10 ++-------- 3 files changed, 37 insertions(+), 14 deletions(-) diff --git a/README.md b/README.md index 0c846144c..4426ac5d0 100644 --- a/README.md +++ b/README.md @@ -15,9 +15,9 @@ -## Why DebiAI ? +## Why DebiAI Gui? -DebiAI is an open-source web application that aims to facilitate the process of developing Machine Learning models, especially in the stage of the project data analysis and the model performance comparison. +DebiAI Gui is an open source package that allows you to launch DebiAI in a standalone state. 
Thus by installing this module you can use quickly the DebiAI open-source web application that aims to facilitate the process of developing Machine Learning models, especially in the stage of the project data analysis and the model performance comparison. DebiAI provides data scientists with features to: diff --git a/debiaiServer/server.py b/debiaiServer/server.py index 32668170d..46d355e18 100644 --- a/debiaiServer/server.py +++ b/debiaiServer/server.py @@ -1,11 +1,40 @@ from threading import Timer import sys from debiaiServer.websrv import start_server, open_browser +from debiaiServer.utils.utils import get_app_version + +BRIGHT_CYAN = "\033[96m" +BRIGHT_GREEN = "\033[92m" +RESET = "\033[0m" # called to return to standard terminal text color + + +def print_bash_info(): + print( + f"{BRIGHT_CYAN}Usage:{RESET} debiai-gui [OPTIONS] COMMAND\n\n" + "\t" + "Use the line below to run the app: \n\n" + "\t\t" + "$ debiai-gui" + f"{BRIGHT_GREEN} start{RESET}\n\n" + f"{BRIGHT_CYAN}Options:{RESET}\n" + "\t" + f"{BRIGHT_GREEN}--version {RESET} Prints DebiAI version number.\n" + "\t" + f"{BRIGHT_CYAN}Commands:{RESET}\n" + "\t" + f"{BRIGHT_GREEN}start {RESET} Starts the DebiAI GUI and open it in a web browser.\n" + ) def run(): - Timer(1, open_browser).start() - if len(sys.argv) > 1 and sys.argv[1] == "start": - start_server(reloader=False) + if len(sys.argv) > 1: + if sys.argv[1] == "start": + Timer(1, open_browser).start() + start_server(reloader=False) + elif sys.argv[1] == "--version": + version = get_app_version() + print("DebiAI Version:" f"{BRIGHT_GREEN}{version}{RESET}") + else: + print_bash_info() else: - print("Invalid command. Use 'debiai-gui start'") + print_bash_info() diff --git a/setup.py b/setup.py index 01d5e3405..022aeed0a 100644 --- a/setup.py +++ b/setup.py @@ -1,14 +1,8 @@ from setuptools import setup, find_packages -import yaml +from debiaiServer.utils.utils import get_app_version -def get_version_from_swagger(): - with open("debiaiServer/swagger.yaml", "r") as f: - swagger_data = yaml.safe_load(f) - return swagger_data["info"]["version"] - - -VERSION = get_version_from_swagger() +VERSION = get_app_version() setup( name="debiai-gui", From 8fca906333b1e5b01c1d79f96418ecd5638fae7d Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Wed, 10 Jul 2024 17:37:48 +0200 Subject: [PATCH 47/97] added connexion dependency --- .github/workflows/debiai-gui-publish.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/debiai-gui-publish.yml b/.github/workflows/debiai-gui-publish.yml index a87033bbd..aad26136f 100644 --- a/.github/workflows/debiai-gui-publish.yml +++ b/.github/workflows/debiai-gui-publish.yml @@ -36,7 +36,7 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - pip install pyyaml + pip install connexion pip install setuptools wheel - name: Build run: | From 42ef09054373499af4ea3410ac00adb9ae5b9262 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Wed, 10 Jul 2024 17:48:28 +0200 Subject: [PATCH 48/97] requirements installed in the pipeline --- .github/workflows/debiai-gui-publish.yml | 2 +- debiaiServer/requirements.txt | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/debiai-gui-publish.yml b/.github/workflows/debiai-gui-publish.yml index aad26136f..d90032fba 100644 --- a/.github/workflows/debiai-gui-publish.yml +++ b/.github/workflows/debiai-gui-publish.yml @@ -36,7 +36,7 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - pip install connexion + pip 
install -r requirements.txt pip install setuptools wheel - name: Build run: | diff --git a/debiaiServer/requirements.txt b/debiaiServer/requirements.txt index 1ce3155be..ffcf31bca 100644 --- a/debiaiServer/requirements.txt +++ b/debiaiServer/requirements.txt @@ -12,4 +12,5 @@ openapi_spec_validator == 0.2.8 PyYAML == 6.0 cacheout == 0.14.1 termcolor==2.3.0 -werkzeug==2.2.2 \ No newline at end of file +werkzeug==2.2.2 +psutil==6.0.0 \ No newline at end of file From 69601be12f356e8a17c5baf73705701eb949c881 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Wed, 10 Jul 2024 17:50:55 +0200 Subject: [PATCH 49/97] requirements path --- .github/workflows/debiai-gui-publish.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/debiai-gui-publish.yml b/.github/workflows/debiai-gui-publish.yml index d90032fba..96684457a 100644 --- a/.github/workflows/debiai-gui-publish.yml +++ b/.github/workflows/debiai-gui-publish.yml @@ -36,7 +36,7 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - pip install -r requirements.txt + pip install -r ../debiaiServer/requirements.txt pip install setuptools wheel - name: Build run: | From 4d32f835ca282b500cb376dc99ecd7677b2bbf3d Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Wed, 10 Jul 2024 17:55:31 +0200 Subject: [PATCH 50/97] path corrected --- .github/workflows/debiai-gui-publish.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/debiai-gui-publish.yml b/.github/workflows/debiai-gui-publish.yml index 96684457a..1717d14b4 100644 --- a/.github/workflows/debiai-gui-publish.yml +++ b/.github/workflows/debiai-gui-publish.yml @@ -36,7 +36,7 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - pip install -r ../debiaiServer/requirements.txt + pip install -r debiaiServer/requirements.txt pip install setuptools wheel - name: Build run: | From e01584cd128eb0834f539b9c779967db63e2744a Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Thu, 11 Jul 2024 10:09:55 +0200 Subject: [PATCH 51/97] enhanced bash info --- debiaiServer/server.py | 1 - 1 file changed, 1 deletion(-) diff --git a/debiaiServer/server.py b/debiaiServer/server.py index 46d355e18..a5ac68c23 100644 --- a/debiaiServer/server.py +++ b/debiaiServer/server.py @@ -19,7 +19,6 @@ def print_bash_info(): f"{BRIGHT_CYAN}Options:{RESET}\n" "\t" f"{BRIGHT_GREEN}--version {RESET} Prints DebiAI version number.\n" - "\t" f"{BRIGHT_CYAN}Commands:{RESET}\n" "\t" f"{BRIGHT_GREEN}start {RESET} Starts the DebiAI GUI and open it in a web browser.\n" From 4ee86531f021ef44fbd4e09b665b4f77fac3ddef Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Thu, 11 Jul 2024 10:12:29 +0200 Subject: [PATCH 52/97] update frontend version --- frontend/package-lock.json | 25 ++++++++++++++----------- 1 file changed, 14 insertions(+), 11 deletions(-) diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 8e70d656b..1b7d57230 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -1,12 +1,12 @@ { "name": "debiai_frontend", - "version": "0.26.0", + "version": "0.27.1", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "debiai_frontend", - "version": "0.26.0", + "version": "0.27.1", "license": "Apache-2.0", "dependencies": { "axios": "^0.21.4", @@ -3705,6 +3705,8 @@ "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", "integrity": "sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==", "dev": true, + "optional": true, + 
"peer": true, "dependencies": { "fast-deep-equal": "^3.1.1", "json-schema-traverse": "^1.0.0", @@ -3720,7 +3722,9 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", - "dev": true + "dev": true, + "optional": true, + "peer": true }, "node_modules/ajv-keywords": { "version": "3.5.2", @@ -15750,7 +15754,6 @@ "integrity": "sha512-yl+5qhpjd8e1G4cMXfORkkBlvtPCIgmRf3IYCWYDKIQ7m+PPa5iTm4feiNmCMD6yGqQWMhhK/7M3oWGL9boKwg==", "dev": true, "requires": { - "@babel/core": "^7.12.16", "@babel/helper-compilation-targets": "^7.12.16", "@babel/helper-module-imports": "^7.12.13", "@babel/plugin-proposal-class-properties": "^7.12.13", @@ -15763,7 +15766,6 @@ "@vue/babel-plugin-jsx": "^1.0.3", "@vue/babel-preset-jsx": "^1.1.2", "babel-plugin-dynamic-import-node": "^2.3.3", - "core-js": "^3.8.3", "core-js-compat": "^3.8.3", "semver": "^7.3.4" } @@ -16527,15 +16529,14 @@ "resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-2.1.1.tgz", "integrity": "sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==", "dev": true, - "requires": { - "ajv": "^8.0.0" - }, + "requires": {}, "dependencies": { "ajv": { - "version": "8.12.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", + "version": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", "integrity": "sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==", "dev": true, + "optional": true, + "peer": true, "requires": { "fast-deep-equal": "^3.1.1", "json-schema-traverse": "^1.0.0", @@ -16547,7 +16548,9 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", - "dev": true + "dev": true, + "optional": true, + "peer": true } } }, From a1c6404ae6584d40342a176d0902079f62d7bcdf Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Thu, 11 Jul 2024 15:40:57 +0200 Subject: [PATCH 53/97] package data to include swagger --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index 022aeed0a..4a627ad9d 100644 --- a/setup.py +++ b/setup.py @@ -9,6 +9,7 @@ version=VERSION, packages=find_packages(include=["debiaiServer", "debiaiServer.*"]), include_package_data=True, + package_data={"debiaiServer": ["swagger.yaml"]}, install_requires=[ "Flask==2.0.3", "flask_cors==3.0.8", From e554d7fd382a739e005d553724a57ec5977b915d Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Thu, 11 Jul 2024 15:58:17 +0200 Subject: [PATCH 54/97] no pip upgrade --- .github/workflows/debiai-gui-publish.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/debiai-gui-publish.yml b/.github/workflows/debiai-gui-publish.yml index 1717d14b4..f02506132 100644 --- a/.github/workflows/debiai-gui-publish.yml +++ b/.github/workflows/debiai-gui-publish.yml @@ -35,7 +35,6 @@ jobs: cp -r dist ../debiaiServer/dist - name: Install dependencies run: | - python -m pip install --upgrade pip pip install -r debiaiServer/requirements.txt pip install setuptools wheel - name: Build From 6d761952d2eb7a10e24fc403a588245297575e94 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Thu, 11 Jul 2024 16:56:50 +0200 Subject: [PATCH 55/97] fixed python version to 3.11 --- .github/workflows/debiai-gui-publish.yml | 3 ++- 1 file changed, 2 
insertions(+), 1 deletion(-) diff --git a/.github/workflows/debiai-gui-publish.yml b/.github/workflows/debiai-gui-publish.yml index f02506132..6bb2d967c 100644 --- a/.github/workflows/debiai-gui-publish.yml +++ b/.github/workflows/debiai-gui-publish.yml @@ -22,7 +22,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v3 with: - python-version: "3.x" + python-version: "3.11" - name: Set up Node.js uses: actions/setup-node@v3 with: @@ -35,6 +35,7 @@ jobs: cp -r dist ../debiaiServer/dist - name: Install dependencies run: | + python -m pip install --upgrade pip pip install -r debiaiServer/requirements.txt pip install setuptools wheel - name: Build From 958335a058c38559787a54ab902d9520250d7ad8 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Thu, 11 Jul 2024 16:59:39 +0200 Subject: [PATCH 56/97] version update --- debiaiServer/swagger.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/debiaiServer/swagger.yaml b/debiaiServer/swagger.yaml index 8ade42421..a22d6064a 100644 --- a/debiaiServer/swagger.yaml +++ b/debiaiServer/swagger.yaml @@ -1,6 +1,6 @@ swagger: "2.0" info: - version: 0.28.2 + version: 0.28.3 title: DebiAI_debiaiServer_API description: DebiAI backend api contact: From 7f4d22aaf998a1c236f21af6fd406460efa75813 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Thu, 11 Jul 2024 17:28:01 +0200 Subject: [PATCH 57/97] env var are availables in the package --- MANIFEST.in | 1 + debiaiServer/server.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/MANIFEST.in b/MANIFEST.in index 7b9fe797a..9206eda3e 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,3 +1,4 @@ include debiaiServer/swagger.yaml +include debiaiServer/config * recursive-include debiaiServer/dist * diff --git a/debiaiServer/server.py b/debiaiServer/server.py index a5ac68c23..b4ad62d4f 100644 --- a/debiaiServer/server.py +++ b/debiaiServer/server.py @@ -5,7 +5,7 @@ BRIGHT_CYAN = "\033[96m" BRIGHT_GREEN = "\033[92m" -RESET = "\033[0m" # called to return to standard terminal text color +RESET = "\033[0m" def print_bash_info(): From 382d0e249bc77b2e688e3bd4576c3af6da87f805 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Thu, 11 Jul 2024 17:30:29 +0200 Subject: [PATCH 58/97] version up --- debiaiServer/swagger.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/debiaiServer/swagger.yaml b/debiaiServer/swagger.yaml index a22d6064a..0a79ed9bc 100644 --- a/debiaiServer/swagger.yaml +++ b/debiaiServer/swagger.yaml @@ -1,6 +1,6 @@ swagger: "2.0" info: - version: 0.28.3 + version: 0.28.4 title: DebiAI_debiaiServer_API description: DebiAI backend api contact: From ccb957be043d0944d31ee0e64d58dd4a32387eca Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Thu, 11 Jul 2024 17:37:54 +0200 Subject: [PATCH 59/97] env var included through configs files --- MANIFEST.in | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/MANIFEST.in b/MANIFEST.in index 9206eda3e..44a0b68c4 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,4 +1,5 @@ include debiaiServer/swagger.yaml -include debiaiServer/config * +include debiaiServer/config/config.env +include debiaiServer/config/config.ini recursive-include debiaiServer/dist * From 295694dafb3fcef3a261691ae786ecb575d779e8 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Thu, 11 Jul 2024 17:40:03 +0200 Subject: [PATCH 60/97] version updated --- debiaiServer/swagger.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/debiaiServer/swagger.yaml b/debiaiServer/swagger.yaml index 0a79ed9bc..ad052115d 100644 --- 
a/debiaiServer/swagger.yaml +++ b/debiaiServer/swagger.yaml @@ -1,6 +1,6 @@ swagger: "2.0" info: - version: 0.28.4 + version: 0.28.5 title: DebiAI_debiaiServer_API description: DebiAI backend api contact: From 86168c0d3a23f790e32b1b45d56dda863d8154b3 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Thu, 11 Jul 2024 17:48:15 +0200 Subject: [PATCH 61/97] double check inclusion of config files --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 4a627ad9d..a6b1bc55a 100644 --- a/setup.py +++ b/setup.py @@ -9,7 +9,7 @@ version=VERSION, packages=find_packages(include=["debiaiServer", "debiaiServer.*"]), include_package_data=True, - package_data={"debiaiServer": ["swagger.yaml"]}, + package_data={"debiaiServer": ["swagger.yaml", "config.env", "config.ini"]}, install_requires=[ "Flask==2.0.3", "flask_cors==3.0.8", From 3beb5e13f08f03233faeefcffe31b9fe57e24b88 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Thu, 11 Jul 2024 17:50:36 +0200 Subject: [PATCH 62/97] version update --- debiaiServer/swagger.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/debiaiServer/swagger.yaml b/debiaiServer/swagger.yaml index ad052115d..c99776297 100644 --- a/debiaiServer/swagger.yaml +++ b/debiaiServer/swagger.yaml @@ -1,6 +1,6 @@ swagger: "2.0" info: - version: 0.28.5 + version: 0.28.6 title: DebiAI_debiaiServer_API description: DebiAI backend api contact: From 8da0c2df9f7c9922a4395bf307ea38403aec3b7d Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Fri, 12 Jul 2024 11:40:57 +0200 Subject: [PATCH 63/97] use of pkg ressources to get file paths --- debiaiServer/config/init_config.py | 4 ++-- debiaiServer/utils/utils.py | 5 ++++- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/debiaiServer/config/init_config.py b/debiaiServer/config/init_config.py index 2079dcc08..98d93d85e 100644 --- a/debiaiServer/config/init_config.py +++ b/debiaiServer/config/init_config.py @@ -1,9 +1,9 @@ from configparser import ConfigParser from termcolor import colored - +import pkg_resources import os -config_path = "debiaiServer/config/config.ini" +config_path = pkg_resources.resource_filename("debiaiServer", "config/config.ini") config_parser = ConfigParser() DEBUG_COLOR = "light_blue" diff --git a/debiaiServer/utils/utils.py b/debiaiServer/utils/utils.py index 01840372a..0012c0b05 100644 --- a/debiaiServer/utils/utils.py +++ b/debiaiServer/utils/utils.py @@ -1,13 +1,16 @@ import time import yaml +import pkg_resources from yaml.loader import SafeLoader from urllib.parse import urlparse def get_app_version(): # Read the version from the API YAML file + yaml_path = pkg_resources.resource_filename("debiaiServer", "swagger.yaml") + try: - with open("debiaiServer/swagger.yaml") as f: + with open(yaml_path, "r") as f: data = yaml.load(f, Loader=SafeLoader) return data["info"]["version"] except Exception as e: From 7eeeec4845c3033506b9e3b794c0882108580758 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Fri, 12 Jul 2024 11:43:34 +0200 Subject: [PATCH 64/97] update swagger version --- debiaiServer/swagger.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/debiaiServer/swagger.yaml b/debiaiServer/swagger.yaml index c99776297..870153860 100644 --- a/debiaiServer/swagger.yaml +++ b/debiaiServer/swagger.yaml @@ -1,6 +1,6 @@ swagger: "2.0" info: - version: 0.28.6 + version: 0.28.7 title: DebiAI_debiaiServer_API description: DebiAI backend api contact: From 98450a0c60f4df0d306ae082b14535f434e83088 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Fri, 
12 Jul 2024 17:13:23 +0200 Subject: [PATCH 65/97] new path for data direcotry and psutil added to requirements --- .../pythonDataProvider/dataUtils/pythonModuleUtils.py | 4 +++- debiaiServer/server.py | 5 +++++ setup.py | 1 + 3 files changed, 9 insertions(+), 1 deletion(-) diff --git a/debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/pythonModuleUtils.py b/debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/pythonModuleUtils.py index f98261689..4e9acd24f 100644 --- a/debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/pythonModuleUtils.py +++ b/debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/pythonModuleUtils.py @@ -5,8 +5,10 @@ import string import shutil import time +import pkg_resources -DATA_PATH = "data/pythonDataProvider/" + +DATA_PATH = pkg_resources.resource_filename("debiaiServer", "data/pythonDataProvider/") DATA_TYPES = ["groundTruth", "contexts", "inputs", "others"] diff --git a/debiaiServer/server.py b/debiaiServer/server.py index b4ad62d4f..ca4b86e6f 100644 --- a/debiaiServer/server.py +++ b/debiaiServer/server.py @@ -2,11 +2,16 @@ import sys from debiaiServer.websrv import start_server, open_browser from debiaiServer.utils.utils import get_app_version +from debiaiServer.modules.dataProviders.pythonDataProvider.dataUtils import ( + pythonModuleUtils, +) BRIGHT_CYAN = "\033[96m" BRIGHT_GREEN = "\033[92m" RESET = "\033[0m" +DATA_PATH = pythonModuleUtils.DATA_PATH + def print_bash_info(): print( diff --git a/setup.py b/setup.py index a6b1bc55a..72569b6f4 100644 --- a/setup.py +++ b/setup.py @@ -26,6 +26,7 @@ "cacheout==0.14.1", "termcolor==2.3.0", "werkzeug==2.2.2", + "psutil==6.0.0", ], entry_points={ "console_scripts": [ From d662e772f0b9f506150d3c65adcb0816c5590d65 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Fri, 12 Jul 2024 17:14:26 +0200 Subject: [PATCH 66/97] version update --- debiaiServer/swagger.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/debiaiServer/swagger.yaml b/debiaiServer/swagger.yaml index 870153860..4daf64ff1 100644 --- a/debiaiServer/swagger.yaml +++ b/debiaiServer/swagger.yaml @@ -1,6 +1,6 @@ swagger: "2.0" info: - version: 0.28.7 + version: 0.28.8 title: DebiAI_debiaiServer_API description: DebiAI backend api contact: From 4e5a78ada1407c4d1c2bcd8913a366154a64e26f Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Mon, 15 Jul 2024 11:25:26 +0200 Subject: [PATCH 67/97] colored function instead of ANSI --- debiaiServer/server.py | 31 ++++++++++++++++--------------- 1 file changed, 16 insertions(+), 15 deletions(-) diff --git a/debiaiServer/server.py b/debiaiServer/server.py index ca4b86e6f..d2d4a7f36 100644 --- a/debiaiServer/server.py +++ b/debiaiServer/server.py @@ -1,32 +1,33 @@ from threading import Timer +from termcolor import colored import sys from debiaiServer.websrv import start_server, open_browser from debiaiServer.utils.utils import get_app_version from debiaiServer.modules.dataProviders.pythonDataProvider.dataUtils import ( pythonModuleUtils, ) +from debiaiServer.config.init_config import DEBUG_COLOR, SUCCESS_COLOR -BRIGHT_CYAN = "\033[96m" -BRIGHT_GREEN = "\033[92m" -RESET = "\033[0m" DATA_PATH = pythonModuleUtils.DATA_PATH -def print_bash_info(): +def bash_info(): print( - f"{BRIGHT_CYAN}Usage:{RESET} debiai-gui [OPTIONS] COMMAND\n\n" + colored("Usage example:", DEBUG_COLOR) + "debiai-gui [OPTIONS] COMMAND\n\n" "\t" "Use the line below to run the app: \n\n" "\t\t" "$ debiai-gui" - f"{BRIGHT_GREEN} start{RESET}\n\n" - f"{BRIGHT_CYAN}Options:{RESET}\n" - "\t" - 
f"{BRIGHT_GREEN}--version {RESET} Prints DebiAI version number.\n" - f"{BRIGHT_CYAN}Commands:{RESET}\n" - "\t" - f"{BRIGHT_GREEN}start {RESET} Starts the DebiAI GUI and open it in a web browser.\n" + + colored(" start\n\n") + + colored("Options:\n", DEBUG_COLOR) + + "\t" + + colored("--version ", SUCCESS_COLOR) + + "Prints DebiAI version number.\n" + + colored("Commands:\n", DEBUG_COLOR) + + "\t" + + colored("start ", SUCCESS_COLOR) + + " Starts the DebiAI GUI and open it in a web browser.\n" ) @@ -37,8 +38,8 @@ def run(): start_server(reloader=False) elif sys.argv[1] == "--version": version = get_app_version() - print("DebiAI Version:" f"{BRIGHT_GREEN}{version}{RESET}") + print("DebiAI Version:" + colored(version, SUCCESS_COLOR)) else: - print_bash_info() + bash_info() else: - print_bash_info() + bash_info() From 45974df151d4433d161f9eaec18e6a4257f5f6ea Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Mon, 15 Jul 2024 11:34:28 +0200 Subject: [PATCH 68/97] link to doc in bash --- debiaiServer/server.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/debiaiServer/server.py b/debiaiServer/server.py index d2d4a7f36..90c92eda8 100644 --- a/debiaiServer/server.py +++ b/debiaiServer/server.py @@ -14,7 +14,7 @@ def bash_info(): print( - colored("Usage example:", DEBUG_COLOR) + "debiai-gui [OPTIONS] COMMAND\n\n" + colored("Usage: ", DEBUG_COLOR) + "debiai-gui [OPTIONS] COMMAND\n\n" "\t" "Use the line below to run the app: \n\n" "\t\t" @@ -27,7 +27,8 @@ def bash_info(): + colored("Commands:\n", DEBUG_COLOR) + "\t" + colored("start ", SUCCESS_COLOR) - + " Starts the DebiAI GUI and open it in a web browser.\n" + + " Starts the DebiAI GUI and open it in a web browser.\n\n" + + "For more information visit : https://debiai.irt-systemx.fr/ \n\n" ) From 627cf5437252694a21710beddf8df7a71aefb76b Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Mon, 15 Jul 2024 17:30:48 +0200 Subject: [PATCH 69/97] ability to choose a port in command line --- debiaiServer/server.py | 47 ++++++++++++++++++++++++++++-------------- debiaiServer/websrv.py | 28 ++++++++++++++++--------- 2 files changed, 50 insertions(+), 25 deletions(-) diff --git a/debiaiServer/server.py b/debiaiServer/server.py index 90c92eda8..e3a4090db 100644 --- a/debiaiServer/server.py +++ b/debiaiServer/server.py @@ -1,15 +1,29 @@ +import sys +import argparse from threading import Timer from termcolor import colored -import sys -from debiaiServer.websrv import start_server, open_browser from debiaiServer.utils.utils import get_app_version +from debiaiServer.websrv import start_server, open_browser +from debiaiServer.config.init_config import DEBUG_COLOR, SUCCESS_COLOR from debiaiServer.modules.dataProviders.pythonDataProvider.dataUtils import ( pythonModuleUtils, ) -from debiaiServer.config.init_config import DEBUG_COLOR, SUCCESS_COLOR DATA_PATH = pythonModuleUtils.DATA_PATH +PORT = 3000 # default port + + +def parse_arguments(): + parser = argparse.ArgumentParser(description="Run the DebiAI GUI") + parser.add_argument("command", nargs="?", help="Start the DebiAI GUI server") + parser.add_argument( + "--port", type=int, default=PORT, help="Port on which to run the DebiAI GUI" + ) + parser.add_argument( + "--version", action="store_true", help="Prints the version number of DebiAI" + ) + return parser.parse_args() def bash_info(): @@ -23,24 +37,27 @@ def bash_info(): + colored("Options:\n", DEBUG_COLOR) + "\t" + colored("--version ", SUCCESS_COLOR) - + "Prints DebiAI version number.\n" + + " Prints DebiAI version number.\n" + + "\t" + + 
colored("--port [NUMBER] ", SUCCESS_COLOR) + + "Allows you to choose a port.\n" + colored("Commands:\n", DEBUG_COLOR) + "\t" + colored("start ", SUCCESS_COLOR) - + " Starts the DebiAI GUI and open it in a web browser.\n\n" - + "For more information visit : https://debiai.irt-systemx.fr/ \n\n" + + "Starts the DebiAI GUI and open it in a web browser.\n\n" + + "For more information visit: " + + colored("https://debiai.irt-systemx.fr/ \n\n", None, attrs=["bold"]) ) def run(): - if len(sys.argv) > 1: - if sys.argv[1] == "start": - Timer(1, open_browser).start() - start_server(reloader=False) - elif sys.argv[1] == "--version": - version = get_app_version() - print("DebiAI Version:" + colored(version, SUCCESS_COLOR)) - else: - bash_info() + args = parse_arguments() + + if args.version: + version = get_app_version() + print("DebiAI Version:" + colored(version, SUCCESS_COLOR)) + elif args.command == "start": + Timer(1, lambda: open_browser(args.port)).start() + start_server(args.port, reloader=False) else: bash_info() diff --git a/debiaiServer/websrv.py b/debiaiServer/websrv.py index 2f89d9945..914a67a9f 100644 --- a/debiaiServer/websrv.py +++ b/debiaiServer/websrv.py @@ -1,17 +1,19 @@ -import connexion import os +import sys +import psutil +import logging import requests +import connexion import webbrowser -import psutil -from termcolor import colored from flask_cors import CORS -from flask import send_from_directory, request, Response +from termcolor import colored from debiaiServer.init import init from debiaiServer.utils.utils import get_app_version from debiaiServer.config.init_config import DEBUG_COLOR +from flask import send_from_directory, request, Response + DEV_FRONTEND_URL = "http://localhost:8080/" -PORT = 3000 app = connexion.App(__name__) app.add_api("swagger.yaml", strict_validation=True) CORS(app.app) @@ -80,22 +82,28 @@ def is_browser_open(): return False -def open_browser(): - url = f"http://localhost:{PORT}" +def open_browser(port): + url = f"http://localhost:{port}" if is_browser_open(): webbrowser.open_new_tab(url) else: webbrowser.open(url) -def start_server(reloader=True): +def start_server(port, reloader=True): # Run DebiAI init print("================= DebiAI " + get_app_version() + " ====================") init() print("======================== RUN =======================") print( " DebiAI is available at " - + colored("http://localhost:" + str(PORT), DEBUG_COLOR) + + colored("http://localhost:" + str(port), DEBUG_COLOR) ) + + from gevent.pywsgi import WSGIServer + app = create_app() - app.run(port=PORT, debug=True, use_reloader=reloader) + http_server = WSGIServer(("127.0.0.1", port), app) + http_server.serve_forever() + + app.run(port, debug=True, host="0.0.0.0", use_reloader=reloader) From 90cf087c3853b0b33be3157405d2419c29ebe39f Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Tue, 16 Jul 2024 11:11:05 +0200 Subject: [PATCH 70/97] wsgi for prod server condition --- debiaiServer/server.py | 2 +- debiaiServer/swagger.yaml | 2 +- debiaiServer/websrv.py | 19 +++++++++---------- frontend/cspell.json | 3 ++- 4 files changed, 13 insertions(+), 13 deletions(-) diff --git a/debiaiServer/server.py b/debiaiServer/server.py index e3a4090db..c2f0fe9e4 100644 --- a/debiaiServer/server.py +++ b/debiaiServer/server.py @@ -58,6 +58,6 @@ def run(): print("DebiAI Version:" + colored(version, SUCCESS_COLOR)) elif args.command == "start": Timer(1, lambda: open_browser(args.port)).start() - start_server(args.port, reloader=False) + start_server(args.port, reloader=False, is_dev=True) else: 
bash_info() diff --git a/debiaiServer/swagger.yaml b/debiaiServer/swagger.yaml index 4daf64ff1..2a0ece824 100644 --- a/debiaiServer/swagger.yaml +++ b/debiaiServer/swagger.yaml @@ -1,6 +1,6 @@ swagger: "2.0" info: - version: 0.28.8 + version: 0.28.9 title: DebiAI_debiaiServer_API description: DebiAI backend api contact: diff --git a/debiaiServer/websrv.py b/debiaiServer/websrv.py index 914a67a9f..77ee64b95 100644 --- a/debiaiServer/websrv.py +++ b/debiaiServer/websrv.py @@ -1,13 +1,12 @@ import os -import sys import psutil -import logging import requests import connexion import webbrowser from flask_cors import CORS from termcolor import colored from debiaiServer.init import init +from gevent.pywsgi import WSGIServer from debiaiServer.utils.utils import get_app_version from debiaiServer.config.init_config import DEBUG_COLOR from flask import send_from_directory, request, Response @@ -90,7 +89,7 @@ def open_browser(port): webbrowser.open(url) -def start_server(port, reloader=True): +def start_server(port, reloader=True, is_dev=True): # Run DebiAI init print("================= DebiAI " + get_app_version() + " ====================") init() @@ -100,10 +99,10 @@ def start_server(port, reloader=True): + colored("http://localhost:" + str(port), DEBUG_COLOR) ) - from gevent.pywsgi import WSGIServer - - app = create_app() - http_server = WSGIServer(("127.0.0.1", port), app) - http_server.serve_forever() - - app.run(port, debug=True, host="0.0.0.0", use_reloader=reloader) + if is_dev: + # Use flask server else wsgi server for production + app.run(port, debug=True, host="0.0.0.0", use_reloader=reloader) + else: + prod_app = create_app() + http_server = WSGIServer(("127.0.0.1", port), prod_app) + http_server.serve_forever() diff --git a/frontend/cspell.json b/frontend/cspell.json index ee33f4a5f..a3fc19a39 100644 --- a/frontend/cspell.json +++ b/frontend/cspell.json @@ -80,7 +80,8 @@ "zeroline", "Zindex", "zmax", - "zmin" + "zmin", + "psutil" ], "flagWords": [], "ignorePaths": [ From 238ffeca11f3b523e1fbf89e7a00c7b06aacad29 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Tue, 16 Jul 2024 11:14:51 +0200 Subject: [PATCH 71/97] added gevent module --- debiaiServer/requirements.txt | 3 ++- debiaiServer/swagger.yaml | 2 +- setup.py | 1 + 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/debiaiServer/requirements.txt b/debiaiServer/requirements.txt index ffcf31bca..b38fd75ad 100644 --- a/debiaiServer/requirements.txt +++ b/debiaiServer/requirements.txt @@ -13,4 +13,5 @@ PyYAML == 6.0 cacheout == 0.14.1 termcolor==2.3.0 werkzeug==2.2.2 -psutil==6.0.0 \ No newline at end of file +psutil==6.0.0 +gevent==21.8.0 \ No newline at end of file diff --git a/debiaiServer/swagger.yaml b/debiaiServer/swagger.yaml index 2a0ece824..f49522d1c 100644 --- a/debiaiServer/swagger.yaml +++ b/debiaiServer/swagger.yaml @@ -1,6 +1,6 @@ swagger: "2.0" info: - version: 0.28.9 + version: 0.29.0 title: DebiAI_debiaiServer_API description: DebiAI backend api contact: diff --git a/setup.py b/setup.py index 72569b6f4..0a2cf5404 100644 --- a/setup.py +++ b/setup.py @@ -27,6 +27,7 @@ "termcolor==2.3.0", "werkzeug==2.2.2", "psutil==6.0.0", + "gevent==21.8.0", ], entry_points={ "console_scripts": [ From 754bd6a6614b1a189dc7cfb26e0357cadc597a56 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Tue, 16 Jul 2024 11:21:57 +0200 Subject: [PATCH 72/97] versionning --- debiaiServer/requirements.txt | 2 +- debiaiServer/swagger.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/debiaiServer/requirements.txt 
b/debiaiServer/requirements.txt index b38fd75ad..f583c2763 100644 --- a/debiaiServer/requirements.txt +++ b/debiaiServer/requirements.txt @@ -14,4 +14,4 @@ cacheout == 0.14.1 termcolor==2.3.0 werkzeug==2.2.2 psutil==6.0.0 -gevent==21.8.0 \ No newline at end of file +gevent==24.2.1 \ No newline at end of file diff --git a/debiaiServer/swagger.yaml b/debiaiServer/swagger.yaml index f49522d1c..806811aa2 100644 --- a/debiaiServer/swagger.yaml +++ b/debiaiServer/swagger.yaml @@ -1,6 +1,6 @@ swagger: "2.0" info: - version: 0.29.0 + version: 0.29.1 title: DebiAI_debiaiServer_API description: DebiAI backend api contact: From 57a17ae30934d332af4a2b737db426938a03d56d Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Tue, 16 Jul 2024 14:36:55 +0200 Subject: [PATCH 73/97] waitress server for prod --- debiaiServer/requirements.txt | 2 +- debiaiServer/server.py | 2 +- debiaiServer/swagger.yaml | 2 +- debiaiServer/websrv.py | 11 +++++------ setup.py | 2 +- 5 files changed, 9 insertions(+), 10 deletions(-) diff --git a/debiaiServer/requirements.txt b/debiaiServer/requirements.txt index f583c2763..fb76f855e 100644 --- a/debiaiServer/requirements.txt +++ b/debiaiServer/requirements.txt @@ -14,4 +14,4 @@ cacheout == 0.14.1 termcolor==2.3.0 werkzeug==2.2.2 psutil==6.0.0 -gevent==24.2.1 \ No newline at end of file +waitress==3.0.0 \ No newline at end of file diff --git a/debiaiServer/server.py b/debiaiServer/server.py index c2f0fe9e4..bb2b46f87 100644 --- a/debiaiServer/server.py +++ b/debiaiServer/server.py @@ -58,6 +58,6 @@ def run(): print("DebiAI Version:" + colored(version, SUCCESS_COLOR)) elif args.command == "start": Timer(1, lambda: open_browser(args.port)).start() - start_server(args.port, reloader=False, is_dev=True) + start_server(args.port, reloader=False, is_dev=False) else: bash_info() diff --git a/debiaiServer/swagger.yaml b/debiaiServer/swagger.yaml index 806811aa2..5c618ce90 100644 --- a/debiaiServer/swagger.yaml +++ b/debiaiServer/swagger.yaml @@ -1,6 +1,6 @@ swagger: "2.0" info: - version: 0.29.1 + version: 0.29.2 title: DebiAI_debiaiServer_API description: DebiAI backend api contact: diff --git a/debiaiServer/websrv.py b/debiaiServer/websrv.py index 77ee64b95..80491e691 100644 --- a/debiaiServer/websrv.py +++ b/debiaiServer/websrv.py @@ -4,9 +4,9 @@ import connexion import webbrowser from flask_cors import CORS +from waitress import serve from termcolor import colored from debiaiServer.init import init -from gevent.pywsgi import WSGIServer from debiaiServer.utils.utils import get_app_version from debiaiServer.config.init_config import DEBUG_COLOR from flask import send_from_directory, request, Response @@ -98,11 +98,10 @@ def start_server(port, reloader=True, is_dev=True): " DebiAI is available at " + colored("http://localhost:" + str(port), DEBUG_COLOR) ) - + app = create_app() if is_dev: - # Use flask server else wsgi server for production + # Use flask server for development app.run(port, debug=True, host="0.0.0.0", use_reloader=reloader) else: - prod_app = create_app() - http_server = WSGIServer(("127.0.0.1", port), prod_app) - http_server.serve_forever() + # Use waitress for production + serve(app, host="0.0.0.0", port=port) diff --git a/setup.py b/setup.py index 0a2cf5404..d143fea8c 100644 --- a/setup.py +++ b/setup.py @@ -27,7 +27,7 @@ "termcolor==2.3.0", "werkzeug==2.2.2", "psutil==6.0.0", - "gevent==21.8.0", + "waitress==3.0.0", ], entry_points={ "console_scripts": [ From 39fa3ba25a418f61a5d199729e1201b3e752c5a8 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Thu, 18 Jul 2024 
11:43:03 +0200 Subject: [PATCH 74/97] script to run + docker file updated --- Dockerfile | 11 ++++++----- debiaiServer/websrv.py | 11 +++++++++-- run.py | 3 +++ 3 files changed, 18 insertions(+), 7 deletions(-) create mode 100644 run.py diff --git a/Dockerfile b/Dockerfile index 75cd98f4f..61eeb17e9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -7,10 +7,11 @@ RUN npm run build # DebiAI Python debiaiServer (old backend dir) FROM python:3.10.12-slim-bullseye -WORKDIR /debiaiServer -COPY debiaiServer/ . -RUN pip install --trusted-host pypi.python.org -r requirements.txt -COPY --from=build-stage /frontend/dist dist +WORKDIR / +COPY debiaiServer/ debiaiServer/ +RUN pip install --trusted-host pypi.python.org -r debiaiServer/requirements.txt +COPY run.py . +COPY --from=build-stage /frontend/dist debiaiServer/dist ENV FLASK_ENV production -CMD ["python", "websrv.py"] +CMD ["python", "run.py"] diff --git a/debiaiServer/websrv.py b/debiaiServer/websrv.py index 80491e691..de1a58d13 100644 --- a/debiaiServer/websrv.py +++ b/debiaiServer/websrv.py @@ -91,9 +91,12 @@ def open_browser(port): def start_server(port, reloader=True, is_dev=True): # Run DebiAI init - print("================= DebiAI " + get_app_version() + " ====================") + print( + "================= DebiAI " + get_app_version() + " ====================", + flush=True, + ) init() - print("======================== RUN =======================") + print("======================== RUN =======================", flush=True) print( " DebiAI is available at " + colored("http://localhost:" + str(port), DEBUG_COLOR) @@ -105,3 +108,7 @@ def start_server(port, reloader=True, is_dev=True): else: # Use waitress for production serve(app, host="0.0.0.0", port=port) + + +if __name__ == "__main__": + start_server(port=3000, reloader=False, is_dev=True) diff --git a/run.py b/run.py new file mode 100644 index 000000000..c1222f39a --- /dev/null +++ b/run.py @@ -0,0 +1,3 @@ +from debiaiServer import websrv + +websrv.start_server(port=3000, reloader=False, is_dev=False) From 373cf4b5d8d1e80a738c721b2660970ff8745226 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Thu, 18 Jul 2024 11:48:33 +0200 Subject: [PATCH 75/97] version update --- debiaiServer/swagger.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/debiaiServer/swagger.yaml b/debiaiServer/swagger.yaml index 5c618ce90..d4404b2e8 100644 --- a/debiaiServer/swagger.yaml +++ b/debiaiServer/swagger.yaml @@ -1,6 +1,6 @@ swagger: "2.0" info: - version: 0.29.2 + version: 0.29.3 title: DebiAI_debiaiServer_API description: DebiAI backend api contact: From 9f3a22bb09342de9b78c69b52ceafe9e11f9b43b Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Thu, 18 Jul 2024 13:00:30 +0200 Subject: [PATCH 76/97] disable waitress warning --- debiaiServer/swagger.yaml | 2 +- debiaiServer/websrv.py | 5 ++++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/debiaiServer/swagger.yaml b/debiaiServer/swagger.yaml index d4404b2e8..c424f5407 100644 --- a/debiaiServer/swagger.yaml +++ b/debiaiServer/swagger.yaml @@ -1,6 +1,6 @@ swagger: "2.0" info: - version: 0.29.3 + version: 0.29.4 title: DebiAI_debiaiServer_API description: DebiAI backend api contact: diff --git a/debiaiServer/websrv.py b/debiaiServer/websrv.py index de1a58d13..35a60428b 100644 --- a/debiaiServer/websrv.py +++ b/debiaiServer/websrv.py @@ -1,6 +1,7 @@ import os import psutil import requests +import logging import connexion import webbrowser from flask_cors import CORS @@ -99,7 +100,8 @@ def start_server(port, reloader=True, is_dev=True): 
print("======================== RUN =======================", flush=True) print( " DebiAI is available at " - + colored("http://localhost:" + str(port), DEBUG_COLOR) + + colored("http://localhost:" + str(port), DEBUG_COLOR), + flush=True, ) app = create_app() if is_dev: @@ -107,6 +109,7 @@ def start_server(port, reloader=True, is_dev=True): app.run(port, debug=True, host="0.0.0.0", use_reloader=reloader) else: # Use waitress for production + logging.getLogger("requests").setLevel(logging.WARNING) serve(app, host="0.0.0.0", port=port) From bf087b22bf32e5c680611e0f47d3efbf672e7a94 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Thu, 18 Jul 2024 14:43:47 +0200 Subject: [PATCH 77/97] review --- .gitignore | 5 ----- Dockerfile | 2 +- .../algorithms/classificationErrorMetric.py | 2 -- .../algorithms/regressionErrorMetric.py | 2 -- 4 files changed, 1 insertion(+), 10 deletions(-) diff --git a/.gitignore b/.gitignore index 4fc80f53c..112ade474 100644 --- a/.gitignore +++ b/.gitignore @@ -13,11 +13,6 @@ frontend/node_modules/ # coverage .coverage -# Tests and utils -notebookDebiai.ipynb -test_imports.py -tree_project.md - # Scripts build_and_run.sh diff --git a/Dockerfile b/Dockerfile index 61eeb17e9..00eb9d1eb 100644 --- a/Dockerfile +++ b/Dockerfile @@ -5,7 +5,7 @@ COPY frontend/ . RUN npm install RUN npm run build -# DebiAI Python debiaiServer (old backend dir) +# DebiAI Python debiaiServer FROM python:3.10.12-slim-bullseye WORKDIR / COPY debiaiServer/ debiaiServer/ diff --git a/debiaiServer/modules/algoProviders/integratedAlgoProvider/algorithms/classificationErrorMetric.py b/debiaiServer/modules/algoProviders/integratedAlgoProvider/algorithms/classificationErrorMetric.py index f3b83c3d3..99652a7a3 100644 --- a/debiaiServer/modules/algoProviders/integratedAlgoProvider/algorithms/classificationErrorMetric.py +++ b/debiaiServer/modules/algoProviders/integratedAlgoProvider/algorithms/classificationErrorMetric.py @@ -2,8 +2,6 @@ get_input_from_inputs, ) -# from ..utils import get_input_from_inputs - # This algorithm is a simple classification metric calculator # It takes a list of values corresponding to the ground truth # and a list of values corresponding to the predictions diff --git a/debiaiServer/modules/algoProviders/integratedAlgoProvider/algorithms/regressionErrorMetric.py b/debiaiServer/modules/algoProviders/integratedAlgoProvider/algorithms/regressionErrorMetric.py index dca073db1..117e8b5e8 100644 --- a/debiaiServer/modules/algoProviders/integratedAlgoProvider/algorithms/regressionErrorMetric.py +++ b/debiaiServer/modules/algoProviders/integratedAlgoProvider/algorithms/regressionErrorMetric.py @@ -2,8 +2,6 @@ get_input_from_inputs, ) -# from ..utils import get_input_from_inputs - # This algorithm is a simple regression metric calculator # It takes a list of numbers corresponding to an error From 59e59e472b5729ce145c40fafafc0bead3746b3f Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Fri, 19 Jul 2024 10:21:20 +0200 Subject: [PATCH 78/97] rename, comments and updated markdown --- .github/workflows/debiai-gui-publish.yml | 2 +- README.md | 4 +- .../{server.py => debiai_gui_utils.py} | 0 debiaiServer/swagger.yaml | 2 +- debiai_gui.md | 86 +++++++++++++++++++ frontend/package.json | 2 +- run.py => run_debiai_server_prod.py | 2 + setup.py | 5 +- 8 files changed, 95 insertions(+), 8 deletions(-) rename debiaiServer/{server.py => debiai_gui_utils.py} (100%) create mode 100644 debiai_gui.md rename run.py => run_debiai_server_prod.py (57%) diff --git a/.github/workflows/debiai-gui-publish.yml 
b/.github/workflows/debiai-gui-publish.yml index 6bb2d967c..77f019822 100644 --- a/.github/workflows/debiai-gui-publish.yml +++ b/.github/workflows/debiai-gui-publish.yml @@ -1,4 +1,4 @@ -# This workflow will upload a Debiai-gui Package using Twine when a release is created +# This workflow will upload a Debiai-gui Package using Twine when the main branch is updated # For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries # This workflow uses actions that are not certified by GitHub. diff --git a/README.md b/README.md index 4426ac5d0..0c846144c 100644 --- a/README.md +++ b/README.md @@ -15,9 +15,9 @@ -## Why DebiAI Gui? +## Why DebiAI ? -DebiAI Gui is an open source package that allows you to launch DebiAI in a standalone state. Thus by installing this module you can use quickly the DebiAI open-source web application that aims to facilitate the process of developing Machine Learning models, especially in the stage of the project data analysis and the model performance comparison. +DebiAI is an open-source web application that aims to facilitate the process of developing Machine Learning models, especially in the stage of the project data analysis and the model performance comparison. DebiAI provides data scientists with features to: diff --git a/debiaiServer/server.py b/debiaiServer/debiai_gui_utils.py similarity index 100% rename from debiaiServer/server.py rename to debiaiServer/debiai_gui_utils.py diff --git a/debiaiServer/swagger.yaml b/debiaiServer/swagger.yaml index c424f5407..cc1cc26db 100644 --- a/debiaiServer/swagger.yaml +++ b/debiaiServer/swagger.yaml @@ -1,6 +1,6 @@ swagger: "2.0" info: - version: 0.29.4 + version: 0.29.5 title: DebiAI_debiaiServer_API description: DebiAI backend api contact: diff --git a/debiai_gui.md b/debiai_gui.md new file mode 100644 index 000000000..e2a0960e9 --- /dev/null +++ b/debiai_gui.md @@ -0,0 +1,86 @@ +
+ + +[![Online documentation](https://img.shields.io/static/v1?label=&message=Online documentation&color=0077de)](https://debiai.irt-systemx.fr/) +
+[![License](https://img.shields.io/badge/License-Apache_2.0-blue.svg)](https://opensource.org/licenses/Apache-2.0) +![cd](https://github.com/debiai/debiai/actions/workflows/docker-push.yml/badge.svg) +
+![Activity](https://img.shields.io/github/commit-activity/m/debiai/debiai) +![Last commit](https://img.shields.io/github/last-commit/debiai/debiai) +
+[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) +[![Code style: flake8](https://img.shields.io/badge/code%20style-flake8-1c4a6c.svg)](https://flake8.pycqa.org/en/latest/) +[![code style: prettier](https://img.shields.io/badge/code_style-prettier-ff69b4.svg?style=flat-square)](https://github.com/prettier/prettier) + +
+
+## Why DebiAI Gui?
+
+DebiAI Gui is an open-source package that allows you to launch DebiAI as a standalone application. By installing this module, you can quickly start using the DebiAI open-source web application, which aims to facilitate the development of Machine Learning models, especially during project data analysis and model performance comparison.
+
+DebiAI provides data scientists with features to:
+
+- Identify biases and errors in your input, results, contextual or ground truth project data
+- Compare the performance of your ML models according to their contextual results
+- Select and create sets of data graphically for further analysis or (re-)training purposes
+- Quickly create and share statistical visualizations of your project data for your team or client
+
+## Documentation
+
+The full documentation is available on the [DebiAI website](https://debiai.irt-systemx.fr/).
+
+## Dashboard
+
+DebiAI has a Web Graphical User Interface with a complete data visualization toolkit offering many statistical analysis tools:
+

+ +

+
+The dashboard is highly customizable and can be used for large and small projects. Learn more about the [widgets and how to use them](https://debiai.irt-systemx.fr/dashboard/widgets/).
+
+## Data
+
+DebiAI is designed to be used for any kind of project and data; it is particularly useful for projects that involve a lot of contextual data.
+
+DebiAI provides two main ways to import your data:
+
+- A [DebiAI Python module](https://debiai.irt-systemx.fr/dataInsertion/pythonModule/) is provided to insert, directly from your Python workflow, the data and model results that you want to study.
+- You can also create a [Data Provider](https://debiai.irt-systemx.fr/dataInsertion/dataProviders/), a Web API that will allow DebiAI to reach your data and model results from any programming language and any data source without duplication.
+  Check out the [DebiAI Data Provider NodeJs template](https://github.com/debiai/data-provider-nodejs-template) for an example of a Data Provider.
+
+## Installation
+
+DebiAI-GUI is available as a Python module with pip. To install it, you can follow the [installation guide](https://debiai.irt-systemx.fr/introduction/gettingStarted/installation) (a minimal launch sketch is also shown below).
+
+## Use cases
+
+As part of the [Confiance.ai](https://www.confiance.ai/) program, we (the [IRT SystemX](https://www.irt-systemx.fr/)) are using and developing DebiAI for a wide range of use cases.
+
+One of them is the [Valeo - WoodScape](https://woodscape.valeo.com/) dataset:
+
+### Valeo - WoodScape
+
+The Valeo - WoodScape dataset is an annotated image dataset taken from 4 fisheye cameras. DebiAI is used to analyze the dataset for biases and outliers in the data.
+
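As a rough illustration of the installation and launch flow above, here is a minimal sketch based on the `debiai-gui` entry point and the `start_server(port, reloader, is_dev)` function introduced in these patches; the PyPI package name and the exact CLI options are assumptions and may differ from the released version.

```python
# Hedged sketch only: command-line usage assumed from the parse_arguments() shown above.
#
#   pip install debiai-gui          # package name assumed from the console entry point
#   debiai-gui start --port 8080    # start the server and open the dashboard
#   debiai-gui --version            # print the DebiAI version number
#
# The same launch can be done from Python, mirroring run_debiai_server_prod.py:
from debiaiServer.websrv import start_server

# is_dev=False serves the built frontend through waitress instead of the Flask dev server
start_server(port=8080, reloader=False, is_dev=False)
```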

+ +

+
+Within the [Confiance.ai](https://www.confiance.ai/) program, DebiAI has been able to import the project data, detect biases, find annotation errors and export them to the project's image annotation tool.
+
+---
+
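A hedged health-check sketch may also help: once the server is running, the `/version` ping route declared in `swagger.yaml` can be used to confirm that the backend is up. This assumes the API is mounted at the server root and that the default port 3000 from these patches is used; both are assumptions, not guarantees.

```python
# Minimal health-check sketch (assumptions: API served at the root path,
# server started locally on the default port 3000).
import requests

response = requests.get("http://localhost:3000/version", timeout=5)
response.raise_for_status()  # raises if the backend did not answer with a 2xx status
print("DebiAI backend answered:", response.text)
```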

+
+  DebiAI-GUI is developed by
+
+  And is integrated in
+

+ +--- diff --git a/frontend/package.json b/frontend/package.json index 9aade6fc9..5c910b854 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -1,6 +1,6 @@ { "name": "debiai_frontend", - "version": "0.27.1", + "version": "0.28.0", "description": "Frontend for Debiai, made with Vuejs", "license": "Apache-2.0", "scripts": { diff --git a/run.py b/run_debiai_server_prod.py similarity index 57% rename from run.py rename to run_debiai_server_prod.py index c1222f39a..e3a97c04f 100644 --- a/run.py +++ b/run_debiai_server_prod.py @@ -1,3 +1,5 @@ +# This file is used to run the debiai-gui server in production mode + from debiaiServer import websrv websrv.start_server(port=3000, reloader=False, is_dev=False) diff --git a/setup.py b/setup.py index d143fea8c..00d038ae4 100644 --- a/setup.py +++ b/setup.py @@ -9,7 +9,6 @@ version=VERSION, packages=find_packages(include=["debiaiServer", "debiaiServer.*"]), include_package_data=True, - package_data={"debiaiServer": ["swagger.yaml", "config.env", "config.ini"]}, install_requires=[ "Flask==2.0.3", "flask_cors==3.0.8", @@ -31,13 +30,13 @@ ], entry_points={ "console_scripts": [ - "debiai-gui=debiaiServer.server:run", + "debiai-gui=debiaiServer.debiai_gui_utils:run", ], }, author="IRT-Systemx", author_email="debiai@irt-systemx.fr", description="DebiAI easy start module, the standalone version of DebiAI", - long_description=open("README.md").read(), + long_description=open("debiai_gui.md").read(), long_description_content_type="text/markdown", url="https://github.com/debiai/DebiAI", classifiers=[ From 294afc60ec602da7ecbd031a5d7aaa8ea6737f84 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Fri, 19 Jul 2024 10:32:34 +0200 Subject: [PATCH 79/97] added images for doc purposes --- MANIFEST.in | 1 + debiaiServer/swagger.yaml | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/MANIFEST.in b/MANIFEST.in index 44a0b68c4..467a68550 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,5 +1,6 @@ include debiaiServer/swagger.yaml include debiaiServer/config/config.env include debiaiServer/config/config.ini +include images/ * recursive-include debiaiServer/dist * diff --git a/debiaiServer/swagger.yaml b/debiaiServer/swagger.yaml index cc1cc26db..229a111fa 100644 --- a/debiaiServer/swagger.yaml +++ b/debiaiServer/swagger.yaml @@ -1,6 +1,6 @@ swagger: "2.0" info: - version: 0.29.5 + version: 0.29.6 title: DebiAI_debiaiServer_API description: DebiAI backend api contact: From f2d6c82e3d1747bb5d545ad6844bd2a8806080c1 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Fri, 19 Jul 2024 11:04:07 +0200 Subject: [PATCH 80/97] added images to md with github links and enahnce number of threads --- debiaiServer/swagger.yaml | 2 +- debiaiServer/websrv.py | 4 +--- debiai_gui.md | 6 +++--- 3 files changed, 5 insertions(+), 7 deletions(-) diff --git a/debiaiServer/swagger.yaml b/debiaiServer/swagger.yaml index 229a111fa..dcb6586c8 100644 --- a/debiaiServer/swagger.yaml +++ b/debiaiServer/swagger.yaml @@ -1,6 +1,6 @@ swagger: "2.0" info: - version: 0.29.6 + version: 0.29.7 title: DebiAI_debiaiServer_API description: DebiAI backend api contact: diff --git a/debiaiServer/websrv.py b/debiaiServer/websrv.py index 35a60428b..7207441e1 100644 --- a/debiaiServer/websrv.py +++ b/debiaiServer/websrv.py @@ -1,7 +1,6 @@ import os import psutil import requests -import logging import connexion import webbrowser from flask_cors import CORS @@ -109,8 +108,7 @@ def start_server(port, reloader=True, is_dev=True): app.run(port, debug=True, host="0.0.0.0", use_reloader=reloader) else: # 
Use waitress for production - logging.getLogger("requests").setLevel(logging.WARNING) - serve(app, host="0.0.0.0", port=port) + serve(app, host="0.0.0.0", port=port, threads=6) if __name__ == "__main__": diff --git a/debiai_gui.md b/debiai_gui.md index e2a0960e9..706494b04 100644 --- a/debiai_gui.md +++ b/debiai_gui.md @@ -1,5 +1,5 @@
- + [![Online documentation](https://img.shields.io/static/v1?label=&message=Online documentation&color=0077de)](https://debiai.irt-systemx.fr/)
@@ -35,7 +35,7 @@ The full documentation is available on the [DebiAI website](https://debiai.irt-s DebiAI has a Web Graphical User Interface with a complete data visualization toolkit offering many statistical analysis tools:

- +

The dashboard is highly customizable and can be used for large and small projects. Learn more about the [widgets and how to use them](https://debiai.irt-systemx.fr/dashboard/widgets/). @@ -65,7 +65,7 @@ One of them is the [Valeo - WoodScape](https://woodscape.valeo.com/) dataset: The Valeo - WoodScape dataset is an annotated image dataset taken from 4 fisheye cameras. DebiAI is used to analyze the dataset for biases and outliers in the data.

- +

Withing the [Confiance.ai](https://www.confiance.ai/) program, DebiAI has been able to import the project data, detect biases, find annotations errors and export them to the project's image annotation tool. From 7d303997ef13da0abba136b45b8a1232a8c4d161 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Fri, 19 Jul 2024 11:20:01 +0200 Subject: [PATCH 81/97] corrected image links --- debiai_gui.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/debiai_gui.md b/debiai_gui.md index 706494b04..6c0320ef7 100644 --- a/debiai_gui.md +++ b/debiai_gui.md @@ -1,5 +1,5 @@
- + [![Online documentation](https://img.shields.io/static/v1?label=&message=Online documentation&color=0077de)](https://debiai.irt-systemx.fr/)
@@ -35,7 +35,7 @@ The full documentation is available on the [DebiAI website](https://debiai.irt-s DebiAI has a Web Graphical User Interface with a complete data visualization toolkit offering many statistical analysis tools:

- +

The dashboard is highly customizable and can be used for large and small projects. Learn more about the [widgets and how to use them](https://debiai.irt-systemx.fr/dashboard/widgets/). @@ -65,7 +65,7 @@ One of them is the [Valeo - WoodScape](https://woodscape.valeo.com/) dataset: The Valeo - WoodScape dataset is an annotated image dataset taken from 4 fisheye cameras. DebiAI is used to analyze the dataset for biases and outliers in the data.

- +

Withing the [Confiance.ai](https://www.confiance.ai/) program, DebiAI has been able to import the project data, detect biases, find annotations errors and export them to the project's image annotation tool. From 359b3781c74432a17df63233b10d39aaaab13279 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Fri, 19 Jul 2024 11:20:26 +0200 Subject: [PATCH 82/97] updated version --- debiaiServer/swagger.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/debiaiServer/swagger.yaml b/debiaiServer/swagger.yaml index dcb6586c8..4e9ad71b6 100644 --- a/debiaiServer/swagger.yaml +++ b/debiaiServer/swagger.yaml @@ -1,6 +1,6 @@ swagger: "2.0" info: - version: 0.29.7 + version: 0.29.8 title: DebiAI_debiaiServer_API description: DebiAI backend api contact: From ead0c409a1f25d698351f6767af286b5aa0eac5e Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Fri, 19 Jul 2024 15:59:22 +0200 Subject: [PATCH 83/97] rename backend into debiaiServer for PR --- .github/workflows/pull-request-check.yml | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/.github/workflows/pull-request-check.yml b/.github/workflows/pull-request-check.yml index ec95d56e4..08988c41f 100644 --- a/.github/workflows/pull-request-check.yml +++ b/.github/workflows/pull-request-check.yml @@ -8,7 +8,7 @@ on: - reopened jobs: - black-format-check: # Check that the backend codebase is formatted with black + black-format-check: # Check that the debiaiServer codebase is formatted with black name: Black format check runs-on: ubuntu-latest steps: @@ -19,12 +19,12 @@ jobs: python-version: 3.8 - name: Install dependencies and check black format run: | - cd backend + cd debiaiServer python -m pip install --upgrade pip pip install black black --check --diff . - flake8-check: # Check that the backend codebase does not contain linting errors + flake8-check: # Check that the debiaiServer codebase does not contain linting errors name: Flake8 check runs-on: ubuntu-latest steps: @@ -35,7 +35,7 @@ jobs: python-version: 3.8 - name: Install dependencies and check flake8 format run: | - cd backend + cd debiaiServer python -m pip install --upgrade pip pip install flake8 flake8 . @@ -73,18 +73,18 @@ jobs: steps: - uses: actions/checkout@v2 - - name: Check that the version in backend and frontend are the same + - name: Check that the version in debiaiServer and frontend are the same id: version-check run: | cd frontend FRONTEND_VERSION=$(cat package.json | grep -m1 version | cut -d '"' -f 4) - cd ../backend + cd ../debiaiServer BACKEND_VERSION=$(cat swagger.yaml | grep -m1 version | cut -d ':' -f 2 | sed 's/ //g') if [ "$FRONTEND_VERSION" != "$BACKEND_VERSION" ]; then - echo "Version mismatch: frontend/package.json version '$FRONTEND_VERSION' != backend/swagger.yaml version '$BACKEND_VERSION'." + echo "Version mismatch: frontend/package.json version '$FRONTEND_VERSION' != debiaiServer/swagger.yaml version '$BACKEND_VERSION'." exit 1 fi - echo "Version match: frontend/package.json version '$FRONTEND_VERSION' == backend/swagger.yaml version '$BACKEND_VERSION'." + echo "Version match: frontend/package.json version '$FRONTEND_VERSION' == debiaiServer/swagger.yaml version '$BACKEND_VERSION'." 
echo "BRANCH_VERSION=$FRONTEND_VERSION" >> $GITHUB_OUTPUT - uses: actions/checkout@v3 @@ -146,13 +146,13 @@ jobs: python-version: ${{ matrix.python-version }} - name: Install dependencies run: | - cd backend + cd debiaiServer python -m pip install --upgrade pip pip install pytest pip install -r requirements.txt - name: Test with pytest run: | - cd backend + cd debiaiServer python websrv.py & sleep 5 && pytest tests/ docker-build-check: # Build the docker image and check that it can run From ffcf5d36d87f80e20f6584bdb1d9708e8c68545f Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Fri, 19 Jul 2024 17:03:26 +0200 Subject: [PATCH 84/97] black --- debiaiServer/controller/algoProviders.py | 4 +++- debiaiServer/controller/data.py | 4 +++- debiaiServer/controller/dataProviders.py | 4 +++- debiaiServer/controller/models.py | 4 +++- debiaiServer/controller/projects.py | 4 +++- debiaiServer/controller/pythonModuleDp.py | 4 +++- debiaiServer/controller/selection.py | 4 +++- debiaiServer/modules/algoProviders/AlgoProvider.py | 4 +++- .../integratedAlgoProvider/integratedAlgoProvider.py | 4 +++- debiaiServer/modules/dataProviders/dataProviderManager.py | 4 +++- .../dataProviders/pythonDataProvider/PythonDataProvider.py | 4 +++- .../dataProviders/pythonDataProvider/dataUtils/hash.py | 4 +++- .../dataProviders/pythonDataProvider/dataUtils/models.py | 4 +++- .../dataProviders/pythonDataProvider/dataUtils/projects.py | 1 - .../modules/dataProviders/webDataProvider/WebDataProvider.py | 5 ++++- .../modules/dataProviders/webDataProvider/http/api.py | 4 +++- .../modules/dataProviders/webDataProvider/useCases/models.py | 4 +++- .../dataProviders/webDataProvider/useCases/selections.py | 4 +++- debiaiServer/modules/exportMethods/exportUtils.py | 4 +++- 19 files changed, 55 insertions(+), 19 deletions(-) diff --git a/debiaiServer/controller/algoProviders.py b/debiaiServer/controller/algoProviders.py index b3075854a..ad2c31a95 100644 --- a/debiaiServer/controller/algoProviders.py +++ b/debiaiServer/controller/algoProviders.py @@ -4,7 +4,9 @@ from debiaiServer.config.init_config import get_config from debiaiServer.utils.utils import is_url_valid, is_valid_name import debiaiServer.modules.algoProviders.algoProvidersManager as algo_provider_manager -from debiaiServer.modules.algoProviders.AlgoProviderException import AlgoProviderException +from debiaiServer.modules.algoProviders.AlgoProviderException import ( + AlgoProviderException, +) from debiaiServer.modules.algoProviders.AlgoProvider import AlgoProvider ############################################################################# diff --git a/debiaiServer/controller/data.py b/debiaiServer/controller/data.py index 12437a088..0a2bf2f0f 100644 --- a/debiaiServer/controller/data.py +++ b/debiaiServer/controller/data.py @@ -2,7 +2,9 @@ # Imports ############################################################################# import debiaiServer.modules.dataProviders.dataProviderManager as data_provider_manager -from debiaiServer.modules.dataProviders.DataProviderException import DataProviderException +from debiaiServer.modules.dataProviders.DataProviderException import ( + DataProviderException, +) ############################################################################# # Data Management diff --git a/debiaiServer/controller/dataProviders.py b/debiaiServer/controller/dataProviders.py index 0ceeceadb..4baa0c759 100644 --- a/debiaiServer/controller/dataProviders.py +++ b/debiaiServer/controller/dataProviders.py @@ -7,7 +7,9 @@ ) from debiaiServer.utils.utils import 
is_url_valid import debiaiServer.modules.dataProviders.dataProviderManager as data_provider_manager -from debiaiServer.modules.dataProviders.DataProviderException import DataProviderException +from debiaiServer.modules.dataProviders.DataProviderException import ( + DataProviderException, +) ############################################################################# # Data Providers Management diff --git a/debiaiServer/controller/models.py b/debiaiServer/controller/models.py index 160fd7170..890ea7a2d 100644 --- a/debiaiServer/controller/models.py +++ b/debiaiServer/controller/models.py @@ -3,7 +3,9 @@ ############################################################################# import debiaiServer.modules.dataProviders.dataProviderManager as data_provider_manager -from debiaiServer.modules.dataProviders.DataProviderException import DataProviderException +from debiaiServer.modules.dataProviders.DataProviderException import ( + DataProviderException, +) ############################################################################# # MODELS Management diff --git a/debiaiServer/controller/projects.py b/debiaiServer/controller/projects.py index 06bbb801d..48b3cbe22 100644 --- a/debiaiServer/controller/projects.py +++ b/debiaiServer/controller/projects.py @@ -1,7 +1,9 @@ ############################################################################# # Imports ############################################################################# -from debiaiServer.modules.dataProviders.DataProviderException import DataProviderException +from debiaiServer.modules.dataProviders.DataProviderException import ( + DataProviderException, +) import debiaiServer.modules.dataProviders.dataProviderManager as data_provider_manager ############################################################################# diff --git a/debiaiServer/controller/pythonModuleDp.py b/debiaiServer/controller/pythonModuleDp.py index b9c8cae9b..a35eacc62 100644 --- a/debiaiServer/controller/pythonModuleDp.py +++ b/debiaiServer/controller/pythonModuleDp.py @@ -1,4 +1,6 @@ -from debiaiServer.modules.dataProviders.DataProviderException import DataProviderException +from debiaiServer.modules.dataProviders.DataProviderException import ( + DataProviderException, +) import debiaiServer.modules.dataProviders.dataProviderManager as data_provider_manager diff --git a/debiaiServer/controller/selection.py b/debiaiServer/controller/selection.py index 9b22ca709..85db0bba1 100644 --- a/debiaiServer/controller/selection.py +++ b/debiaiServer/controller/selection.py @@ -3,7 +3,9 @@ ############################################################################# import debiaiServer.modules.dataProviders.dataProviderManager as data_provider_manager -from debiaiServer.modules.dataProviders.DataProviderException import DataProviderException +from debiaiServer.modules.dataProviders.DataProviderException import ( + DataProviderException, +) ############################################################################# # Selections Management diff --git a/debiaiServer/modules/algoProviders/AlgoProvider.py b/debiaiServer/modules/algoProviders/AlgoProvider.py index e48944d1d..549cf7d2e 100644 --- a/debiaiServer/modules/algoProviders/AlgoProvider.py +++ b/debiaiServer/modules/algoProviders/AlgoProvider.py @@ -1,7 +1,9 @@ # Class for AlgoProvider import requests import json -from debiaiServer.modules.algoProviders.AlgoProviderException import AlgoProviderException +from debiaiServer.modules.algoProviders.AlgoProviderException import ( + AlgoProviderException, +) class 
AlgoProvider: diff --git a/debiaiServer/modules/algoProviders/integratedAlgoProvider/integratedAlgoProvider.py b/debiaiServer/modules/algoProviders/integratedAlgoProvider/integratedAlgoProvider.py index 40483e69c..6352e75b3 100644 --- a/debiaiServer/modules/algoProviders/integratedAlgoProvider/integratedAlgoProvider.py +++ b/debiaiServer/modules/algoProviders/integratedAlgoProvider/integratedAlgoProvider.py @@ -3,7 +3,9 @@ from debiaiServer.config.init_config import DEBUG_COLOR from debiaiServer.modules.algoProviders.AlgoProvider import AlgoProvider -from debiaiServer.modules.algoProviders.AlgoProviderException import AlgoProviderException +from debiaiServer.modules.algoProviders.AlgoProviderException import ( + AlgoProviderException, +) def _get_algorithm_python(algorithm_name): diff --git a/debiaiServer/modules/dataProviders/dataProviderManager.py b/debiaiServer/modules/dataProviders/dataProviderManager.py index 67ddbdc59..8dace509a 100644 --- a/debiaiServer/modules/dataProviders/dataProviderManager.py +++ b/debiaiServer/modules/dataProviders/dataProviderManager.py @@ -13,7 +13,9 @@ PythonDataProvider, PYTHON_DATA_PROVIDER_ID, ) -from debiaiServer.modules.dataProviders.DataProviderException import DataProviderException +from debiaiServer.modules.dataProviders.DataProviderException import ( + DataProviderException, +) data_providers_list = [] python_data_provider_disabled = True diff --git a/debiaiServer/modules/dataProviders/pythonDataProvider/PythonDataProvider.py b/debiaiServer/modules/dataProviders/pythonDataProvider/PythonDataProvider.py index ef017dd53..5f35a9918 100644 --- a/debiaiServer/modules/dataProviders/pythonDataProvider/PythonDataProvider.py +++ b/debiaiServer/modules/dataProviders/pythonDataProvider/PythonDataProvider.py @@ -1,6 +1,8 @@ from debiaiServer.config.init_config import get_config from debiaiServer.modules.dataProviders.DataProvider import DataProvider -from debiaiServer.modules.dataProviders.DataProviderException import DataProviderException +from debiaiServer.modules.dataProviders.DataProviderException import ( + DataProviderException, +) from debiaiServer.modules.dataProviders.pythonDataProvider.dataUtils import ( pythonModuleUtils, projects, diff --git a/debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/hash.py b/debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/hash.py index 8459eb6bc..a231d24d2 100644 --- a/debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/hash.py +++ b/debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/hash.py @@ -1,7 +1,9 @@ import hashlib import ujson as json -from debiaiServer.modules.dataProviders.pythonDataProvider.dataUtils import pythonModuleUtils +from debiaiServer.modules.dataProviders.pythonDataProvider.dataUtils import ( + pythonModuleUtils, +) DATA_PATH = pythonModuleUtils.DATA_PATH diff --git a/debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/models.py b/debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/models.py index d61210a8f..2214bd85b 100644 --- a/debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/models.py +++ b/debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/models.py @@ -5,7 +5,9 @@ projects, tree, ) -from debiaiServer.modules.dataProviders.DataProviderException import DataProviderException +from debiaiServer.modules.dataProviders.DataProviderException import ( + DataProviderException, +) DATA_PATH = pythonModuleUtils.DATA_PATH diff --git 
a/debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/projects.py b/debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/projects.py index b1185dd68..ad070a530 100644 --- a/debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/projects.py +++ b/debiaiServer/modules/dataProviders/pythonDataProvider/dataUtils/projects.py @@ -131,7 +131,6 @@ def update_project(projectId): def get_project_block_level_info(projectId): - data = _get_project_info(projectId) if "blockLevelInfo" in data: return data["blockLevelInfo"] diff --git a/debiaiServer/modules/dataProviders/webDataProvider/WebDataProvider.py b/debiaiServer/modules/dataProviders/webDataProvider/WebDataProvider.py index dc558aff1..3f1d475f7 100644 --- a/debiaiServer/modules/dataProviders/webDataProvider/WebDataProvider.py +++ b/debiaiServer/modules/dataProviders/webDataProvider/WebDataProvider.py @@ -15,7 +15,10 @@ delete_model, ) import debiaiServer.modules.dataProviders.webDataProvider.useCases.selections as useCaseSelections # noqa -from debiaiServer.modules.dataProviders.webDataProvider.http.api import get_info, get_status +from debiaiServer.modules.dataProviders.webDataProvider.http.api import ( + get_info, + get_status, +) from debiaiServer.modules.dataProviders.webDataProvider.cache.cache import Cache diff --git a/debiaiServer/modules/dataProviders/webDataProvider/http/api.py b/debiaiServer/modules/dataProviders/webDataProvider/http/api.py index 3aee3a731..d78c02e46 100644 --- a/debiaiServer/modules/dataProviders/webDataProvider/http/api.py +++ b/debiaiServer/modules/dataProviders/webDataProvider/http/api.py @@ -1,6 +1,8 @@ import requests import json -from debiaiServer.modules.dataProviders.DataProviderException import DataProviderException +from debiaiServer.modules.dataProviders.DataProviderException import ( + DataProviderException, +) # Todo : change info if in not alive anymore diff --git a/debiaiServer/modules/dataProviders/webDataProvider/useCases/models.py b/debiaiServer/modules/dataProviders/webDataProvider/useCases/models.py index 655cf1fc2..b33f3c2d6 100644 --- a/debiaiServer/modules/dataProviders/webDataProvider/useCases/models.py +++ b/debiaiServer/modules/dataProviders/webDataProvider/useCases/models.py @@ -1,5 +1,7 @@ import debiaiServer.modules.dataProviders.webDataProvider.http.api as api -from debiaiServer.modules.dataProviders.DataProviderException import DataProviderException +from debiaiServer.modules.dataProviders.DataProviderException import ( + DataProviderException, +) def get_models_info(url, project_id): diff --git a/debiaiServer/modules/dataProviders/webDataProvider/useCases/selections.py b/debiaiServer/modules/dataProviders/webDataProvider/useCases/selections.py index 6d3bd8723..b8b9902ed 100644 --- a/debiaiServer/modules/dataProviders/webDataProvider/useCases/selections.py +++ b/debiaiServer/modules/dataProviders/webDataProvider/useCases/selections.py @@ -1,5 +1,7 @@ import debiaiServer.modules.dataProviders.webDataProvider.http.api as api -from debiaiServer.modules.dataProviders.DataProviderException import DataProviderException +from debiaiServer.modules.dataProviders.DataProviderException import ( + DataProviderException, +) def get_project_selections(url, project_id): diff --git a/debiaiServer/modules/exportMethods/exportUtils.py b/debiaiServer/modules/exportMethods/exportUtils.py index 4f136d567..e3a094b9e 100644 --- a/debiaiServer/modules/exportMethods/exportUtils.py +++ b/debiaiServer/modules/exportMethods/exportUtils.py @@ -1,6 +1,8 @@ from 
debiaiServer.config.init_config import get_config import debiaiServer.modules.dataProviders.dataProviderManager as data_provider_manager -from debiaiServer.modules.dataProviders.DataProviderException import DataProviderException +from debiaiServer.modules.dataProviders.DataProviderException import ( + DataProviderException, +) import time from debiaiServer.modules.exportMethods.methods.kafkaUtils import KafkaExportType From 5601139fe7ff7cc8150d82ff17b5095e606d9c15 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Fri, 19 Jul 2024 17:07:03 +0200 Subject: [PATCH 85/97] update version --- debiaiServer/swagger.yaml | 2 +- frontend/package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/debiaiServer/swagger.yaml b/debiaiServer/swagger.yaml index ad28f5784..a6cca5f51 100644 --- a/debiaiServer/swagger.yaml +++ b/debiaiServer/swagger.yaml @@ -1,6 +1,6 @@ swagger: "2.0" info: - version: 0.28.0-beta1 + version: 0.28.0 title: DebiAI_BACKEND_API description: DebiAI backend api contact: diff --git a/frontend/package.json b/frontend/package.json index 9c103f2b1..5c910b854 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -1,6 +1,6 @@ { "name": "debiai_frontend", - "version": "0.29.3", + "version": "0.28.0", "description": "Frontend for Debiai, made with Vuejs", "license": "Apache-2.0", "scripts": { From 52d991c8250a08f6fcc8b42b54cf36deb4a76343 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Fri, 19 Jul 2024 17:10:32 +0200 Subject: [PATCH 86/97] flake --- debiaiServer/debiai_gui_utils.py | 1 - 1 file changed, 1 deletion(-) diff --git a/debiaiServer/debiai_gui_utils.py b/debiaiServer/debiai_gui_utils.py index bb2b46f87..51b104819 100644 --- a/debiaiServer/debiai_gui_utils.py +++ b/debiaiServer/debiai_gui_utils.py @@ -1,4 +1,3 @@ -import sys import argparse from threading import Timer from termcolor import colored From bfa4b69aaf21c0e23290ec2d25ac7e32a3992fb6 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Fri, 19 Jul 2024 17:14:26 +0200 Subject: [PATCH 87/97] dockerfile --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 00eb9d1eb..3679d4705 100644 --- a/Dockerfile +++ b/Dockerfile @@ -10,7 +10,7 @@ FROM python:3.10.12-slim-bullseye WORKDIR / COPY debiaiServer/ debiaiServer/ RUN pip install --trusted-host pypi.python.org -r debiaiServer/requirements.txt -COPY run.py . +COPY run_debiai_server_prod.py . COPY --from=build-stage /frontend/dist debiaiServer/dist ENV FLASK_ENV production CMD ["python", "run.py"] From c0ca1215bcfc71d356d528c1e001c958cdde385f Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Mon, 22 Jul 2024 10:17:58 +0200 Subject: [PATCH 88/97] script to run in dev and rename of paths --- Dockerfile | 2 +- debiaiServer/swagger.yaml | 90 +++++++++++++++++++-------------------- run_debiai_server_dev.py | 5 +++ 3 files changed, 51 insertions(+), 46 deletions(-) create mode 100644 run_debiai_server_dev.py diff --git a/Dockerfile b/Dockerfile index 3679d4705..119a86475 100644 --- a/Dockerfile +++ b/Dockerfile @@ -13,5 +13,5 @@ RUN pip install --trusted-host pypi.python.org -r debiaiServer/requirements.txt COPY run_debiai_server_prod.py . 
COPY --from=build-stage /frontend/dist debiaiServer/dist ENV FLASK_ENV production -CMD ["python", "run.py"] +CMD ["python", "run_debiai_server_prod.py"] diff --git a/debiaiServer/swagger.yaml b/debiaiServer/swagger.yaml index a6cca5f51..98ae4fc6c 100644 --- a/debiaiServer/swagger.yaml +++ b/debiaiServer/swagger.yaml @@ -12,7 +12,7 @@ paths: /version: get: summary: Ping to check if the backend is running - operationId: controller.projects.ping + operationId: debiaiServer.controller.projects.ping responses: 200: description: The server is online @@ -22,7 +22,7 @@ paths: get: summary: Get data providers list and status tags: [Data Providers] - operationId: controller.dataProviders.get_data_providers + operationId: debiaiServer.controller.dataProviders.get_data_providers responses: 200: description: List of data providers @@ -34,7 +34,7 @@ paths: post: summary: Add data provider to data providers list tags: [Data Providers] - operationId: controller.dataProviders.post_data_providers + operationId: debiaiServer.controller.dataProviders.post_data_providers parameters: - name: data in: body @@ -63,7 +63,7 @@ paths: delete: summary: Delete data providers from the list tags: [Data Providers] - operationId: controller.dataProviders.delete_data_providers + operationId: debiaiServer.controller.dataProviders.delete_data_providers parameters: - name: dataProviderId in: path @@ -80,7 +80,7 @@ paths: get: summary: Get general informations about a data provider, like his version or the max number sample for each type of request tags: [Data Providers] - operationId: controller.dataProviders.get_data_provider_info + operationId: debiaiServer.controller.dataProviders.get_data_provider_info parameters: - name: dataProviderId in: path @@ -126,7 +126,7 @@ paths: get: summary: Get the projects overview tags: [Project] - operationId: controller.projects.get_projects + operationId: debiaiServer.controller.projects.get_projects responses: 200: description: List of project overviews @@ -138,7 +138,7 @@ paths: post: summary: Post a new project tags: [Project] - operationId: controller.pythonModuleDp.post_project + operationId: debiaiServer.controller.pythonModuleDp.post_project parameters: - name: data in: body @@ -179,7 +179,7 @@ paths: get: summary: Get the projects overview for a data provider tags: [Project] - operationId: controller.projects.get_data_providers_project + operationId: debiaiServer.controller.projects.get_data_providers_project parameters: - name: dataProviderId in: path @@ -197,7 +197,7 @@ paths: get: summary: Get project name, nb of models & nb of selections (overviews of a project) tags: [Project] - operationId: controller.projects.get_project + operationId: debiaiServer.controller.projects.get_project parameters: - name: dataProviderId in: path @@ -216,7 +216,7 @@ paths: delete: summary: remove a project from ID tags: [Project] - operationId: controller.projects.delete_project + operationId: debiaiServer.controller.projects.delete_project parameters: - name: dataProviderId in: path @@ -236,7 +236,7 @@ paths: post: summary: Get the project data id list tags: [Project] - operationId: controller.projects.get_data_id_list + operationId: debiaiServer.controller.projects.get_data_id_list parameters: - name: dataProviderId in: path @@ -290,7 +290,7 @@ paths: post: summary: add a new data blocks level structure tags: [Project] - operationId: controller.pythonModuleDp.post_block_levels + operationId: debiaiServer.controller.pythonModuleDp.post_block_levels parameters: - name: dataProviderId in: path @@ 
-348,7 +348,7 @@ paths: post: summary: add a new expected results structure tags: [Project] - operationId: controller.pythonModuleDp.post_resultsStructure + operationId: debiaiServer.controller.pythonModuleDp.post_resultsStructure parameters: - name: dataProviderId in: path @@ -399,7 +399,7 @@ paths: post: summary: add a model tags: [Model] - operationId: controller.models.post_model + operationId: debiaiServer.controller.models.post_model parameters: - name: dataProviderId in: path @@ -436,7 +436,7 @@ paths: get: summary: Get a model results id list tags: [Model] - operationId: controller.models.get_model_id_list + operationId: debiaiServer.controller.models.get_model_id_list parameters: - name: dataProviderId in: path @@ -463,7 +463,7 @@ paths: delete: summary: remove a model tags: [Model] - operationId: controller.models.delete_model + operationId: debiaiServer.controller.models.delete_model parameters: - name: dataProviderId in: path @@ -487,7 +487,7 @@ paths: post: summary: Add a results to a model tags: [Model] - operationId: controller.pythonModuleDp.add_results_dict + operationId: debiaiServer.controller.pythonModuleDp.add_results_dict parameters: - name: dataProviderId in: path @@ -528,7 +528,7 @@ paths: post: summary: Get the model results from a sample list tags: [Model] - operationId: controller.models.get_results + operationId: debiaiServer.controller.models.get_results parameters: - name: dataProviderId in: path @@ -570,7 +570,7 @@ paths: post: summary: add a tree to an existing project block tree tags: [Block] - operationId: controller.pythonModuleDp.post_block_tree + operationId: debiaiServer.controller.pythonModuleDp.post_block_tree parameters: - name: dataProviderId in: path @@ -605,7 +605,7 @@ paths: post: summary: get a project tree form a sample list tags: [Block] - operationId: controller.data.get_data + operationId: debiaiServer.controller.data.get_data parameters: - name: dataProviderId in: path @@ -647,7 +647,7 @@ paths: get: summary: Get the project selections tags: [Selection] - operationId: controller.selection.get_selections + operationId: debiaiServer.controller.selection.get_selections parameters: - name: dataProviderId in: path @@ -668,7 +668,7 @@ paths: post: summary: add a selection tags: [Selection] - operationId: controller.selection.post_selection + operationId: debiaiServer.controller.selection.post_selection parameters: - name: dataProviderId in: path @@ -710,7 +710,7 @@ paths: get: summary: Get a project selection id list tags: [Selection] - operationId: controller.selection.get_selection_id_list + operationId: debiaiServer.controller.selection.get_selection_id_list parameters: - name: dataProviderId in: path @@ -737,7 +737,7 @@ paths: delete: summary: delete a selection tags: [Selection] - operationId: controller.selection.delete_selection + operationId: debiaiServer.controller.selection.delete_selection parameters: - name: dataProviderId in: path @@ -760,7 +760,7 @@ paths: get: summary: Get all layouts tags: [Layouts] - operationId: controller.layouts.get_layouts + operationId: debiaiServer.controller.layouts.get_layouts responses: 200: description: Layouts for all projects @@ -772,7 +772,7 @@ paths: post: summary: Add a layout tags: [Layouts] - operationId: controller.layouts.post_layout + operationId: debiaiServer.controller.layouts.post_layout parameters: - name: data in: body @@ -819,7 +819,7 @@ paths: delete: summary: Delete a layout tags: [Layouts] - operationId: controller.layouts.delete_layout + operationId: 
debiaiServer.controller.layouts.delete_layout parameters: - name: id in: path @@ -839,7 +839,7 @@ paths: summary: Get all widget configurations overview, return the number of configurations for each widget tags: [Widget configurations] - operationId: controller.widgetConfigurations.get_all_configurations + operationId: debiaiServer.controller.widgetConfigurations.get_all_configurations responses: 200: description: Widget configurations number for each widget @@ -855,7 +855,7 @@ paths: get: summary: Get the widget configurations tags: [Widget configurations] - operationId: controller.widgetConfigurations.get_widget_configurations + operationId: debiaiServer.controller.widgetConfigurations.get_widget_configurations parameters: - name: widgetKey in: path @@ -897,7 +897,7 @@ paths: post: summary: Add a widget configuration tags: [Widget configurations] - operationId: controller.widgetConfigurations.post_configuration + operationId: debiaiServer.controller.widgetConfigurations.post_configuration parameters: - name: widgetKey in: path @@ -942,7 +942,7 @@ paths: delete: summary: Delete a widget configuration tags: [Widget configurations] - operationId: controller.widgetConfigurations.delete_configuration + operationId: debiaiServer.controller.widgetConfigurations.delete_configuration parameters: - name: widgetKey in: path @@ -966,7 +966,7 @@ paths: get: summary: Get the application export methods tags: [Export] - operationId: controller.exportMethods.get_export_methods + operationId: debiaiServer.controller.exportMethods.get_export_methods responses: 200: description: Export method list @@ -997,7 +997,7 @@ paths: post: summary: Create an export method for the app tags: [Export] - operationId: controller.exportMethods.post_export_method + operationId: debiaiServer.controller.exportMethods.post_export_method parameters: - name: data in: body @@ -1029,7 +1029,7 @@ paths: delete: summary: Remove an export method for the app tags: [Export] - operationId: controller.exportMethods.delete_export_method + operationId: debiaiServer.controller.exportMethods.delete_export_method parameters: - name: exportMethodId in: path @@ -1045,7 +1045,7 @@ paths: post: summary: Export data with an export method tags: [Export] - operationId: controller.exportMethods.exportData + operationId: debiaiServer.controller.exportMethods.exportData parameters: - name: exportMethodId in: path @@ -1063,7 +1063,7 @@ paths: post: summary: Export a selected sample id list from an export method tags: [Export] - operationId: controller.exportMethods.exportSelection + operationId: debiaiServer.controller.exportMethods.exportSelection parameters: - name: dataProviderId in: path @@ -1103,7 +1103,7 @@ paths: get: summary: Get all Algo providers and their algorithms tags: [AlgoProviders] - operationId: controller.algoProviders.get_algo_providers + operationId: debiaiServer.controller.algoProviders.get_algo_providers responses: 200: description: Algorithms list @@ -1116,7 +1116,7 @@ paths: post: summary: Add an Algo provider tags: [AlgoProviders] - operationId: controller.algoProviders.post_algo_provider + operationId: debiaiServer.controller.algoProviders.post_algo_provider parameters: - name: data in: body @@ -1144,7 +1144,7 @@ paths: delete: summary: Delete an Algo provider tags: [AlgoProviders] - operationId: controller.algoProviders.delete_algo_provider + operationId: debiaiServer.controller.algoProviders.delete_algo_provider parameters: - name: name in: path @@ -1162,7 +1162,7 @@ paths: post: summary: Use an algorithm of an Algo provider 
tags: [AlgoProviders] - operationId: controller.algoProviders.use_algo + operationId: debiaiServer.controller.algoProviders.use_algo parameters: - name: algoProviderName in: path @@ -1229,7 +1229,7 @@ paths: post: summary: Calculate pearson correlation between rows tags: [Statistical operations] - operationId: controller.statisticalOperations.pearsonCorrelation + operationId: debiaiServer.controller.statisticalOperations.pearsonCorrelation parameters: - name: data in: body @@ -1260,7 +1260,7 @@ paths: post: summary: Calculate spearman correlation between rows tags: [Statistical operations] - operationId: controller.statisticalOperations.spearmanCorrelation + operationId: debiaiServer.controller.statisticalOperations.spearmanCorrelation parameters: - name: data in: body @@ -1291,7 +1291,7 @@ paths: post: summary: Calculate mutual informations tags: [Statistical operations] - operationId: controller.statisticalOperations.mutualInformation + operationId: debiaiServer.controller.statisticalOperations.mutualInformation parameters: - name: data in: body @@ -1343,7 +1343,7 @@ paths: post: summary: Calculate the mutual information between variables tags: [Statistical operations] - operationId: controller.statisticalOperations.higherDimensionMutualInformation + operationId: debiaiServer.controller.statisticalOperations.higherDimensionMutualInformation parameters: - name: data in: body @@ -1380,7 +1380,7 @@ paths: post: summary: Calculate matrix mutual informations and the higher Dimension tags: [Statistical operations] - operationId: controller.statisticalOperations.mutualAndHigherInformation + operationId: debiaiServer.controller.statisticalOperations.mutualAndHigherInformation parameters: - name: data in: body diff --git a/run_debiai_server_dev.py b/run_debiai_server_dev.py new file mode 100644 index 000000000..048aedc65 --- /dev/null +++ b/run_debiai_server_dev.py @@ -0,0 +1,5 @@ +# This file is used to run the debiai-gui server in development mode + +from debiaiServer import websrv + +websrv.start_server(port=3000, reloader=False, is_dev=True) From 9cf823f7356f73a3033591c206fc969c904dda15 Mon Sep 17 00:00:00 2001 From: "tom.mansion" Date: Mon, 22 Jul 2024 10:38:41 +0200 Subject: [PATCH 89/97] Fixed issue with dependencies --- debiaiServer/controller/statisticalOperations.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/debiaiServer/controller/statisticalOperations.py b/debiaiServer/controller/statisticalOperations.py index dcc3482d4..587373a67 100644 --- a/debiaiServer/controller/statisticalOperations.py +++ b/debiaiServer/controller/statisticalOperations.py @@ -2,10 +2,10 @@ import numpy as np import numpy.random as nr -from scipy.stats.stats import pearsonr, spearmanr +from scipy.stats import pearsonr, spearmanr from scipy.special import digamma import scipy.spatial as ss -from scipy.spatial.ckdtree import cKDTree +from scipy.spatial import cKDTree from sklearn.neighbors import NearestNeighbors from math import log, fabs, sqrt From 2331a476e719aad70fac524bac8bef20377599b8 Mon Sep 17 00:00:00 2001 From: "tom.mansion" Date: Mon, 22 Jul 2024 11:09:42 +0200 Subject: [PATCH 90/97] Fixed issue with Flask env --- Dockerfile | 1 - debiaiServer/websrv.py | 15 ++++++--------- 2 files changed, 6 insertions(+), 10 deletions(-) diff --git a/Dockerfile b/Dockerfile index 119a86475..b99a940ec 100644 --- a/Dockerfile +++ b/Dockerfile @@ -12,6 +12,5 @@ COPY debiaiServer/ debiaiServer/ RUN pip install --trusted-host pypi.python.org -r debiaiServer/requirements.txt COPY 
run_debiai_server_prod.py . COPY --from=build-stage /frontend/dist debiaiServer/dist -ENV FLASK_ENV production CMD ["python", "run_debiai_server_prod.py"] diff --git a/debiaiServer/websrv.py b/debiaiServer/websrv.py index 7207441e1..cc3e6ef59 100644 --- a/debiaiServer/websrv.py +++ b/debiaiServer/websrv.py @@ -1,4 +1,3 @@ -import os import psutil import requests import connexion @@ -18,14 +17,12 @@ CORS(app.app) -def send_frontend(path): +def send_frontend(path, is_dev): if path == "/": path = "index.html" # If production, use the index.html from the dist folder - env = os.getenv("FLASK_ENV", "production") - debug_mode = env == "production" - if debug_mode: + if not is_dev: return send_from_directory("dist", path) # In development, redirect to the DEV_FRONTEND_URL @@ -57,16 +54,16 @@ def send_frontend(path): print("Unexpected request method") -def create_app(): +def create_app(is_dev): # For serving the dashboard @app.route("/") def send_index(): - return send_frontend("/") + return send_frontend("/", is_dev) # For serving the dashboard assets @app.route("/") def send_supporting_elements(path): - return send_frontend(path) + return send_frontend(path, is_dev) return app @@ -102,7 +99,7 @@ def start_server(port, reloader=True, is_dev=True): + colored("http://localhost:" + str(port), DEBUG_COLOR), flush=True, ) - app = create_app() + app = create_app(is_dev) if is_dev: # Use flask server for development app.run(port, debug=True, host="0.0.0.0", use_reloader=reloader) From c3cb816bdf833106afb56dec2fcf7fa3d1c19cff Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Wed, 24 Jul 2024 16:00:03 +0200 Subject: [PATCH 91/97] new name for module and correction to workflow --- .github/workflows/debiai-gui-publish.yml | 3 +-- debiaiServer/debiai_gui_utils.py | 4 ++-- setup.py | 4 ++-- 3 files changed, 5 insertions(+), 6 deletions(-) diff --git a/.github/workflows/debiai-gui-publish.yml b/.github/workflows/debiai-gui-publish.yml index 77f019822..8f228161f 100644 --- a/.github/workflows/debiai-gui-publish.yml +++ b/.github/workflows/debiai-gui-publish.yml @@ -1,4 +1,4 @@ -# This workflow will upload a Debiai-gui Package using Twine when the main branch is updated +# This workflow will upload a Debiai-gui Package using Twine when the main branch is updated # For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries # This workflow uses actions that are not certified by GitHub. 
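The websrv.py change just above replaces the FLASK_ENV lookup with an explicit is_dev flag passed from start_server down to create_app and send_frontend, so the dev/prod branch no longer depends on a hidden environment variable (which is also why the ENV FLASK_ENV line could be dropped from the Dockerfile). A minimal sketch of the same pattern in isolation — function and route names here are illustrative, not the exact DebiAI code:

from flask import Flask, send_from_directory

def make_app(is_dev: bool) -> Flask:
    app = Flask(__name__)

    @app.route("/", defaults={"path": "index.html"})
    @app.route("/<path:path>")
    def frontend(path):
        if not is_dev:
            # Production: serve the frontend bundle shipped with the package
            return send_from_directory("dist", path)
        # Development: hand the request off to the frontend dev server instead
        return "proxied to the dev frontend", 200

    return app

# make_app(True) and make_app(False) can now be exercised directly in tests,
# without mutating os.environ beforehand.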
@@ -12,7 +12,6 @@ on: push: branches: - main - - 205-debiai-easy-start-module-to-launch-debiai-standlalone jobs: deploy: runs-on: ubuntu-latest diff --git a/debiaiServer/debiai_gui_utils.py b/debiaiServer/debiai_gui_utils.py index 51b104819..5fb8b23eb 100644 --- a/debiaiServer/debiai_gui_utils.py +++ b/debiaiServer/debiai_gui_utils.py @@ -27,11 +27,11 @@ def parse_arguments(): def bash_info(): print( - colored("Usage: ", DEBUG_COLOR) + "debiai-gui [OPTIONS] COMMAND\n\n" + colored("Usage: ", DEBUG_COLOR) + "debiai_gui [OPTIONS] COMMAND\n\n" "\t" "Use the line below to run the app: \n\n" "\t\t" - "$ debiai-gui" + "$ debiai_gui" + colored(" start\n\n") + colored("Options:\n", DEBUG_COLOR) + "\t" diff --git a/setup.py b/setup.py index 00d038ae4..e7ed9c194 100644 --- a/setup.py +++ b/setup.py @@ -5,7 +5,7 @@ VERSION = get_app_version() setup( - name="debiai-gui", + name="debiai_gui", version=VERSION, packages=find_packages(include=["debiaiServer", "debiaiServer.*"]), include_package_data=True, @@ -30,7 +30,7 @@ ], entry_points={ "console_scripts": [ - "debiai-gui=debiaiServer.debiai_gui_utils:run", + "debiai_gui=debiaiServer.debiai_gui_utils:run", ], }, author="IRT-Systemx", From 3bb9d39bcd5d7ee61d5f5f2cac2d6effead99cf2 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Wed, 24 Jul 2024 17:25:47 +0200 Subject: [PATCH 92/97] coorected name --- build_and_run.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build_and_run.sh b/build_and_run.sh index fd985743c..c97f266c3 100755 --- a/build_and_run.sh +++ b/build_and_run.sh @@ -8,4 +8,4 @@ python3 setup.py sdist bdist_wheel pip install . # Run the package -debiai-gui start +debiai_gui start From 76035bb1c00c7e283f847bfb361be6ce50f5a630 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Wed, 24 Jul 2024 17:30:51 +0200 Subject: [PATCH 93/97] renamed workflow --- .../workflows/{debiai-gui-publish.yml => debiai_gui-publish.yml} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename .github/workflows/{debiai-gui-publish.yml => debiai_gui-publish.yml} (100%) diff --git a/.github/workflows/debiai-gui-publish.yml b/.github/workflows/debiai_gui-publish.yml similarity index 100% rename from .github/workflows/debiai-gui-publish.yml rename to .github/workflows/debiai_gui-publish.yml From ba60ea07926326d58e1ed8ac9611d59ae07012e7 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Thu, 25 Jul 2024 11:07:11 +0200 Subject: [PATCH 94/97] test for backend --- .github/workflows/pull-request-check.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/pull-request-check.yml b/.github/workflows/pull-request-check.yml index 08988c41f..7ff2d01d8 100644 --- a/.github/workflows/pull-request-check.yml +++ b/.github/workflows/pull-request-check.yml @@ -152,8 +152,7 @@ jobs: pip install -r requirements.txt - name: Test with pytest run: | - cd debiaiServer - python websrv.py & sleep 5 && pytest tests/ + python run_debiai_server_dev.py & sleep 5 && pytest tests/ docker-build-check: # Build the docker image and check that it can run name: Docker build check From 5560a14bedd267534d360562e2a7fcece4293e5e Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Thu, 25 Jul 2024 11:09:49 +0200 Subject: [PATCH 95/97] corrected path --- .github/workflows/pull-request-check.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pull-request-check.yml b/.github/workflows/pull-request-check.yml index 7ff2d01d8..c66487bac 100644 --- a/.github/workflows/pull-request-check.yml +++ 
b/.github/workflows/pull-request-check.yml @@ -152,7 +152,7 @@ jobs: pip install -r requirements.txt - name: Test with pytest run: | - python run_debiai_server_dev.py & sleep 5 && pytest tests/ + python run_debiai_server_dev.py & sleep 5 && pytest debiaiServer/tests/ docker-build-check: # Build the docker image and check that it can run name: Docker build check From 14fb10b844cbcfae57eb41598616384f3319f7d5 Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Thu, 25 Jul 2024 12:40:48 +0200 Subject: [PATCH 96/97] review --- MANIFEST.in | 2 -- build_and_run.sh | 11 ----------- run_debiai_server_dev.py | 2 +- 3 files changed, 1 insertion(+), 14 deletions(-) delete mode 100755 build_and_run.sh diff --git a/MANIFEST.in b/MANIFEST.in index 467a68550..84572565b 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,5 +1,3 @@ -include debiaiServer/swagger.yaml -include debiaiServer/config/config.env include debiaiServer/config/config.ini include images/ * recursive-include debiaiServer/dist * diff --git a/build_and_run.sh b/build_and_run.sh deleted file mode 100755 index c97f266c3..000000000 --- a/build_and_run.sh +++ /dev/null @@ -1,11 +0,0 @@ -# Remove previous build and dist directories -rm -rf build dist debiaiServer.egg-info - -# Generated source distribution and wheel distribution -python3 setup.py sdist bdist_wheel - -# Install the package -pip install . - -# Run the package -debiai_gui start diff --git a/run_debiai_server_dev.py b/run_debiai_server_dev.py index 048aedc65..68721bb75 100644 --- a/run_debiai_server_dev.py +++ b/run_debiai_server_dev.py @@ -2,4 +2,4 @@ from debiaiServer import websrv -websrv.start_server(port=3000, reloader=False, is_dev=True) +websrv.start_server(port=3000, reloader=True, is_dev=True) From 76889190de69678375e69027e0ee9201398fc1bc Mon Sep 17 00:00:00 2001 From: FadyCoding Date: Thu, 25 Jul 2024 14:12:43 +0200 Subject: [PATCH 97/97] added swagger file --- MANIFEST.in | 1 + 1 file changed, 1 insertion(+) diff --git a/MANIFEST.in b/MANIFEST.in index 84572565b..739faf27b 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,3 +1,4 @@ +include debiaiServer/swagger.yaml include debiaiServer/config/config.ini include images/ * recursive-include debiaiServer/dist *
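The MANIFEST.in adjustments in the last two patches are what make a pip-installed debiai_gui self-sufficient: Connexion loads swagger.yaml from the installed package at runtime (and the config loader similarly expects config/config.ini), so both files have to be declared here and then picked up through include_package_data=True in setup.py, otherwise they are silently left out of the sdist and wheel. A quick, illustrative way to confirm the data files actually ship with an installed build (assuming Python 3.9+ for importlib.resources.files):

import importlib.resources as resources

spec = resources.files("debiaiServer").joinpath("swagger.yaml")
print("swagger.yaml packaged:", spec.is_file())

config = resources.files("debiaiServer.config").joinpath("config.ini")
print("config.ini packaged:", config.is_file())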