From b8004105968ba4179eb488518906979d7fd407be Mon Sep 17 00:00:00 2001 From: sostrades-dpeltre Date: Wed, 10 Apr 2024 10:40:11 +0200 Subject: [PATCH 01/73] fix error in callback address --- sos_trades_api/routes/data/authentication_module.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sos_trades_api/routes/data/authentication_module.py b/sos_trades_api/routes/data/authentication_module.py index 3a74266c..36615d52 100644 --- a/sos_trades_api/routes/data/authentication_module.py +++ b/sos_trades_api/routes/data/authentication_module.py @@ -289,7 +289,7 @@ def authenticate_with_keycloak(): # Get authorization URL keycloak = KeycloakAuthenticator() - auth_url = keycloak.auth_url("https://revison.gpp-sostrades.com/api/data/keycloak/callback") + auth_url = keycloak.auth_url("https://revision.gpp-sostrades.com/api/data/keycloak/callback") return redirect(auth_url) @@ -303,7 +303,7 @@ def callback(): keycloak = KeycloakAuthenticator() code = request.args.get('code') - token = keycloak.token("https://revison.gpp-sostrades.com/api/data/keycloak/callback", code) + token = keycloak.token("https://revision.gpp-sostrades.com/api/data/keycloak/callback", code) userinfo = keycloak.user_info(token['access_token']) access_token, refresh_token, return_url, user = authenticate_user_keycloak(userinfo) From 0155e410c80c35858fbfb292a20418aa616be090 Mon Sep 17 00:00:00 2001 From: sostrades-dpeltre Date: Thu, 2 May 2024 06:23:53 +0200 Subject: [PATCH 02/73] fix --- sos_trades_api/config.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/sos_trades_api/config.py b/sos_trades_api/config.py index f92b6e1c..791eb159 100644 --- a/sos_trades_api/config.py +++ b/sos_trades_api/config.py @@ -615,7 +615,7 @@ def kubernetes_flavor_config_for_exec(self): :rtype: dict :raises KeyError: If CONFIG_FLAVOR_KUBERNETES key is not found. If Kubernetes flavor configuration is not valid. 
""" - if self.kubernetes_flavor_for_exec is None: + if self.__kubernetes_flavor_for_exec is None: if self.CONFIG_FLAVOR_KUBERNETES not in self.__server_config_file: raise KeyError("CONFIG_FLAVOR_KUBERNETES is not in configuration file") @@ -627,10 +627,10 @@ def kubernetes_flavor_config_for_exec(self): self.__validate_flavor(kubernetes_flavor["PodExec"]) - self.kubernetes_flavor_for_exec = kubernetes_flavor["PodExec"] - - return self.kubernetes_flavor_config_for_exec + self.__kubernetes_flavor_for_exec = kubernetes_flavor["PodExec"] + return self.__kubernetes_flavor_for_exec + @staticmethod def __validate_flavor(list_flavors:dict): """Validate Kubernetes flavor configuration. From 9bfd0217216027207b516ee91c9928740e295f9e Mon Sep 17 00:00:00 2001 From: sostrades-dpeltre Date: Mon, 6 May 2024 11:45:55 +0200 Subject: [PATCH 03/73] add authrequired route and check kube mode --- sos_trades_api/config.py | 12 +++++++----- .../routes/data/kubernetes_flavor_module.py | 2 ++ 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/sos_trades_api/config.py b/sos_trades_api/config.py index 791eb159..0a4903f3 100644 --- a/sos_trades_api/config.py +++ b/sos_trades_api/config.py @@ -135,11 +135,13 @@ def check(self): eeb_filepath = self.eeb_filepath manifest_folder_path = self.manifests_folder_path - deployment_study_server_filepath = self.deployment_study_server_filepath - service_study_server_filepath = self.service_study_server_filepath - - kubernetes_flavor_config_for_study = self.kubernetes_flavor_config_for_study - kubernetes_flavor_config_for_exec = self.kubernetes_flavor_config_for_exec + + if self.server_mode == self.CONFIG_SERVER_MODE_K8S: + deployment_study_server_filepath = self.deployment_study_server_filepath + service_study_server_filepath = self.service_study_server_filepath + + kubernetes_flavor_config_for_study = self.kubernetes_flavor_config_for_study + kubernetes_flavor_config_for_exec = self.kubernetes_flavor_config_for_exec # pylint: 
enable=unused-variable @property diff --git a/sos_trades_api/routes/data/kubernetes_flavor_module.py b/sos_trades_api/routes/data/kubernetes_flavor_module.py index 6ee28cfd..ce4945ed 100644 --- a/sos_trades_api/routes/data/kubernetes_flavor_module.py +++ b/sos_trades_api/routes/data/kubernetes_flavor_module.py @@ -18,6 +18,7 @@ from flask import jsonify, make_response @app.route(f'/api/data/flavors/study', methods=['GET']) +@auth_required def get_flavors_config_study(): """ retrieve flavors from the configuration @@ -29,6 +30,7 @@ def get_flavors_config_study(): return make_response(jsonify(all_flavor_names), 200) @app.route(f'/api/data/flavors/exec', methods=['GET']) +@auth_required def get_flavors_config_exec(): """ retrieve flavors from the configuration From 525d42c68f579ab4615e42c09475fa93025dd3cd Mon Sep 17 00:00:00 2001 From: b4pm-devops Date: Tue, 11 Jun 2024 08:42:45 +0000 Subject: [PATCH 04/73] pip-compile of api.requirements.txt --- api.requirements.txt | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/api.requirements.txt b/api.requirements.txt index 5c32870d..c20bb6a9 100644 --- a/api.requirements.txt +++ b/api.requirements.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --output-file=./sostrades-webapi/api.requirements.txt ./GreenProductPortfolio/requirements.in ./gemseo/requirements.txt ./portfolio-management/requirements.in ./sostrades-core/requirements.in ./sostrades-webapi/requirements.in ./witness-core/requirements.in ./witness-energy/requirements.in +# pip-compile --output-file=./sostrades-webapi/api.requirements.txt ./GreenProductPortfolio/requirements.in ./gemseo/requirements.txt ./industrial-zones/requirements.in ./portfolio-management/requirements.in ./sostrades-core/requirements.in ./sostrades-webapi/requirements.in ./witness-core/requirements.in ./witness-energy/requirements.in # alembic==1.13.1 # via flask-migrate @@ -227,6 +227,8 @@ pdfo==1.0 
; python_version >= "3" # via -r ./gemseo/requirements.txt pillow==10.3.0 # via matplotlib +pint==0.23 + # via -r ./industrial-zones/requirements.in platformdirs==4.2.2 # via black plotly==5.3.0 @@ -427,6 +429,7 @@ typing-extensions==4.12.2 # alembic # black # jwcrypto + # pint tzdata==2024.1 # via pandas urllib3==2.1.0 From c8a911f19c8e57b1172c294899bbcece04ea1dfb Mon Sep 17 00:00:00 2001 From: magueylard Date: Thu, 13 Jun 2024 16:45:57 +0200 Subject: [PATCH 05/73] Add export datasets --- .../sostrades_data/study_case_controller.py | 6 +- .../sostrades_main/study_case_controller.py | 69 ++++++++++++++++++ sos_trades_api/models/database_models.py | 1 + .../routes/main/study_case_module.py | 64 ++++++++++++++++ .../routes/message/socketio_module.py | 14 ++++ sos_trades_api/tools/coedition/coedition.py | 2 + .../tools/loading/loading_study_and_engine.py | 73 +++++++++++++++++++ .../tools/loading/study_case_manager.py | 5 ++ 8 files changed, 232 insertions(+), 2 deletions(-) diff --git a/sos_trades_api/controllers/sostrades_data/study_case_controller.py b/sos_trades_api/controllers/sostrades_data/study_case_controller.py index 1880cce8..1a3334ca 100644 --- a/sos_trades_api/controllers/sostrades_data/study_case_controller.py +++ b/sos_trades_api/controllers/sostrades_data/study_case_controller.py @@ -791,7 +791,7 @@ def get_study_case_notifications(study_identifier): notif.message, [], ) - if notif.type == UserCoeditionAction.SAVE: + if notif.type == UserCoeditionAction.SAVE or notif.type == UserCoeditionAction.EXPORT: changes_query = ( StudyCaseChange.query.filter( StudyCaseChange.notification_id == notif.id, @@ -849,8 +849,10 @@ def create_new_notification_after_update_parameter(study_id, change_type, coedit user_coedition_action = getattr(UserCoeditionAction, action) # Determine the coedition message based on the type - if change_type == StudyCaseChange.DATASET_MAPPING_CHANGE: + if change_type == StudyCaseChange.DATASET_MAPPING_CHANGE and user_coedition_action == 
UserCoeditionAction.SAVE: coedition_message = CoeditionMessage.IMPORT_DATASET + if change_type == StudyCaseChange.DATASET_MAPPING_EXPORT and user_coedition_action == UserCoeditionAction.EXPORT: + coedition_message = CoeditionMessage.EXPORT_DATASET else: coedition_message = CoeditionMessage.SAVE diff --git a/sos_trades_api/controllers/sostrades_main/study_case_controller.py b/sos_trades_api/controllers/sostrades_main/study_case_controller.py index 45dbe692..a5dd112f 100644 --- a/sos_trades_api/controllers/sostrades_main/study_case_controller.py +++ b/sos_trades_api/controllers/sostrades_main/study_case_controller.py @@ -90,6 +90,7 @@ invalidate_namespace_after_save, ) from sos_trades_api.tools.loading.loading_study_and_engine import ( + study_case_manager_export_from_dataset_mapping, study_case_manager_loading, study_case_manager_loading_from_reference, study_case_manager_loading_from_study, @@ -588,6 +589,58 @@ def update_study_parameters_from_datasets_mapping(study_id, user, datasets_mappi study_case_cache.release_study_case(study_id) raise StudyCaseError(error) +def export_study_parameters_from_datasets_mapping(study_id, user, datasets_mapping, notification_id): + """ + Export study parameters in datasets defined in the mapping file + :param: study_id, id of the study + :type: integer + :param: user, user that did the modification of parameters + :type: integer + :param: datasets_mapping, namespace+parameter to connector_id+dataset_id+parameter mapping + :type: dict + """ + try: + + # Retrieve study_manager + study_manager = study_case_cache.get_study_case(study_id, False) + + + # Launch load study-case with new parameters from dataset + if notification_id not in study_manager.dataset_export_status_dict.keys(): + + # check that the study is not loading + if study_manager.load_status != LoadStatus.IN_PROGESS: + study_manager.dataset_export_status_dict[notification_id] = LoadStatus.IN_PROGESS + # Deserialize mapping + datasets_mapping_deserialized = 
DatasetsMapping.deserialize(datasets_mapping) + + threading.Thread( + target=study_case_manager_export_from_dataset_mapping, + args=(study_manager, datasets_mapping_deserialized, notification_id), + ).start() + else: + raise exception("study case is currently loading, please retry the export at the end of the loading.") + # deal with errors + elif study_manager.dataset_export_status_dict[notification_id] == LoadStatus.IN_ERROR: + if notification_id in study_manager.dataset_export_error_dict.keys(): + raise Exception(study_manager.dataset_export_error_dict[notification_id]) + else: + raise Exception("Error while exporting parameters in dataset") + + # return the status of the export + return study_manager.dataset_export_status_dict.get(notification_id, LoadStatus.NONE) + + except DatasetsMappingException as exception : + # Releasing study + study_case_cache.release_study_case(study_id) + app.logger.exception( + f"Error when updating in background (from datasets mapping) {study_manager.study.name}:{exception}") + raise exception + except Exception as error: + # Releasing study + study_case_cache.release_study_case(study_id) + raise StudyCaseError(error) + def get_dataset_import_error_message(study_id): """ Retrieve study manager dataset load error in cache @@ -596,6 +649,22 @@ def get_dataset_import_error_message(study_id): study_manager = study_case_cache.get_study_case(study_id, False) return study_manager.dataset_load_error +def get_dataset_export_status(study_id, notification_id): + """ + Retrieve study manager dataset export status in cache + """ + # Retrieve study_manager + study_manager = study_case_cache.get_study_case(study_id, False) + return study_manager.dataset_export_status_dict.get(notification_id, LoadStatus.NONE) + +def get_dataset_export_error_message(study_id, notification_id): + """ + Retrieve study manager dataset export error in cache + """ + # Retrieve study_manager + study_manager = study_case_cache.get_study_case(study_id, False) + return 
study_manager.dataset_export_error_dict.get(notification_id, "") + def update_study_parameters(study_id, user, files_list, file_info, parameters_to_save, columns_to_delete): diff --git a/sos_trades_api/models/database_models.py b/sos_trades_api/models/database_models.py index d52ddbce..a1f9deae 100644 --- a/sos_trades_api/models/database_models.py +++ b/sos_trades_api/models/database_models.py @@ -686,6 +686,7 @@ class StudyCaseChange(db.Model): """StudyCaseChanges class""" DATASET_MAPPING_CHANGE = "dataset_mapping" + DATASET_MAPPING_EXPORT = "dataset_mapping_export" CSV_CHANGE = "csv" SCALAR_CHANGE = "scalar" diff --git a/sos_trades_api/routes/main/study_case_module.py b/sos_trades_api/routes/main/study_case_module.py index 8ea24607..3f71d371 100644 --- a/sos_trades_api/routes/main/study_case_module.py +++ b/sos_trades_api/routes/main/study_case_module.py @@ -23,6 +23,9 @@ from sos_trades_api.controllers.sostrades_main.study_case_controller import ( copy_study_discipline_data, delete_study_cases, + export_study_parameters_from_datasets_mapping, + get_dataset_export_error_message, + get_dataset_export_status, get_dataset_import_error_message, get_file_stream, get_study_data_file_path, @@ -170,6 +173,67 @@ def update_study_from_datasets_mapping(study_id, notification_id): raise BadRequest("Missing mandatory parameter: study identifier in url") +@app.route("/api/main/study-case///export-datasets-mapping", methods=["POST"]) +@auth_required +def export_study_from_datasets_mapping(study_id, notification_id): + if study_id is not None: + user = session["user"] + # Verify user has study case authorisation to load study (Contributor) + study_case_access = StudyCaseAccess(user.id, study_id) + if not study_case_access.check_user_right_for_study(AccessRights.CONTRIBUTOR, study_id): + raise BadRequest( + "You do not have the necessary rights to modify this study case") + + # Proceeding after rights verification + files_data = None + if "datasets_mapping_file" in request.files: 
+ try: + file_content = request.files["datasets_mapping_file"].read().decode("utf-8") + files_data = json.loads(file_content) + + except Exception as ex: + raise BadRequest(f"Invalid JSON format : {ex}") + else: + raise BadRequest("Missing mandatory datasets_mapping_file") + + resp = make_response( + jsonify(export_study_parameters_from_datasets_mapping(study_id, user, files_data, notification_id)), 200) + return resp + + raise BadRequest("Missing mandatory parameter: study identifier in url") + +@app.route("/api/main/study-case///export-datasets-status", methods=["GET"]) +@auth_required +def get_export_study_in_datasets_status(study_id, notification_id): + if study_id is not None: + user = session["user"] + # Verify user has study case authorisation to load study (Contributor) + study_case_access = StudyCaseAccess(user.id, study_id) + if not study_case_access.check_user_right_for_study(AccessRights.CONTRIBUTOR, study_id): + raise BadRequest( + "You do not have the necessary rights to get this study case") + resp = make_response( + jsonify(get_dataset_export_status(study_id, notification_id)), 200) + return resp + + raise BadRequest("Missing mandatory parameter: study identifier in url") + +@app.route("/api/main/study-case///export-datasets-error", methods=["GET"]) +@auth_required +def get_export_study_in_datasets_error(study_id, notification_id): + if study_id is not None: + user = session["user"] + # Verify user has study case authorisation to load study (Contributor) + study_case_access = StudyCaseAccess(user.id, study_id) + if not study_case_access.check_user_right_for_study(AccessRights.CONTRIBUTOR, study_id): + raise BadRequest( + "You do not have the necessary rights to get this study case") + resp = make_response( + jsonify(get_dataset_export_error_message(study_id, notification_id)), 200) + return resp + + raise BadRequest("Missing mandatory parameter: study identifier in url") + @app.route("/api/main/study-case//import-datasets-error-message", 
methods=["GET"]) @auth_required def get_datasets_import_error_message(study_id): diff --git a/sos_trades_api/routes/message/socketio_module.py b/sos_trades_api/routes/message/socketio_module.py index dda11db4..a1a1cbf7 100644 --- a/sos_trades_api/routes/message/socketio_module.py +++ b/sos_trades_api/routes/message/socketio_module.py @@ -223,4 +223,18 @@ def on_validation_change(data): "study_case_validation": validation}, room=room) +@socketio.on("export") +@auth_refresh_required +def on_export(data): + room = data["study_case_id"] + changes = data["changes"] + user = get_authenticated_user() + + # Emit notification + emit("study-exported", + {"author": f"{user.firstname} {user.lastname}", + "type": UserCoeditionAction.EXPORT, + "changes": changes, + "message": CoeditionMessage.EXPORT_DATASET}, + room=room) diff --git a/sos_trades_api/tools/coedition/coedition.py b/sos_trades_api/tools/coedition/coedition.py index 983c9ef5..e6ffba0a 100644 --- a/sos_trades_api/tools/coedition/coedition.py +++ b/sos_trades_api/tools/coedition/coedition.py @@ -42,6 +42,7 @@ class UserCoeditionAction: EDIT = "edit" DELETE = "delete" VALIDATION_CHANGE = "validation_change" + EXPORT = "export" @classmethod def get_attribute_for_value(cls, value): @@ -61,6 +62,7 @@ class CoeditionMessage: CLAIM = "User just claimed the study case execution right." RELOAD = "User just reload the study case." 
IMPORT_DATASET = "User just updated parameter from dataset" + EXPORT_DATASET = "User just exported parameters into dataset" def add_user_to_room(user_id, study_case_id): diff --git a/sos_trades_api/tools/loading/loading_study_and_engine.py b/sos_trades_api/tools/loading/loading_study_and_engine.py index ed4eb772..96921fc1 100644 --- a/sos_trades_api/tools/loading/loading_study_and_engine.py +++ b/sos_trades_api/tools/loading/loading_study_and_engine.py @@ -378,6 +378,79 @@ def study_case_manager_update_from_dataset_mapping(study_case_manager, datasets_ f"Error when updating in background (from datasets mapping) {study_case_manager.study.name}: {ex}") +def study_case_manager_export_from_dataset_mapping(study_case_manager, datasets_mapping_deserialized, notification_id): + """ + Method that export study data into a dataset defined with the datasets mapping + + :params: study_case_manager, study case manager instance to load + :type: StudyCaseManager + + :params: datasets_mapping_deserialized, with namespace and parameter mapping to datasets connector and id + :type: dictionary + + """ + from sostrades_core.datasets.datasets_connectors.abstract_datasets_connector import ( + DatasetGenericException, + ) + + from sos_trades_api.models.loaded_study_case import LoadStatus + from sos_trades_api.server.base_server import app + + app.logger.info(f"exporting in background (from datasets mapping) {study_case_manager.study.name}") + + study_case_manager.dataset_export_status_dict[notification_id] = LoadStatus.IN_PROGESS + study_case_manager.dataset_export_error_dict[notification_id] = None + + try: + # Update parameter into dictionary + datasets_parameter_changes = study_case_manager.export_data_from_dataset_mapping( + from_datasets_mapping=datasets_mapping_deserialized) + # Add change to database + with app.app_context(): + for param_chg in datasets_parameter_changes: + # Check if new value is a dataframe or dict + if isinstance(param_chg.old_value, (pandas.DataFrame, dict, 
ndarray)): + study_case_change = StudyCaseChange.CSV_CHANGE + try: + # Conversion old_value to byte in order to store it in database + serializer = DataSerializer() + old_value_stream = serializer.convert_to_dataframe_and_bytes_io(param_chg.old_value, param_chg.parameter_id) + old_value_bytes = old_value_stream.getvalue() + old_value = None + new_value = None + except Exception as error: + raise f'Error during conversion from {param_chg.variable_type} to byte" : {error}' + else: + study_case_change = StudyCaseChange.DATASET_MAPPING_CHANGE + old_value = str(param_chg.old_value) + old_value_bytes = None + + # Add change into database + add_change_db( + notification_id, + param_chg.parameter_id, + param_chg.variable_type, + None, + study_case_change, + None, + old_value, + old_value_bytes, + param_chg.date, + param_chg.connector_id, + param_chg.dataset_id, + param_chg.dataset_parameter_id, + ) + + study_case_manager.dataset_export_status_dict[notification_id] = LoadStatus.LOADED + except DatasetGenericException as ex: + study_case_manager.dataset_export_error_dict[notification_id] = f"{ex}" + study_case_manager.dataset_export_status_dict[notification_id] = LoadStatus.IN_ERROR + + app.logger.exception( + f"Error when exporting in background (from datasets mapping) {study_case_manager.study.name}: {ex}") + + + def study_case_manager_loading_from_reference(study_case_manager, no_data, read_only, reference_folder, reference_identifier): """ diff --git a/sos_trades_api/tools/loading/study_case_manager.py b/sos_trades_api/tools/loading/study_case_manager.py index 9717f542..20811876 100644 --- a/sos_trades_api/tools/loading/study_case_manager.py +++ b/sos_trades_api/tools/loading/study_case_manager.py @@ -167,6 +167,11 @@ def __init__(self, study_identifier): self.dataset_load_status = LoadStatus.NONE self.dataset_load_error = None + # export status in case of dataset export + # it is a dict with notification id in case multiple export at the same time + 
self.dataset_export_status_dict = {} + self.dataset_export_error_dict = {} + self.n2_diagram = {} self.__error_message = "" From 8fc3f9e8ab68ef5fcb2f5e30a814908c1edd9bb1 Mon Sep 17 00:00:00 2001 From: magueylard Date: Fri, 14 Jun 2024 10:29:03 +0200 Subject: [PATCH 06/73] fix notification id check in export datasets requests --- sos_trades_api/routes/main/study_case_module.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/sos_trades_api/routes/main/study_case_module.py b/sos_trades_api/routes/main/study_case_module.py index 3f71d371..24721b57 100644 --- a/sos_trades_api/routes/main/study_case_module.py +++ b/sos_trades_api/routes/main/study_case_module.py @@ -147,7 +147,7 @@ def main_load_study_case_by_id(study_id): @app.route("/api/main/study-case///import-datasets-mapping", methods=["POST"]) @auth_required def update_study_from_datasets_mapping(study_id, notification_id): - if study_id is not None: + if study_id is not None and notification_id is not None: user = session["user"] # Verify user has study case authorisation to load study (Contributor) study_case_access = StudyCaseAccess(user.id, study_id) @@ -171,12 +171,12 @@ def update_study_from_datasets_mapping(study_id, notification_id): jsonify(update_study_parameters_from_datasets_mapping(study_id, user, files_data, notification_id)), 200) return resp - raise BadRequest("Missing mandatory parameter: study identifier in url") + raise BadRequest("Missing mandatory parameter: study or notification identifier in url") @app.route("/api/main/study-case///export-datasets-mapping", methods=["POST"]) @auth_required def export_study_from_datasets_mapping(study_id, notification_id): - if study_id is not None: + if study_id is not None and notification_id is not None: user = session["user"] # Verify user has study case authorisation to load study (Contributor) study_case_access = StudyCaseAccess(user.id, study_id) @@ -200,12 +200,12 @@ def export_study_from_datasets_mapping(study_id, 
notification_id): jsonify(export_study_parameters_from_datasets_mapping(study_id, user, files_data, notification_id)), 200) return resp - raise BadRequest("Missing mandatory parameter: study identifier in url") + raise BadRequest("Missing mandatory parameter: study or notification identifier in url") @app.route("/api/main/study-case///export-datasets-status", methods=["GET"]) @auth_required def get_export_study_in_datasets_status(study_id, notification_id): - if study_id is not None: + if study_id is not None and notification_id is not None: user = session["user"] # Verify user has study case authorisation to load study (Contributor) study_case_access = StudyCaseAccess(user.id, study_id) @@ -216,12 +216,12 @@ def get_export_study_in_datasets_status(study_id, notification_id): jsonify(get_dataset_export_status(study_id, notification_id)), 200) return resp - raise BadRequest("Missing mandatory parameter: study identifier in url") + raise BadRequest("Missing mandatory parameter: study or notification identifier in url") @app.route("/api/main/study-case///export-datasets-error", methods=["GET"]) @auth_required def get_export_study_in_datasets_error(study_id, notification_id): - if study_id is not None: + if study_id is not None and notification_id is not None: user = session["user"] # Verify user has study case authorisation to load study (Contributor) study_case_access = StudyCaseAccess(user.id, study_id) @@ -232,7 +232,7 @@ def get_export_study_in_datasets_error(study_id, notification_id): jsonify(get_dataset_export_error_message(study_id, notification_id)), 200) return resp - raise BadRequest("Missing mandatory parameter: study identifier in url") + raise BadRequest("Missing mandatory parameter: study or notification identifier in url") @app.route("/api/main/study-case//import-datasets-error-message", methods=["GET"]) @auth_required From b001144f40653a9956591c6149dd367d4ab9c3e3 Mon Sep 17 00:00:00 2001 From: magueylard Date: Fri, 14 Jun 2024 17:46:24 +0200 Subject: 
[PATCH 07/73] fix pylint error --- .../controllers/sostrades_main/study_case_controller.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sos_trades_api/controllers/sostrades_main/study_case_controller.py b/sos_trades_api/controllers/sostrades_main/study_case_controller.py index a5dd112f..b33f165a 100644 --- a/sos_trades_api/controllers/sostrades_main/study_case_controller.py +++ b/sos_trades_api/controllers/sostrades_main/study_case_controller.py @@ -619,7 +619,7 @@ def export_study_parameters_from_datasets_mapping(study_id, user, datasets_mappi args=(study_manager, datasets_mapping_deserialized, notification_id), ).start() else: - raise exception("study case is currently loading, please retry the export at the end of the loading.") + raise Exception("study case is currently loading, please retry the export at the end of the loading.") # deal with errors elif study_manager.dataset_export_status_dict[notification_id] == LoadStatus.IN_ERROR: if notification_id in study_manager.dataset_export_error_dict.keys(): From 19c15008d35a69664a19dcd8249f492549a318f0 Mon Sep 17 00:00:00 2001 From: b4pm-devops Date: Thu, 20 Jun 2024 08:14:57 +0000 Subject: [PATCH 08/73] pip-compile of api.requirements.txt --- api.requirements.txt | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/api.requirements.txt b/api.requirements.txt index c20bb6a9..fdfddf28 100644 --- a/api.requirements.txt +++ b/api.requirements.txt @@ -53,7 +53,7 @@ dill==0.3.8 # via openturns dnspython==2.6.1 # via eventlet -ecos==2.0.13 +ecos==2.0.14 # via cvxpy et-xmlfile==1.1.0 # via openpyxl @@ -181,6 +181,7 @@ numpy==1.24.4 # matplotlib # nlopt # numpoly + # numpy-financial # osqp # pandas # pdfo @@ -192,6 +193,8 @@ numpy==1.24.4 # scipy # scs # seaborn +numpy-financial==1.0.0 + # via -r ./portfolio-management/requirements.in oauthlib==3.2.2 # via # kubernetes @@ -341,7 +344,7 @@ pyyaml==6.0.1 # -r ./sostrades-core/requirements.in # -r 
./sostrades-webapi/requirements.in # kubernetes -qdldl==0.1.7.post2 +qdldl==0.1.7.post3 # via osqp requests==2.31.0 # via @@ -379,7 +382,7 @@ scipy==1.10.1 # scikit-learn # scs # seaborn -scs==3.2.4.post2 +scs==3.2.4.post3 # via cvxpy seaborn==0.9.0 # via -r ./witness-core/requirements.in @@ -410,7 +413,7 @@ sympy==1.4 # via # -r ./gemseo/requirements.txt # -r ./sostrades-core/requirements.in -tenacity==8.3.0 +tenacity==8.4.1 # via plotly threadpoolctl==3.5.0 # via scikit-learn From c5ebd92622b1f746a9dcfe9b2061b28c485f3a9a Mon Sep 17 00:00:00 2001 From: GOYON Guillaume Date: Mon, 24 Jun 2024 14:49:47 +0200 Subject: [PATCH 09/73] Ruff fixes --- migrations/env.py | 4 ++-- sos_trades_api/routes/api_v0/__init__.py | 5 +---- sos_trades_api/routes/data/__init__.py | 5 +---- sos_trades_api/routes/main/__init__.py | 6 +----- sos_trades_api/routes/message/__init__.py | 5 +---- sos_trades_api/routes/post_processing/__init__.py | 5 +---- sos_trades_api/server/base_server.py | 4 ++-- .../tests/controllers/unit_test_basic_config.py | 3 ++- .../tools/execution/execution_engine_observer.py | 4 ++-- sos_trades_api/tools/logger/reference_mysql_handler.py | 4 ++-- sos_trades_api/tools/visualisation/interface_diagram.py | 8 ++++---- 11 files changed, 19 insertions(+), 34 deletions(-) diff --git a/migrations/env.py b/migrations/env.py index 40bea365..39695353 100644 --- a/migrations/env.py +++ b/migrations/env.py @@ -1,6 +1,6 @@ ''' Copyright 2022 Airbus SAS -Modifications on 2024/06/07 Copyright 2024 Capgemini +Modifications on 2024/06/07-2024/06/13 Copyright 2024 Capgemini Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at @@ -34,7 +34,7 @@ # for 'autogenerate' support # from myapp import mymodel # target_metadata = mymodel.Base.metadata -from flask import current_app +from flask import current_app # noqa: E402 config.set_main_option( "sqlalchemy.url", current_app.config.get( diff --git a/sos_trades_api/routes/api_v0/__init__.py b/sos_trades_api/routes/api_v0/__init__.py index cabf23a4..74f63880 100644 --- a/sos_trades_api/routes/api_v0/__init__.py +++ b/sos_trades_api/routes/api_v0/__init__.py @@ -1,6 +1,6 @@ ''' Copyright 2022 Airbus SAS -Modifications on 2024/06/07 Copyright 2024 Capgemini +Modifications on 2024/06/07-2024/06/13 Copyright 2024 Capgemini Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at @@ -14,9 +14,6 @@ limitations under the License. ''' -""" -RESTful API views -""" import os diff --git a/sos_trades_api/routes/data/__init__.py b/sos_trades_api/routes/data/__init__.py index 375a6ebb..d837459b 100644 --- a/sos_trades_api/routes/data/__init__.py +++ b/sos_trades_api/routes/data/__init__.py @@ -1,6 +1,6 @@ ''' Copyright 2022 Airbus SAS -Modifications on 2024/06/07 Copyright 2024 Capgemini +Modifications on 2024/06/07-2024/06/13 Copyright 2024 Capgemini Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at @@ -14,9 +14,6 @@ limitations under the License. 
''' -""" -RESTful API views -""" import os.path diff --git a/sos_trades_api/routes/main/__init__.py b/sos_trades_api/routes/main/__init__.py index 6e9a99e2..d837459b 100644 --- a/sos_trades_api/routes/main/__init__.py +++ b/sos_trades_api/routes/main/__init__.py @@ -1,6 +1,6 @@ ''' Copyright 2022 Airbus SAS -Modifications on 2024/06/07 Copyright 2024 Capgemini +Modifications on 2024/06/07-2024/06/13 Copyright 2024 Capgemini Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at @@ -14,10 +14,6 @@ limitations under the License. ''' -""" -RESTful API views -""" - import os.path diff --git a/sos_trades_api/routes/message/__init__.py b/sos_trades_api/routes/message/__init__.py index 6e9a99e2..e76bdec4 100644 --- a/sos_trades_api/routes/message/__init__.py +++ b/sos_trades_api/routes/message/__init__.py @@ -1,6 +1,6 @@ ''' Copyright 2022 Airbus SAS -Modifications on 2024/06/07 Copyright 2024 Capgemini +Modifications on 2024/06/07-2024/06/13 Copyright 2024 Capgemini Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at @@ -14,9 +14,6 @@ limitations under the License. ''' -""" -RESTful API views -""" import os.path diff --git a/sos_trades_api/routes/post_processing/__init__.py b/sos_trades_api/routes/post_processing/__init__.py index 6e9a99e2..e76bdec4 100644 --- a/sos_trades_api/routes/post_processing/__init__.py +++ b/sos_trades_api/routes/post_processing/__init__.py @@ -1,6 +1,6 @@ ''' Copyright 2022 Airbus SAS -Modifications on 2024/06/07 Copyright 2024 Capgemini +Modifications on 2024/06/07-2024/06/13 Copyright 2024 Capgemini Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at @@ -14,9 +14,6 @@ limitations under the License. 
''' -""" -RESTful API views -""" import os.path diff --git a/sos_trades_api/server/base_server.py b/sos_trades_api/server/base_server.py index 9a309cf8..baf372f8 100644 --- a/sos_trades_api/server/base_server.py +++ b/sos_trades_api/server/base_server.py @@ -1,6 +1,6 @@ ''' Copyright 2022 Airbus SAS -Modifications on 2023/11/22-2024/05/16 Copyright 2023 Capgemini +Modifications on 2023/11/22-2024/06/13 Copyright 2023 Capgemini Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -931,7 +931,7 @@ def my_expired_token_callback(expired_token): # For migration to detect new tables # After running migration script, remove them from here to prevent import # error - if app != None and db != None: + if app is not None and db is not None: migrate = Migrate(app, db, compare_type=False) # Attention compare type find a difference in ReferenceGenerationStatus diff --git a/sos_trades_api/tests/controllers/unit_test_basic_config.py b/sos_trades_api/tests/controllers/unit_test_basic_config.py index cb60aa2c..b22e6844 100644 --- a/sos_trades_api/tests/controllers/unit_test_basic_config.py +++ b/sos_trades_api/tests/controllers/unit_test_basic_config.py @@ -1,6 +1,6 @@ ''' Copyright 2022 Airbus SAS -Modifications on 2024/06/07 Copyright 2024 Capgemini +Modifications on 2024/06/07-2024/06/13 Copyright 2024 Capgemini Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at @@ -71,6 +71,7 @@ print(f"Database used for test: {test_database_name}") print(f"Database used for test log: {test_log_database_name}") +# ruff: noqa: E402 import unittest from builtins import classmethod diff --git a/sos_trades_api/tools/execution/execution_engine_observer.py b/sos_trades_api/tools/execution/execution_engine_observer.py index 29526d30..2c669fe9 100644 --- a/sos_trades_api/tools/execution/execution_engine_observer.py +++ b/sos_trades_api/tools/execution/execution_engine_observer.py @@ -1,6 +1,6 @@ ''' Copyright 2022 Airbus SAS -Modifications on 2024/06/07 Copyright 2024 Capgemini +Modifications on 2024/06/07-2024/06/13 Copyright 2024 Capgemini Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at @@ -115,7 +115,7 @@ def __update_database(self): disciplines_entries[self.__identifier_mapping[discipline_identifier] ] = discipline_status - if elasped_time > 2.0 or flush == True: + if elasped_time > 2.0 or flush: if len(disciplines_entries) > 0: for discipline_identifier, discipline_status in disciplines_entries.items(): diff --git a/sos_trades_api/tools/logger/reference_mysql_handler.py b/sos_trades_api/tools/logger/reference_mysql_handler.py index 99fc3c45..d353af29 100644 --- a/sos_trades_api/tools/logger/reference_mysql_handler.py +++ b/sos_trades_api/tools/logger/reference_mysql_handler.py @@ -1,6 +1,6 @@ ''' Copyright 2022 Airbus SAS -Modifications on 2024/06/07 Copyright 2024 Capgemini +Modifications on 2024/06/07-2024/06/13 Copyright 2024 Capgemini Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at @@ -116,7 +116,7 @@ def __write_into_database(self, flush=False): :params: flush, boolean to flush the list without taking into account number of elements :type: boolean """ - if len(self.__inner_bulk_list) > 200 or flush == True: + if len(self.__inner_bulk_list) > 200 or flush: try: with app.app_context(): db.session.bulk_save_objects(self.__inner_bulk_list) diff --git a/sos_trades_api/tools/visualisation/interface_diagram.py b/sos_trades_api/tools/visualisation/interface_diagram.py index d22104ae..28e3f668 100644 --- a/sos_trades_api/tools/visualisation/interface_diagram.py +++ b/sos_trades_api/tools/visualisation/interface_diagram.py @@ -1,6 +1,6 @@ ''' Copyright 2022 Airbus SAS -Modifications on 2024/05/16 Copyright 2024 Capgemini +Modifications on 2024/05/16-2024/06/13 Copyright 2024 Capgemini Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -536,9 +536,9 @@ def filter_discipline_with_no_exchanges( filtered_discipline_node_list = [] for disc_dict in discipline_node_list: disc_links = [ - l["id"] - for l in links_list - if l["from"] == disc_dict["id"] or l["to"] == disc_dict["id"] + link["id"] + for link in links_list + if link["from"] == disc_dict["id"] or link["to"] == disc_dict["id"] ] if len(disc_links) > 0: disc_dict["couplings_number"] = len(disc_links) From 74a9f6ab833d257c23abbb48ed95bfc8e9b70005 Mon Sep 17 00:00:00 2001 From: GOYON Guillaume Date: Mon, 24 Jun 2024 15:20:49 +0200 Subject: [PATCH 10/73] Ruff fixes --- .../active_study_management.py | 3 +-- .../visualisation/execution_workflow_graph.py | 20 +++++++++---------- 2 files changed, 11 insertions(+), 12 deletions(-) diff --git a/sos_trades_api/tools/active_study_management/active_study_management.py b/sos_trades_api/tools/active_study_management/active_study_management.py index 7280eb31..ab8eb4d6 100644 --- 
a/sos_trades_api/tools/active_study_management/active_study_management.py +++ b/sos_trades_api/tools/active_study_management/active_study_management.py @@ -45,7 +45,6 @@ def check_studies_last_active_date( delay_hr, logger): is_inactive = False try: - # read the file and get the last_active date with open(file) as f: last_active_date_str = f.readline().strip() @@ -61,7 +60,7 @@ def check_studies_last_active_date( delay_hr, logger): # check if the date is past the delay of inactivity - if last_active_date != None: + if last_active_date is not None: delta_time = datetime.now() - timedelta(hours=delay_hr) is_inactive = last_active_date < delta_time diff --git a/sos_trades_api/tools/visualisation/execution_workflow_graph.py b/sos_trades_api/tools/visualisation/execution_workflow_graph.py index d93294c0..695f1768 100644 --- a/sos_trades_api/tools/visualisation/execution_workflow_graph.py +++ b/sos_trades_api/tools/visualisation/execution_workflow_graph.py @@ -1,6 +1,6 @@ ''' Copyright 2022 Airbus SAS -Modifications on 2023/05/12-2023/11/03 Copyright 2023 Capgemini +Modifications on 2023/05/12-2024/06/24 Copyright 2023 Capgemini Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
@@ -531,7 +531,7 @@ def create_dot_graph(self): drawn_nodes = set() for nodeId, node in self.nodes_dict.items(): - if node["hasLinks"] == True and node["level"] == 1: + if node["hasLinks"] and node["level"] == 1: dot.node( name=str(nodeId), label="\n".join([node["label"], node["type"]]), @@ -645,10 +645,10 @@ def replace_scatter_data_by_links( # look for all in links to the scatter data with this parameter in_links = { - l_id: l - for l_id, l in self.links_dict.items() - if l["to"] == param_mapping["discipline"] - and param_in in l["parameters"] + l_id: link_aux + for l_id, link_aux in self.links_dict.items() + if link_aux["to"] == param_mapping["discipline"] + and param_in in link_aux["parameters"] } for l_in_id, l_dict in in_links.items(): @@ -697,10 +697,10 @@ def replace_scatter_data_by_links( # look for all out links of the scatter data with this # parameter out_links = { - l_id: l - for l_id, l in self.links_dict.items() - if l["from"] == param_mapping["discipline"] - and param_in in l["parameters"] + l_id: link_aux + for l_id, link_aux in self.links_dict.items() + if link_aux["from"] == param_mapping["discipline"] + and param_in in link_aux["parameters"] } for l_out_id, l_dict in out_links.items(): From 7b15ab952d5b3b557b286b2f74b60ea6726d9609 Mon Sep 17 00:00:00 2001 From: GOYON Guillaume Date: Mon, 24 Jun 2024 17:47:11 +0200 Subject: [PATCH 11/73] Ruff fixes --- .../sostrades_main/study_case_controller.py | 8 ++--- .../tests/controllers/l0_test_calculation.py | 31 +++++++------------ .../process_management/process_management.py | 7 ++--- .../reference_management.py | 4 +-- 4 files changed, 21 insertions(+), 29 deletions(-) diff --git a/sos_trades_api/controllers/sostrades_main/study_case_controller.py b/sos_trades_api/controllers/sostrades_main/study_case_controller.py index b33f165a..c744b97b 100644 --- a/sos_trades_api/controllers/sostrades_main/study_case_controller.py +++ b/sos_trades_api/controllers/sostrades_main/study_case_controller.py @@ -1,6 +1,6 
@@ ''' Copyright 2022 Airbus SAS -Modifications on 2023/08/30-2024/05/16 Copyright 2023 Capgemini +Modifications on 2023/08/30-2024/06/24 Copyright 2023 Capgemini Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -689,11 +689,11 @@ def update_study_parameters(study_id, user, files_list, file_info, parameters_to study_manager = study_case_cache.get_study_case(study_id, True) # Create notification - if parameters_to_save != [] or files_list != None or columns_to_delete != []: + if parameters_to_save != [] or files_list is not None or columns_to_delete != []: # Add notification to database new_notification_id = add_notification_db(study_id, user, UserCoeditionAction.SAVE, CoeditionMessage.SAVE) - if files_list != None: + if files_list is not None: for file in files_list: # Converted file stream to a data frame # Write temporarly the received file @@ -1174,7 +1174,7 @@ def clean_database_with_disabled_study_case(logger=None): @type Logger """ - study_list = StudyCase.query.filter(StudyCase.disabled == True).all() + study_list = StudyCase.query.filter(StudyCase.disabled).all() logger.info(f"{len(study_list)} study disabled found") diff --git a/sos_trades_api/tests/controllers/l0_test_calculation.py b/sos_trades_api/tests/controllers/l0_test_calculation.py index 6657fd0f..8eeee1ce 100644 --- a/sos_trades_api/tests/controllers/l0_test_calculation.py +++ b/sos_trades_api/tests/controllers/l0_test_calculation.py @@ -1,6 +1,6 @@ ''' Copyright 2022 Airbus SAS -Modifications on 2024/03/06 Copyright 2024 Capgemini +Modifications on 2024/03/06-2024/06/24 Copyright 2024 Capgemini Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
@@ -183,16 +183,12 @@ def test_02_calculation_status(self): ) from sos_trades_api.models.database_models import StudyCase, StudyCaseExecution with DatabaseUnitTestConfiguration.app.app_context(): - sc = StudyCase.query.filter( - StudyCase.name == self.test_study_name).first() - self.assertIsNotNone(sc.current_execution_id, - "No study case execution created") + sc = StudyCase.query.filter(StudyCase.name == self.test_study_name).first() + self.assertIsNotNone(sc.current_execution_id, "No study case execution created") sc_status = calculation_status(sc.id) - sce = StudyCaseExecution.query.filter( - StudyCaseExecution.id == sc.current_execution_id).first() - self.assertEqual(sc_status.study_case_execution_status, sce.execution_status, - "Study case execution status not coherent") + sce = StudyCaseExecution.query.filter(StudyCaseExecution.id == sc.current_execution_id).first() + self.assertEqual(sc_status.study_case_execution_status, sce.execution_status, "Study case execution status not coherent") def test_03_get_calculation_dashboard(self): import os @@ -200,6 +196,7 @@ def test_03_get_calculation_dashboard(self): from sos_trades_api.controllers.sostrades_data.calculation_controller import ( execute_calculation, + stop_calculation, get_calculation_dashboard, ) from sos_trades_api.models.database_models import ( @@ -208,16 +205,13 @@ def test_03_get_calculation_dashboard(self): User, ) with DatabaseUnitTestConfiguration.app.app_context(): - sc = StudyCase.query.filter( - StudyCase.name == self.test_study_name).first() + sc = StudyCase.query.filter(StudyCase.name == self.test_study_name).first() + stop_calculation(sc.id) os.environ["SOS_TRADES_EXECUTION_STRATEGY"] = "thread" execute_calculation(sc.id, User.STANDARD_USER_ACCOUNT_NAME) - calc_dashboard = list(filter(lambda cd: cd.execution_status == StudyCaseExecution.PENDING, - get_calculation_dashboard())) - self.assertTrue(len(calc_dashboard) >= 1, - "At least one study should be running.") - 
self.assertEqual(calc_dashboard[0].study_case_id, sc.id, - f"Study running should be study with id { sc.id }") + calc_dashboard = list(filter(lambda cd: cd.execution_status == StudyCaseExecution.PENDING, get_calculation_dashboard())) + self.assertTrue(len(calc_dashboard) >= 1, "At least one study should be running.") + self.assertEqual(calc_dashboard[0].study_case_id, sc.id, f"Study running should be study with id { sc.id }") # Wait for process calculation end time.sleep(50.0) @@ -229,8 +223,7 @@ def test_04_stop_calculation(self): ) from sos_trades_api.models.database_models import StudyCase, StudyCaseExecution with DatabaseUnitTestConfiguration.app.app_context(): - sc = StudyCase.query.filter( - StudyCase.name == self.test_study_name).first() + sc = StudyCase.query.filter(StudyCase.name == self.test_study_name).first() stop_calculation(sc.id) sc_status = calculation_status(sc.id) diff --git a/sos_trades_api/tools/process_management/process_management.py b/sos_trades_api/tools/process_management/process_management.py index d2a32c85..f827836a 100644 --- a/sos_trades_api/tools/process_management/process_management.py +++ b/sos_trades_api/tools/process_management/process_management.py @@ -1,6 +1,6 @@ ''' Copyright 2022 Airbus SAS -Modifications on 2023/10/26-2023/11/03 Copyright 2023 Capgemini +Modifications on 2023/10/26-2024/06/24 Copyright 2023 Capgemini Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
@@ -209,7 +209,7 @@ def update_database_with_process(additional_repository_list=None, logger=None, d logger.info(f"{new_process_count} new process(es) found") logger.info(f"{enabled_process_count} enabled process(es)") - disabled_process = Process.query.filter(Process.disabled == True).all() + disabled_process = Process.query.filter(Process.disabled).all() if len(disabled_process) > 0: from sos_trades_api.controllers.sostrades_main.study_case_controller import ( delete_study_cases, @@ -248,8 +248,7 @@ def update_database_with_process(additional_repository_list=None, logger=None, d f'Removed study case with id : {sc_id} and name : "{sc_name}"') logger.info("Start deleting disabled process...") - disabled_process_to_delete = Process.query.filter( - Process.disabled == True).all() + disabled_process_to_delete = Process.query.filter(Process.disabled).all() for process in disabled_process_to_delete: logger.info( f'Removed process with id : {process.id} and name : "{process.name}"') diff --git a/sos_trades_api/tools/reference_management/reference_management.py b/sos_trades_api/tools/reference_management/reference_management.py index cae4cdb0..268a8462 100644 --- a/sos_trades_api/tools/reference_management/reference_management.py +++ b/sos_trades_api/tools/reference_management/reference_management.py @@ -1,6 +1,6 @@ ''' Copyright 2022 Airbus SAS -Modifications on 2023/06/30-2023/11/03 Copyright 2023 Capgemini +Modifications on 2023/06/30-2024/06/24 Copyright 2023 Capgemini Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
@@ -91,7 +91,7 @@ def update_database_with_references(logger=None): logger.info(f"{new_references_count} new reference(s) found") logger.info(f"{enabled_references_count} enabled reference(s)") - disabled_references = ReferenceStudy.query.filter(ReferenceStudy.disabled == True).all() + disabled_references = ReferenceStudy.query.filter(ReferenceStudy.disabled).all() if len(disabled_references) > 0: logger.info(f"{len(disabled_references)} disabled reference found.") From bf5b286bbac6d430eee5e8d83ea355d2b34b58d3 Mon Sep 17 00:00:00 2001 From: GOYON Guillaume Date: Tue, 25 Jun 2024 12:11:39 +0200 Subject: [PATCH 12/73] Ruff fixes --- .../controllers/sostrades_data/calculation_controller.py | 6 +++--- .../controllers/sostrades_data/study_case_controller.py | 6 +++--- sos_trades_api/server/base_server.py | 4 ++-- sos_trades_api/tests/controllers/l0_test_calculation.py | 4 ++-- 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/sos_trades_api/controllers/sostrades_data/calculation_controller.py b/sos_trades_api/controllers/sostrades_data/calculation_controller.py index b52831ae..b6fc6aa7 100644 --- a/sos_trades_api/controllers/sostrades_data/calculation_controller.py +++ b/sos_trades_api/controllers/sostrades_data/calculation_controller.py @@ -1,6 +1,6 @@ ''' Copyright 2022 Airbus SAS -Modifications on 2023/05/12-2024/04/05 Copyright 2023 Capgemini +Modifications on 2023/05/12-2024/06/25 Copyright 2023 Capgemini Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
@@ -128,8 +128,8 @@ def execute_calculation(study_id, username): # study_case_execution_id key) StudyCaseExecutionLog.query\ .filter(StudyCaseExecutionLog.study_case_id == study_id)\ - .filter(StudyCaseExecutionLog.study_case_execution_id == None)\ - .delete() + .filter(StudyCaseExecutionLog.study_case_execution_id == None) \ + .delete() # noqa: E711 db.session.commit() # Once the process is validated, then generate the corresponding data diff --git a/sos_trades_api/controllers/sostrades_data/study_case_controller.py b/sos_trades_api/controllers/sostrades_data/study_case_controller.py index 1a3334ca..64256dd5 100644 --- a/sos_trades_api/controllers/sostrades_data/study_case_controller.py +++ b/sos_trades_api/controllers/sostrades_data/study_case_controller.py @@ -1,6 +1,6 @@ ''' Copyright 2022 Airbus SAS -Modifications on 2023/08/30-2024/05/07 Copyright 2023 Capgemini +Modifications on 2023/08/30-2024/06/25 Copyright 2023 Capgemini Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
@@ -119,7 +119,7 @@ def create_empty_study_case( .join(GroupAccessUser) .filter(GroupAccessUser.user_id == user_identifier) .filter(Group.id == group_identifier) - .filter(StudyCase.disabled == False) + .filter(not StudyCase.disabled) .all() ) @@ -393,7 +393,7 @@ def edit_study(study_id, new_group_id, new_study_name, user_id, new_flavor:str): Group).join(GroupAccessUser) \ .filter(GroupAccessUser.user_id == user_id) \ .filter(Group.id == new_group_id) \ - .filter(StudyCase.disabled == False).all() + .filter(not StudyCase.disabled).all() for study in study_name_list: if study.name == new_study_name: diff --git a/sos_trades_api/server/base_server.py b/sos_trades_api/server/base_server.py index baf372f8..b3c876f0 100644 --- a/sos_trades_api/server/base_server.py +++ b/sos_trades_api/server/base_server.py @@ -1,6 +1,6 @@ ''' Copyright 2022 Airbus SAS -Modifications on 2023/11/22-2024/06/13 Copyright 2023 Capgemini +Modifications on 2023/11/22-2024/06/25 Copyright 2023 Capgemini Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
@@ -986,7 +986,7 @@ def after_request(response): "http://localhost:4200") response.headers.add("Access-Control-Allow-Credentials", "true") response.headers.add("Access-Control-Allow-Headers", - 'Content-Type,Authorization,Set-Cookie,Cookie,Cache-Control,Pragma,Expires') # noqa + 'Content-Type,Authorization,Set-Cookie,Cookie,Cache-Control,Pragma,Expires') response.headers.add("Access-Control-Allow-Methods", "GET,PUT,POST,DELETE") diff --git a/sos_trades_api/tests/controllers/l0_test_calculation.py b/sos_trades_api/tests/controllers/l0_test_calculation.py index 8eeee1ce..45d7ea42 100644 --- a/sos_trades_api/tests/controllers/l0_test_calculation.py +++ b/sos_trades_api/tests/controllers/l0_test_calculation.py @@ -1,6 +1,6 @@ ''' Copyright 2022 Airbus SAS -Modifications on 2024/03/06-2024/06/24 Copyright 2024 Capgemini +Modifications on 2024/03/06-2024/06/25 Copyright 2024 Capgemini Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
@@ -196,8 +196,8 @@ def test_03_get_calculation_dashboard(self): from sos_trades_api.controllers.sostrades_data.calculation_controller import ( execute_calculation, - stop_calculation, get_calculation_dashboard, + stop_calculation, ) from sos_trades_api.models.database_models import ( StudyCase, From 89ffd8bd243a05df3154515c5e7a4f5875d568c7 Mon Sep 17 00:00:00 2001 From: Geoffrey Delric <81676743+gdelric-capgemini@users.noreply.github.com> Date: Tue, 25 Jun 2024 12:15:47 +0200 Subject: [PATCH 13/73] [test] update metric during calculation from kubernetes --- .../tools/execution/execution_metrics.py | 51 ++++++++++++++----- .../tools/kubernetes/kubernetes_service.py | 15 ++++-- 2 files changed, 48 insertions(+), 18 deletions(-) diff --git a/sos_trades_api/tools/execution/execution_metrics.py b/sos_trades_api/tools/execution/execution_metrics.py index 63228ce7..d7fc5dd0 100644 --- a/sos_trades_api/tools/execution/execution_metrics.py +++ b/sos_trades_api/tools/execution/execution_metrics.py @@ -19,8 +19,11 @@ import psutil -from sos_trades_api.models.database_models import StudyCaseExecution +from sos_trades_api.config import Config +from sos_trades_api.controllers.sostrades_data.study_case_controller import get_study_case_allocation +from sos_trades_api.models.database_models import StudyCaseExecution, StudyCase from sos_trades_api.server.base_server import app, db +from sos_trades_api.tools.kubernetes.kubernetes_service import kubernetes_get_pod_info """ Execution metric thread @@ -61,18 +64,40 @@ def __update_database(self): # shut down calculation try: # Open a database context - with app.app_context(): - study_case_execution = StudyCaseExecution.query. 
\ - filter(StudyCaseExecution.id.like(self.__study_case_execution_id)).first() - - # Check environment info - cpu_count_physical = psutil.cpu_count() - cpu_usage = round((psutil.cpu_percent() / 100) * cpu_count_physical, 2) - cpu_metric = f"{cpu_usage}/{cpu_count_physical}" - - memory_count = round(psutil.virtual_memory()[0] / (1024 * 1024 * 1024), 2) - memory_usage = round(psutil.virtual_memory()[3] / (1024 * 1024 * 1024), 2) - memory_metric = f"{memory_usage}/{memory_count} [GB]" + with (((app.app_context()))): + study_case_execution = StudyCaseExecution.query.filter(StudyCaseExecution.id.like(self.__study_case_execution_id)).first() + + config = Config() + if config.execution_strategy == Config.CONFIG_EXECUTION_STRATEGY_K8S: + study_case_allocation = get_study_case_allocation(study_case_execution.study_case_id) + + # Retrieve memory and cpu from kubernetes + result = kubernetes_get_pod_info(study_case_allocation.kubernetes_pod_name, study_case_allocation.kubernetes_pod_namespace) + + # Retrieve study case from database + study_case = StudyCase.query.filter(StudyCase.id.like(study_case_execution.study_case_id)).first() + + # Retrieve limits of pod from config + cpu_limits = '' + memory_limits = '' + pod_execution_limit_from_config = app.config["CONFIG_FLAVOR_KUBERNETES"]["PodExec"][study_case.execution_pod_flavor]["limits"] + if pod_execution_limit_from_config is not None and pod_execution_limit_from_config["cpu"] is not None and pod_execution_limit_from_config["memory"]: + cpu_limits = pod_execution_limit_from_config["cpu"] + memory_limits = pod_execution_limit_from_config["memory"] + + cpu_metric = f'{cpu_limits}' + memory_metric = f'{memory_limits} [GB]' + print(cpu_metric, memory_metric) + + else: + # Check environment info + cpu_count_physical = psutil.cpu_count() + cpu_usage = round((psutil.cpu_percent() / 100) * cpu_count_physical, 2) + cpu_metric = f"{cpu_usage}/{cpu_count_physical}" + + memory_count = round(psutil.virtual_memory()[0] / (1024 * 1024 * 
1024), 2) + memory_usage = round(psutil.virtual_memory()[3] / (1024 * 1024 * 1024), 2) + memory_metric = f"{memory_usage}/{memory_count} [GB]" study_case_execution.cpu_usage = cpu_metric study_case_execution.memory_usage = memory_metric diff --git a/sos_trades_api/tools/kubernetes/kubernetes_service.py b/sos_trades_api/tools/kubernetes/kubernetes_service.py index 8175c920..2fc0b939 100644 --- a/sos_trades_api/tools/kubernetes/kubernetes_service.py +++ b/sos_trades_api/tools/kubernetes/kubernetes_service.py @@ -318,17 +318,22 @@ def kubernetes_get_pod_info(pod_name, pod_namespace): pod_cpu = round(float("".join( filter(str.isdigit, pod_searched[0]["containers"][0]["usage"]["cpu"]))) / 1e9, 2) - pod_memory = round(float("".join( - filter(str.isdigit, pod_searched[0]["containers"][0]["usage"]["memory"]))) / (1024 * 1024), 2) - result["cpu"] = f"{pod_cpu} [-]" - result["memory"] = f"{pod_memory} [Go]" + + # Retrieve memory usage and convert it to GB + + pod_memory_kib = round(float("".join(filter(str.isdigit, pod_searched[0]["containers"][0]["usage"]["memory"]))),2) + pod_memory_gib = pod_memory_kib / (1024 * 1024) + gigabyte = 1.073741824 + pod_memory_gb = pod_memory_gib * gigabyte + + result["cpu"] = pod_cpu + result["memory"] = pod_memory_gb except Exception as error: message = f"Unable to retrieve pod metrics: {error}" app.logger.error(message) raise ExecutionEngineKuberneteError(message) - return result def kubernetes_delete_deployment_and_service(pod_name, pod_namespace): From 6e0f19302e58d4d18aceeb6c8eebd0df1b23376f Mon Sep 17 00:00:00 2001 From: Geoffrey Delric <81676743+gdelric-capgemini@users.noreply.github.com> Date: Tue, 25 Jun 2024 14:16:47 +0200 Subject: [PATCH 14/73] [test] fix error update metric+ add print --- sos_trades_api/tools/execution/execution_metrics.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/sos_trades_api/tools/execution/execution_metrics.py b/sos_trades_api/tools/execution/execution_metrics.py index 
d7fc5dd0..fb021bb1 100644 --- a/sos_trades_api/tools/execution/execution_metrics.py +++ b/sos_trades_api/tools/execution/execution_metrics.py @@ -64,16 +64,17 @@ def __update_database(self): # shut down calculation try: # Open a database context - with (((app.app_context()))): + with app.app_context(): study_case_execution = StudyCaseExecution.query.filter(StudyCaseExecution.id.like(self.__study_case_execution_id)).first() config = Config() + print(f"config => : {config.execution_strategy}") if config.execution_strategy == Config.CONFIG_EXECUTION_STRATEGY_K8S: study_case_allocation = get_study_case_allocation(study_case_execution.study_case_id) # Retrieve memory and cpu from kubernetes result = kubernetes_get_pod_info(study_case_allocation.kubernetes_pod_name, study_case_allocation.kubernetes_pod_namespace) - + print(f'result from kubernetes => : cpu {result["cpu"]} + memory {result["memory"]}') # Retrieve study case from database study_case = StudyCase.query.filter(StudyCase.id.like(study_case_execution.study_case_id)).first() @@ -85,9 +86,8 @@ def __update_database(self): cpu_limits = pod_execution_limit_from_config["cpu"] memory_limits = pod_execution_limit_from_config["memory"] - cpu_metric = f'{cpu_limits}' - memory_metric = f'{memory_limits} [GB]' - print(cpu_metric, memory_metric) + cpu_metric = f'{result["cpu"]}/{cpu_limits}' + memory_metric = f'{result["memory"]}/{memory_limits} [GB]' else: # Check environment info From e4106fd77bb66f2dea6a37a032bda4e4bf8c9d3a Mon Sep 17 00:00:00 2001 From: Geoffrey Delric <81676743+gdelric-capgemini@users.noreply.github.com> Date: Tue, 25 Jun 2024 14:52:06 +0200 Subject: [PATCH 15/73] [test update metric] - add print v2 --- sos_trades_api/tools/execution/execution_metrics.py | 2 +- sos_trades_api/tools/kubernetes/kubernetes_service.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/sos_trades_api/tools/execution/execution_metrics.py b/sos_trades_api/tools/execution/execution_metrics.py index 
fb021bb1..cc505df2 100644 --- a/sos_trades_api/tools/execution/execution_metrics.py +++ b/sos_trades_api/tools/execution/execution_metrics.py @@ -71,7 +71,7 @@ def __update_database(self): print(f"config => : {config.execution_strategy}") if config.execution_strategy == Config.CONFIG_EXECUTION_STRATEGY_K8S: study_case_allocation = get_study_case_allocation(study_case_execution.study_case_id) - + print(f'pod allocation => : pod name {study_case_allocation.kubernetes_pod_name} + namespace {study_case_allocation.kubernetes_pod_namespace}') # Retrieve memory and cpu from kubernetes result = kubernetes_get_pod_info(study_case_allocation.kubernetes_pod_name, study_case_allocation.kubernetes_pod_namespace) print(f'result from kubernetes => : cpu {result["cpu"]} + memory {result["memory"]}') diff --git a/sos_trades_api/tools/kubernetes/kubernetes_service.py b/sos_trades_api/tools/kubernetes/kubernetes_service.py index 2fc0b939..b9a11b89 100644 --- a/sos_trades_api/tools/kubernetes/kubernetes_service.py +++ b/sos_trades_api/tools/kubernetes/kubernetes_service.py @@ -313,9 +313,9 @@ def kubernetes_get_pod_info(pod_name, pod_namespace): api = client.CustomObjectsApi() resources = api.list_namespaced_custom_object(group="metrics.k8s.io", version="v1beta1", namespace=pod_namespace, plural="pods") - + print(resources["items"]) pod_searched = list(filter(lambda pod: pod["metadata"]["name"] == pod_name, resources["items"])) - + print(pod_searched) pod_cpu = round(float("".join( filter(str.isdigit, pod_searched[0]["containers"][0]["usage"]["cpu"]))) / 1e9, 2) From f18bfab809928eb95facb6ffb3c23ecb17bad73c Mon Sep 17 00:00:00 2001 From: Geoffrey Delric <81676743+gdelric-capgemini@users.noreply.github.com> Date: Tue, 25 Jun 2024 15:23:36 +0200 Subject: [PATCH 16/73] [test update metric] - fix error --- sos_trades_api/tools/execution/execution_metrics.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/sos_trades_api/tools/execution/execution_metrics.py 
b/sos_trades_api/tools/execution/execution_metrics.py index cc505df2..df4f7c62 100644 --- a/sos_trades_api/tools/execution/execution_metrics.py +++ b/sos_trades_api/tools/execution/execution_metrics.py @@ -21,7 +21,7 @@ from sos_trades_api.config import Config from sos_trades_api.controllers.sostrades_data.study_case_controller import get_study_case_allocation -from sos_trades_api.models.database_models import StudyCaseExecution, StudyCase +from sos_trades_api.models.database_models import StudyCaseExecution, PodAllocation from sos_trades_api.server.base_server import app, db from sos_trades_api.tools.kubernetes.kubernetes_service import kubernetes_get_pod_info @@ -70,18 +70,18 @@ def __update_database(self): config = Config() print(f"config => : {config.execution_strategy}") if config.execution_strategy == Config.CONFIG_EXECUTION_STRATEGY_K8S: - study_case_allocation = get_study_case_allocation(study_case_execution.study_case_id) + study_case_allocation = PodAllocation.query.filter(PodAllocation.identifier == study_case_execution.study_case_id).filter( + PodAllocation.pod_type == PodAllocation.TYPE_EXECUTION, + ).first() print(f'pod allocation => : pod name {study_case_allocation.kubernetes_pod_name} + namespace {study_case_allocation.kubernetes_pod_namespace}') # Retrieve memory and cpu from kubernetes result = kubernetes_get_pod_info(study_case_allocation.kubernetes_pod_name, study_case_allocation.kubernetes_pod_namespace) print(f'result from kubernetes => : cpu {result["cpu"]} + memory {result["memory"]}') - # Retrieve study case from database - study_case = StudyCase.query.filter(StudyCase.id.like(study_case_execution.study_case_id)).first() # Retrieve limits of pod from config cpu_limits = '' memory_limits = '' - pod_execution_limit_from_config = app.config["CONFIG_FLAVOR_KUBERNETES"]["PodExec"][study_case.execution_pod_flavor]["limits"] + pod_execution_limit_from_config = 
app.config["CONFIG_FLAVOR_KUBERNETES"]["PodExec"][study_case_allocation.flavor]["limits"] if pod_execution_limit_from_config is not None and pod_execution_limit_from_config["cpu"] is not None and pod_execution_limit_from_config["memory"]: cpu_limits = pod_execution_limit_from_config["cpu"] memory_limits = pod_execution_limit_from_config["memory"] From 18cfad1ec6eeef6bca891c25b062da009628810f Mon Sep 17 00:00:00 2001 From: b4pm-devops Date: Tue, 25 Jun 2024 13:33:35 +0000 Subject: [PATCH 17/73] merge integration to validation From ef8981d24f3ee49878fa2c8899454179b79715eb Mon Sep 17 00:00:00 2001 From: Geoffrey Delric <81676743+gdelric-capgemini@users.noreply.github.com> Date: Tue, 25 Jun 2024 15:54:51 +0200 Subject: [PATCH 18/73] [test update metric] - fix error v3 --- .../tools/execution/execution_metrics.py | 39 +++++++++---------- 1 file changed, 19 insertions(+), 20 deletions(-) diff --git a/sos_trades_api/tools/execution/execution_metrics.py b/sos_trades_api/tools/execution/execution_metrics.py index df4f7c62..6b4d29b7 100644 --- a/sos_trades_api/tools/execution/execution_metrics.py +++ b/sos_trades_api/tools/execution/execution_metrics.py @@ -20,7 +20,6 @@ import psutil from sos_trades_api.config import Config -from sos_trades_api.controllers.sostrades_data.study_case_controller import get_study_case_allocation from sos_trades_api.models.database_models import StudyCaseExecution, PodAllocation from sos_trades_api.server.base_server import app, db from sos_trades_api.tools.kubernetes.kubernetes_service import kubernetes_get_pod_info @@ -66,28 +65,28 @@ def __update_database(self): # Open a database context with app.app_context(): study_case_execution = StudyCaseExecution.query.filter(StudyCaseExecution.id.like(self.__study_case_execution_id)).first() - config = Config() print(f"config => : {config.execution_strategy}") if config.execution_strategy == Config.CONFIG_EXECUTION_STRATEGY_K8S: - study_case_allocation = 
PodAllocation.query.filter(PodAllocation.identifier == study_case_execution.study_case_id).filter( - PodAllocation.pod_type == PodAllocation.TYPE_EXECUTION, - ).first() - print(f'pod allocation => : pod name {study_case_allocation.kubernetes_pod_name} + namespace {study_case_allocation.kubernetes_pod_namespace}') - # Retrieve memory and cpu from kubernetes - result = kubernetes_get_pod_info(study_case_allocation.kubernetes_pod_name, study_case_allocation.kubernetes_pod_namespace) - print(f'result from kubernetes => : cpu {result["cpu"]} + memory {result["memory"]}') - - # Retrieve limits of pod from config - cpu_limits = '' - memory_limits = '' - pod_execution_limit_from_config = app.config["CONFIG_FLAVOR_KUBERNETES"]["PodExec"][study_case_allocation.flavor]["limits"] - if pod_execution_limit_from_config is not None and pod_execution_limit_from_config["cpu"] is not None and pod_execution_limit_from_config["memory"]: - cpu_limits = pod_execution_limit_from_config["cpu"] - memory_limits = pod_execution_limit_from_config["memory"] - - cpu_metric = f'{result["cpu"]}/{cpu_limits}' - memory_metric = f'{result["memory"]}/{memory_limits} [GB]' + if study_case_execution.execution_status == StudyCaseExecution.RUNNING: + study_case_allocation = PodAllocation.query.filter(PodAllocation.identifier == study_case_execution.study_case_id).filter( + PodAllocation.pod_type == PodAllocation.TYPE_EXECUTION, + ).first() + print(f'pod allocation => : pod name {study_case_allocation.kubernetes_pod_name} + namespace {study_case_allocation.kubernetes_pod_namespace}') + # Retrieve memory and cpu from kubernetes + result = kubernetes_get_pod_info(study_case_allocation.kubernetes_pod_name, study_case_allocation.kubernetes_pod_namespace) + print(f'result from kubernetes => : cpu {result["cpu"]} + memory {result["memory"]}') + + # Retrieve limits of pod from config + cpu_limits = '' + memory_limits = '' + pod_execution_limit_from_config = 
app.config["CONFIG_FLAVOR_KUBERNETES"]["PodExec"][study_case_allocation.flavor]["limits"] + if pod_execution_limit_from_config is not None and pod_execution_limit_from_config["cpu"] is not None and pod_execution_limit_from_config["memory"]: + cpu_limits = pod_execution_limit_from_config["cpu"] + memory_limits = pod_execution_limit_from_config["memory"] + + cpu_metric = f'{result["cpu"]}/{cpu_limits}' + memory_metric = f'{result["memory"]}/{memory_limits} [GB]' else: # Check environment info From ac1fb61391718ec565ffc104861e9007421b3562 Mon Sep 17 00:00:00 2001 From: b4pm-devops Date: Tue, 25 Jun 2024 21:15:49 +0000 Subject: [PATCH 19/73] Merge integration to validation and update version.info --- sos_trades_api/version.info | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sos_trades_api/version.info b/sos_trades_api/version.info index d920cd02..9c407b6e 100644 --- a/sos_trades_api/version.info +++ b/sos_trades_api/version.info @@ -1 +1 @@ -Tue Jun 25 13:33:35 UTC 2024 +Tue Jun 25 21:15:49 UTC 2024 From 9126d85b630f03cc6f0060dc83ab1c7e701a08b5 Mon Sep 17 00:00:00 2001 From: b4pm-devops Date: Tue, 25 Jun 2024 21:15:49 +0000 Subject: [PATCH 20/73] merge integration to validation From 2086304bed9851e5bca98ef5619c4d58851454fa Mon Sep 17 00:00:00 2001 From: Geoffrey Delric <81676743+gdelric-capgemini@users.noreply.github.com> Date: Wed, 26 Jun 2024 09:34:34 +0200 Subject: [PATCH 21/73] [test update metric] - fix error v4 --- .../tools/execution/execution_metrics.py | 37 +++++++++---------- .../tools/kubernetes/kubernetes_service.py | 25 +++++++------ 2 files changed, 32 insertions(+), 30 deletions(-) diff --git a/sos_trades_api/tools/execution/execution_metrics.py b/sos_trades_api/tools/execution/execution_metrics.py index 6b4d29b7..213a4416 100644 --- a/sos_trades_api/tools/execution/execution_metrics.py +++ b/sos_trades_api/tools/execution/execution_metrics.py @@ -68,25 +68,24 @@ def __update_database(self): config = Config() print(f"config => : 
{config.execution_strategy}") if config.execution_strategy == Config.CONFIG_EXECUTION_STRATEGY_K8S: - if study_case_execution.execution_status == StudyCaseExecution.RUNNING: - study_case_allocation = PodAllocation.query.filter(PodAllocation.identifier == study_case_execution.study_case_id).filter( - PodAllocation.pod_type == PodAllocation.TYPE_EXECUTION, - ).first() - print(f'pod allocation => : pod name {study_case_allocation.kubernetes_pod_name} + namespace {study_case_allocation.kubernetes_pod_namespace}') - # Retrieve memory and cpu from kubernetes - result = kubernetes_get_pod_info(study_case_allocation.kubernetes_pod_name, study_case_allocation.kubernetes_pod_namespace) - print(f'result from kubernetes => : cpu {result["cpu"]} + memory {result["memory"]}') - - # Retrieve limits of pod from config - cpu_limits = '' - memory_limits = '' - pod_execution_limit_from_config = app.config["CONFIG_FLAVOR_KUBERNETES"]["PodExec"][study_case_allocation.flavor]["limits"] - if pod_execution_limit_from_config is not None and pod_execution_limit_from_config["cpu"] is not None and pod_execution_limit_from_config["memory"]: - cpu_limits = pod_execution_limit_from_config["cpu"] - memory_limits = pod_execution_limit_from_config["memory"] - - cpu_metric = f'{result["cpu"]}/{cpu_limits}' - memory_metric = f'{result["memory"]}/{memory_limits} [GB]' + study_case_allocation = PodAllocation.query.filter(PodAllocation.identifier == study_case_execution.study_case_id).filter( + PodAllocation.pod_type == PodAllocation.TYPE_EXECUTION, + ).first() + print(f'pod allocation => : pod name {study_case_allocation.kubernetes_pod_name} + namespace {study_case_allocation.kubernetes_pod_namespace}') + # Retrieve memory and cpu from kubernetes + result = kubernetes_get_pod_info(study_case_allocation.kubernetes_pod_name, study_case_allocation.kubernetes_pod_namespace) + print(f'result from kubernetes => : cpu {result["cpu"]} + memory {result["memory"]}') + + # Retrieve limits of pod from config + 
cpu_limits = '' + memory_limits = '' + pod_execution_limit_from_config = app.config["CONFIG_FLAVOR_KUBERNETES"]["PodExec"][study_case_allocation.flavor]["limits"] + if pod_execution_limit_from_config is not None and pod_execution_limit_from_config["cpu"] is not None and pod_execution_limit_from_config["memory"]: + cpu_limits = pod_execution_limit_from_config["cpu"] + memory_limits = pod_execution_limit_from_config["memory"] + + cpu_metric = f'{result["cpu"]}/{cpu_limits}' + memory_metric = f'{result["memory"]}/{memory_limits} [GB]' else: # Check environment info diff --git a/sos_trades_api/tools/kubernetes/kubernetes_service.py b/sos_trades_api/tools/kubernetes/kubernetes_service.py index b9a11b89..43e8ca52 100644 --- a/sos_trades_api/tools/kubernetes/kubernetes_service.py +++ b/sos_trades_api/tools/kubernetes/kubernetes_service.py @@ -312,22 +312,25 @@ def kubernetes_get_pod_info(pod_name, pod_namespace): try: api = client.CustomObjectsApi() resources = api.list_namespaced_custom_object(group="metrics.k8s.io", version="v1beta1", - namespace=pod_namespace, plural="pods") - print(resources["items"]) + namespace=pod_namespace, plural="pods") + pod_searched = list(filter(lambda pod: pod["metadata"]["name"] == pod_name, resources["items"])) print(pod_searched) - pod_cpu = round(float("".join( - filter(str.isdigit, pod_searched[0]["containers"][0]["usage"]["cpu"]))) / 1e9, 2) + if len(pod_searched) > 0: + pod_cpu = round(float("".join( + filter(str.isdigit, pod_searched[0]["containers"][0]["usage"]["cpu"]))) / 1e9, 2) - # Retrieve memory usage and convert it to GB + # Retrieve memory usage and convert it to GB - pod_memory_kib = round(float("".join(filter(str.isdigit, pod_searched[0]["containers"][0]["usage"]["memory"]))),2) - pod_memory_gib = pod_memory_kib / (1024 * 1024) - gigabyte = 1.073741824 - pod_memory_gb = pod_memory_gib * gigabyte + pod_memory_kib = round(float("".join(filter(str.isdigit, pod_searched[0]["containers"][0]["usage"]["memory"]))),2) + 
pod_memory_gib = pod_memory_kib / (1024 * 1024) + gigabyte = 1.073741824 + pod_memory_gb = pod_memory_gib * gigabyte - result["cpu"] = pod_cpu - result["memory"] = pod_memory_gb + result["cpu"] = pod_cpu + result["memory"] = pod_memory_gb + else: + print({resources["items"]}) except Exception as error: message = f"Unable to retrieve pod metrics: {error}" From 6658b1c854ec52293fa003ba14fc6f7b4c5e341f Mon Sep 17 00:00:00 2001 From: Geoffrey Delric <81676743+gdelric-capgemini@users.noreply.github.com> Date: Wed, 26 Jun 2024 09:37:43 +0200 Subject: [PATCH 22/73] [test update metric] - fix print --- sos_trades_api/tools/kubernetes/kubernetes_service.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sos_trades_api/tools/kubernetes/kubernetes_service.py b/sos_trades_api/tools/kubernetes/kubernetes_service.py index 43e8ca52..fc58a1d1 100644 --- a/sos_trades_api/tools/kubernetes/kubernetes_service.py +++ b/sos_trades_api/tools/kubernetes/kubernetes_service.py @@ -330,7 +330,7 @@ def kubernetes_get_pod_info(pod_name, pod_namespace): result["cpu"] = pod_cpu result["memory"] = pod_memory_gb else: - print({resources["items"]}) + print(resources["items"]) except Exception as error: message = f"Unable to retrieve pod metrics: {error}" From f6088504a5db7af5ab0c3b6e02fd59b1556815c3 Mon Sep 17 00:00:00 2001 From: Geoffrey Delric <81676743+gdelric-capgemini@users.noreply.github.com> Date: Wed, 26 Jun 2024 10:44:51 +0200 Subject: [PATCH 23/73] [test update metric] - update methode to retrieve metrics --- .../tools/execution/execution_metrics.py | 7 +- .../tools/kubernetes/kubernetes_service.py | 84 ++++++++++++++----- 2 files changed, 68 insertions(+), 23 deletions(-) diff --git a/sos_trades_api/tools/execution/execution_metrics.py b/sos_trades_api/tools/execution/execution_metrics.py index 213a4416..3a63c988 100644 --- a/sos_trades_api/tools/execution/execution_metrics.py +++ b/sos_trades_api/tools/execution/execution_metrics.py @@ -16,7 +16,7 @@ ''' import 
threading import time - +import re import psutil from sos_trades_api.config import Config @@ -81,8 +81,9 @@ def __update_database(self): memory_limits = '' pod_execution_limit_from_config = app.config["CONFIG_FLAVOR_KUBERNETES"]["PodExec"][study_case_allocation.flavor]["limits"] if pod_execution_limit_from_config is not None and pod_execution_limit_from_config["cpu"] is not None and pod_execution_limit_from_config["memory"]: - cpu_limits = pod_execution_limit_from_config["cpu"] - memory_limits = pod_execution_limit_from_config["memory"] + # Retrieve only numbers of limits + cpu_limits = str(''.join(re.findall(r'\d+', pod_execution_limit_from_config["cpu"]))) + memory_limits = str(''.join(re.findall(r'\d+', pod_execution_limit_from_config["memory"]))) cpu_metric = f'{result["cpu"]}/{cpu_limits}' memory_metric = f'{result["memory"]}/{memory_limits} [GB]' diff --git a/sos_trades_api/tools/kubernetes/kubernetes_service.py b/sos_trades_api/tools/kubernetes/kubernetes_service.py index fc58a1d1..2ad631da 100644 --- a/sos_trades_api/tools/kubernetes/kubernetes_service.py +++ b/sos_trades_api/tools/kubernetes/kubernetes_service.py @@ -16,7 +16,6 @@ ''' import time from functools import partial - import urllib3 from kubernetes import client, config, watch @@ -306,31 +305,76 @@ def kubernetes_get_pod_info(pod_name, pod_namespace): "cpu": "----", "memory": "----", } - # Create k8 api client object kubernetes_load_kube_config() try: + v1 = client.CoreV1Api() api = client.CustomObjectsApi() - resources = api.list_namespaced_custom_object(group="metrics.k8s.io", version="v1beta1", - namespace=pod_namespace, plural="pods") - - pod_searched = list(filter(lambda pod: pod["metadata"]["name"] == pod_name, resources["items"])) - print(pod_searched) - if len(pod_searched) > 0: - pod_cpu = round(float("".join( - filter(str.isdigit, pod_searched[0]["containers"][0]["usage"]["cpu"]))) / 1e9, 2) + pods = v1.list_namespaced_pod(pod_namespace) - # Retrieve memory usage and convert it to GB - 
- pod_memory_kib = round(float("".join(filter(str.isdigit, pod_searched[0]["containers"][0]["usage"]["memory"]))),2) - pod_memory_gib = pod_memory_kib / (1024 * 1024) - gigabyte = 1.073741824 - pod_memory_gb = pod_memory_gib * gigabyte - - result["cpu"] = pod_cpu - result["memory"] = pod_memory_gb + target_pod = None + for pod in pods.items: + if pod.metadata.name == pod_name: + target_pod = pod + break + if target_pod: + if target_pod.status.phase == "Running": + async_request = api.list_namespaced_custom_object( + group="metrics.k8s.io", + version="v1beta1", + namespace=pod_namespace, + plural="pods", + async_req=True + ) + # Attente que la requête asynchrone soit terminée + while not async_request.ready(): + print("Attente des métriques...") + time.sleep(1) + + resources = async_request.get() + + # Recherche des métriques pour le pod cible + pod_searched = list(filter(lambda pod: pod["metadata"]["name"] == pod_name, resources["items"])) + print(pod_searched) + if len(pod_searched) > 0: + pod_cpu = round(float("".join( + filter(str.isdigit, pod_searched[0]["containers"][0]["usage"]["cpu"]))) / 1e9, 2) + + # Retrieve memory usage and convert it to GB + + pod_memory_kib = round( + float("".join(filter(str.isdigit, pod_searched[0]["containers"][0]["usage"]["memory"]))), 2) + pod_memory_gib = pod_memory_kib / (1024 * 1024) + gigabyte = 1.073741824 + pod_memory_gb = pod_memory_gib * gigabyte + + result["cpu"] = pod_cpu + result["memory"] = pod_memory_gb + else: + print(f" {pod_name} is not running. 
Status: {target_pod.status.phase}") else: - print(resources["items"]) + print(f"{pod_name} pod not found") + + # resources = api.list_namespaced_custom_object(group="metrics.k8s.io", version="v1beta1", + # namespace=pod_namespace, plural="pods") + # + # pod_searched = list(filter(lambda pod: pod["metadata"]["name"] == pod_name, resources["items"])) + # print(pod_searched) + # if len(pod_searched) > 0: + # pod_cpu = round(float("".join( + # filter(str.isdigit, pod_searched[0]["containers"][0]["usage"]["cpu"]))) / 1e9, 2) + # + # # Retrieve memory usage and convert it to GB + # + # pod_memory_kib = round(float("".join(filter(str.isdigit, pod_searched[0]["containers"][0]["usage"]["memory"]))),2) + # pod_memory_gib = pod_memory_kib / (1024 * 1024) + # gigabyte = 1.073741824 + # pod_memory_gb = pod_memory_gib * gigabyte + # + # result["cpu"] = pod_cpu + # result["memory"] = pod_memory_gb + # else: + # print(resources["items"]) except Exception as error: message = f"Unable to retrieve pod metrics: {error}" From fb0b7af172539020f1923bbdb2144e5be4cd86dc Mon Sep 17 00:00:00 2001 From: Geoffrey Delric <81676743+gdelric-capgemini@users.noreply.github.com> Date: Wed, 26 Jun 2024 11:33:41 +0200 Subject: [PATCH 24/73] [test update metric] - add sleep --- sos_trades_api/tools/kubernetes/kubernetes_service.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/sos_trades_api/tools/kubernetes/kubernetes_service.py b/sos_trades_api/tools/kubernetes/kubernetes_service.py index 2ad631da..de438e38 100644 --- a/sos_trades_api/tools/kubernetes/kubernetes_service.py +++ b/sos_trades_api/tools/kubernetes/kubernetes_service.py @@ -326,14 +326,15 @@ def kubernetes_get_pod_info(pod_name, pod_namespace): plural="pods", async_req=True ) - # Attente que la requête asynchrone soit terminée + + time.sleep(5) while not async_request.ready(): - print("Attente des métriques...") + print("Waiting metric...") time.sleep(1) resources = async_request.get() + 
print(resources["items"]) - # Recherche des métriques pour le pod cible pod_searched = list(filter(lambda pod: pod["metadata"]["name"] == pod_name, resources["items"])) print(pod_searched) if len(pod_searched) > 0: @@ -341,7 +342,6 @@ def kubernetes_get_pod_info(pod_name, pod_namespace): filter(str.isdigit, pod_searched[0]["containers"][0]["usage"]["cpu"]))) / 1e9, 2) # Retrieve memory usage and convert it to GB - pod_memory_kib = round( float("".join(filter(str.isdigit, pod_searched[0]["containers"][0]["usage"]["memory"]))), 2) pod_memory_gib = pod_memory_kib / (1024 * 1024) From 6a636b1efdfa8a73f4baccf428bfa89fefeea65d Mon Sep 17 00:00:00 2001 From: Matthew Watkins Date: Wed, 26 Jun 2024 09:36:06 +0100 Subject: [PATCH 25/73] docs: Important OS-Climate project announcement Signed-off-by: Matthew Watkins --- README.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/README.md b/README.md index 09fcf828..557f3c34 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,7 @@ + +> [!IMPORTANT] +> On June 26 2024, Linux Foundation announced the merger of its financial services umbrella, the Fintech Open Source Foundation ([FINOS](https://finos.org)), with OS-Climate, an open source community dedicated to building data technologies, modeling, and analytic tools that will drive global capital flows into climate change mitigation and resilience; OS-Climate projects are in the process of transitioning to the [FINOS governance framework](https://community.finos.org/docs/governance); read more on [finos.org/press/finos-join-forces-os-open-source-climate-sustainability-esg](https://finos.org/press/finos-join-forces-os-open-source-climate-sustainability-esg) + ## Packages installation From a1e52ca7194c5c80cc0ac20f7616dbfb39d26b08 Mon Sep 17 00:00:00 2001 From: Geoffrey Delric <81676743+gdelric-capgemini@users.noreply.github.com> Date: Wed, 26 Jun 2024 14:51:51 +0200 Subject: [PATCH 26/73] [test update metric] convert limits memory in GB --- 
sos_trades_api/tools/execution/execution_metrics.py | 3 ++- sos_trades_api/tools/kubernetes/kubernetes_service.py | 5 +---- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/sos_trades_api/tools/execution/execution_metrics.py b/sos_trades_api/tools/execution/execution_metrics.py index 3a63c988..2a502377 100644 --- a/sos_trades_api/tools/execution/execution_metrics.py +++ b/sos_trades_api/tools/execution/execution_metrics.py @@ -83,7 +83,8 @@ def __update_database(self): if pod_execution_limit_from_config is not None and pod_execution_limit_from_config["cpu"] is not None and pod_execution_limit_from_config["memory"]: # Retrieve only numbers of limits cpu_limits = str(''.join(re.findall(r'\d+', pod_execution_limit_from_config["cpu"]))) - memory_limits = str(''.join(re.findall(r'\d+', pod_execution_limit_from_config["memory"]))) + memory_limits_gb = pod_execution_limit_from_config["memory"]/8 + memory_limits = str(''.join(re.findall(r'\d+', memory_limits_gb))) cpu_metric = f'{result["cpu"]}/{cpu_limits}' memory_metric = f'{result["memory"]}/{memory_limits} [GB]' diff --git a/sos_trades_api/tools/kubernetes/kubernetes_service.py b/sos_trades_api/tools/kubernetes/kubernetes_service.py index de438e38..77b9fd7a 100644 --- a/sos_trades_api/tools/kubernetes/kubernetes_service.py +++ b/sos_trades_api/tools/kubernetes/kubernetes_service.py @@ -327,10 +327,7 @@ def kubernetes_get_pod_info(pod_name, pod_namespace): async_req=True ) - time.sleep(5) - while not async_request.ready(): - print("Waiting metric...") - time.sleep(1) + time.sleep(2) resources = async_request.get() print(resources["items"]) From 55bc12078ab244db1f36709b1e6c7ec855a1e337 Mon Sep 17 00:00:00 2001 From: Geoffrey Delric <81676743+gdelric-capgemini@users.noreply.github.com> Date: Wed, 26 Jun 2024 15:29:11 +0200 Subject: [PATCH 27/73] [test update metric] convert limits memory in GB v2 --- sos_trades_api/tools/execution/execution_metrics.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) 
diff --git a/sos_trades_api/tools/execution/execution_metrics.py b/sos_trades_api/tools/execution/execution_metrics.py index 2a502377..46bc2fbf 100644 --- a/sos_trades_api/tools/execution/execution_metrics.py +++ b/sos_trades_api/tools/execution/execution_metrics.py @@ -83,8 +83,7 @@ def __update_database(self): if pod_execution_limit_from_config is not None and pod_execution_limit_from_config["cpu"] is not None and pod_execution_limit_from_config["memory"]: # Retrieve only numbers of limits cpu_limits = str(''.join(re.findall(r'\d+', pod_execution_limit_from_config["cpu"]))) - memory_limits_gb = pod_execution_limit_from_config["memory"]/8 - memory_limits = str(''.join(re.findall(r'\d+', memory_limits_gb))) + memory_limits = str(int(''.join(re.findall(r'\d+', pod_execution_limit_from_config["memory"])))/8) cpu_metric = f'{result["cpu"]}/{cpu_limits}' memory_metric = f'{result["memory"]}/{memory_limits} [GB]' From a1e92f5017fbd4266166312b99be518ca299dacb Mon Sep 17 00:00:00 2001 From: Geoffrey Delric <81676743+gdelric-capgemini@users.noreply.github.com> Date: Wed, 26 Jun 2024 15:37:29 +0200 Subject: [PATCH 28/73] test update metric] - fix error --- sos_trades_api/tools/kubernetes/kubernetes_service.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/sos_trades_api/tools/kubernetes/kubernetes_service.py b/sos_trades_api/tools/kubernetes/kubernetes_service.py index 77b9fd7a..93f855c0 100644 --- a/sos_trades_api/tools/kubernetes/kubernetes_service.py +++ b/sos_trades_api/tools/kubernetes/kubernetes_service.py @@ -328,6 +328,9 @@ def kubernetes_get_pod_info(pod_name, pod_namespace): ) time.sleep(2) + while not async_request.ready(): + print("Waiting metric...") + time.sleep(1) resources = async_request.get() print(resources["items"]) From 5e4722795f8034a2588691d76a89deef4d541fe2 Mon Sep 17 00:00:00 2001 From: Geoffrey Delric <81676743+gdelric-capgemini@users.noreply.github.com> Date: Wed, 26 Jun 2024 16:36:39 +0200 Subject: [PATCH 29/73] [test update metric] - Add 
loop while --- .../tools/kubernetes/kubernetes_service.py | 75 +++++++++++-------- 1 file changed, 44 insertions(+), 31 deletions(-) diff --git a/sos_trades_api/tools/kubernetes/kubernetes_service.py b/sos_trades_api/tools/kubernetes/kubernetes_service.py index 93f855c0..f595dd94 100644 --- a/sos_trades_api/tools/kubernetes/kubernetes_service.py +++ b/sos_trades_api/tools/kubernetes/kubernetes_service.py @@ -305,6 +305,10 @@ def kubernetes_get_pod_info(pod_name, pod_namespace): "cpu": "----", "memory": "----", } + max_wait_time = 10 # second + wait_time = 0 + polling_interval = 1 + # Create k8 api client object kubernetes_load_kube_config() try: @@ -319,37 +323,46 @@ def kubernetes_get_pod_info(pod_name, pod_namespace): break if target_pod: if target_pod.status.phase == "Running": - async_request = api.list_namespaced_custom_object( - group="metrics.k8s.io", - version="v1beta1", - namespace=pod_namespace, - plural="pods", - async_req=True - ) - - time.sleep(2) - while not async_request.ready(): - print("Waiting metric...") - time.sleep(1) - - resources = async_request.get() - print(resources["items"]) - - pod_searched = list(filter(lambda pod: pod["metadata"]["name"] == pod_name, resources["items"])) - print(pod_searched) - if len(pod_searched) > 0: - pod_cpu = round(float("".join( - filter(str.isdigit, pod_searched[0]["containers"][0]["usage"]["cpu"]))) / 1e9, 2) - - # Retrieve memory usage and convert it to GB - pod_memory_kib = round( - float("".join(filter(str.isdigit, pod_searched[0]["containers"][0]["usage"]["memory"]))), 2) - pod_memory_gib = pod_memory_kib / (1024 * 1024) - gigabyte = 1.073741824 - pod_memory_gb = pod_memory_gib * gigabyte - - result["cpu"] = pod_cpu - result["memory"] = pod_memory_gb + while wait_time < max_wait_time: + async_request = api.list_namespaced_custom_object( + group="metrics.k8s.io", + version="v1beta1", + namespace=pod_namespace, + plural="pods", + async_req=True + ) + + while not async_request.ready(): + print("Waiting 
metric...") + time.sleep(1) + print(f"Status: {target_pod.status.phase} - Name container: {target_pod.status.container_statuses[0].name}") + + resources = async_request.get() + print(resources["items"]) + + pod_searched = list(filter(lambda pod: pod["metadata"]["name"] == pod_name, resources["items"])) + print(pod_searched) + if len(pod_searched) > 0: + pod_cpu = round(float("".join( + filter(str.isdigit, pod_searched[0]["containers"][0]["usage"]["cpu"]))) / 1e9, 2) + + # Retrieve memory usage and convert it to GB + pod_memory_kib = round( + float("".join(filter(str.isdigit, pod_searched[0]["containers"][0]["usage"]["memory"]))), 2) + pod_memory_gib = pod_memory_kib / (1024 * 1024) + gigabyte = 1.073741824 + pod_memory_gb = pod_memory_gib * gigabyte + + result["cpu"] = pod_cpu + result["memory"] = pod_memory_gb + break + else: + time.sleep(polling_interval) + wait_time += polling_interval + + if wait_time >= max_wait_time: + print(f"Max wait time {max_wait_time}s exceeded. Pod not found or not running.") + else: print(f" {pod_name} is not running. 
Status: {target_pod.status.phase}") else: From 429893cd769857a1abc3514d0b33454c60f9cf47 Mon Sep 17 00:00:00 2001 From: Geoffrey Delric <81676743+gdelric-capgemini@users.noreply.github.com> Date: Wed, 26 Jun 2024 17:26:30 +0200 Subject: [PATCH 30/73] [test update metric] - Increase waiting time --- sos_trades_api/tools/execution/execution_metrics.py | 1 + sos_trades_api/tools/kubernetes/kubernetes_service.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/sos_trades_api/tools/execution/execution_metrics.py b/sos_trades_api/tools/execution/execution_metrics.py index 46bc2fbf..f8be3a05 100644 --- a/sos_trades_api/tools/execution/execution_metrics.py +++ b/sos_trades_api/tools/execution/execution_metrics.py @@ -81,6 +81,7 @@ def __update_database(self): memory_limits = '' pod_execution_limit_from_config = app.config["CONFIG_FLAVOR_KUBERNETES"]["PodExec"][study_case_allocation.flavor]["limits"] if pod_execution_limit_from_config is not None and pod_execution_limit_from_config["cpu"] is not None and pod_execution_limit_from_config["memory"]: + print(f"memory limit from config {pod_execution_limit_from_config['memory']}") # Retrieve only numbers of limits cpu_limits = str(''.join(re.findall(r'\d+', pod_execution_limit_from_config["cpu"]))) memory_limits = str(int(''.join(re.findall(r'\d+', pod_execution_limit_from_config["memory"])))/8) diff --git a/sos_trades_api/tools/kubernetes/kubernetes_service.py b/sos_trades_api/tools/kubernetes/kubernetes_service.py index f595dd94..328c7e50 100644 --- a/sos_trades_api/tools/kubernetes/kubernetes_service.py +++ b/sos_trades_api/tools/kubernetes/kubernetes_service.py @@ -305,7 +305,7 @@ def kubernetes_get_pod_info(pod_name, pod_namespace): "cpu": "----", "memory": "----", } - max_wait_time = 10 # second + max_wait_time = 15 # second wait_time = 0 polling_interval = 1 From 434084a5707805b0ffc32fac6630fe0b72fe81a1 Mon Sep 17 00:00:00 2001 From: b4pm-devops Date: Wed, 26 Jun 2024 21:11:53 +0000 Subject: [PATCH 
31/73] merge integration to validation From b356584691b91278b449fbcebcd44974cab7284a Mon Sep 17 00:00:00 2001 From: GOYON Guillaume Date: Thu, 27 Jun 2024 10:22:09 +0200 Subject: [PATCH 32/73] New ruff rules --- ruff.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ruff.toml b/ruff.toml index f50b2bb7..254b49e1 100644 --- a/ruff.toml +++ b/ruff.toml @@ -1,6 +1,6 @@ [lint] +select = ["I", "TCH", "PLC", "PLE"] # extend-select = ["ALL"] -extend-select = ["I"] ignore = ["E722", "F841", "E501"] # E722 Do not use bare `except` From 85ff21f2b9013b7d5821453ce2da59d3c948f84e Mon Sep 17 00:00:00 2001 From: Geoffrey Delric <81676743+gdelric-capgemini@users.noreply.github.com> Date: Thu, 27 Jun 2024 11:02:09 +0200 Subject: [PATCH 33/73] [test update metric] - convert in Gb from config --- sos_trades_api/tools/execution/execution_metrics.py | 13 ++++++++++--- .../tools/kubernetes/kubernetes_service.py | 2 +- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/sos_trades_api/tools/execution/execution_metrics.py b/sos_trades_api/tools/execution/execution_metrics.py index f8be3a05..c137a536 100644 --- a/sos_trades_api/tools/execution/execution_metrics.py +++ b/sos_trades_api/tools/execution/execution_metrics.py @@ -77,14 +77,21 @@ def __update_database(self): print(f'result from kubernetes => : cpu {result["cpu"]} + memory {result["memory"]}') # Retrieve limits of pod from config - cpu_limits = '' - memory_limits = '' + cpu_limits = 'Not found from configuration' + memory_limits = 'Not found from configuration' pod_execution_limit_from_config = app.config["CONFIG_FLAVOR_KUBERNETES"]["PodExec"][study_case_allocation.flavor]["limits"] if pod_execution_limit_from_config is not None and pod_execution_limit_from_config["cpu"] is not None and pod_execution_limit_from_config["memory"]: print(f"memory limit from config {pod_execution_limit_from_config['memory']}") + # Retrieve only numbers of limits + memory_limits_from_config = 
pod_execution_limit_from_config["memory"] + if "Mi" in memory_limits_from_config: + convert_to_gb = 1024 + else: + convert_to_gb = 8 + memory_limits = str(int(''.join(re.findall(r'\d+', memory_limits_from_config))) / convert_to_gb) + cpu_limits = str(''.join(re.findall(r'\d+', pod_execution_limit_from_config["cpu"]))) - memory_limits = str(int(''.join(re.findall(r'\d+', pod_execution_limit_from_config["memory"])))/8) cpu_metric = f'{result["cpu"]}/{cpu_limits}' memory_metric = f'{result["memory"]}/{memory_limits} [GB]' diff --git a/sos_trades_api/tools/kubernetes/kubernetes_service.py b/sos_trades_api/tools/kubernetes/kubernetes_service.py index 328c7e50..7e4db0df 100644 --- a/sos_trades_api/tools/kubernetes/kubernetes_service.py +++ b/sos_trades_api/tools/kubernetes/kubernetes_service.py @@ -354,7 +354,7 @@ def kubernetes_get_pod_info(pod_name, pod_namespace): pod_memory_gb = pod_memory_gib * gigabyte result["cpu"] = pod_cpu - result["memory"] = pod_memory_gb + result["memory"] = round(pod_memory_gb, 2) break else: time.sleep(polling_interval) From 1256f4984851d9f158859bfa4f46ef9401fb46b0 Mon Sep 17 00:00:00 2001 From: Geoffrey Delric <81676743+gdelric-capgemini@users.noreply.github.com> Date: Thu, 27 Jun 2024 14:54:00 +0200 Subject: [PATCH 34/73] [test update metric] - fix error conversion --- sos_trades_api/tools/kubernetes/kubernetes_service.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/sos_trades_api/tools/kubernetes/kubernetes_service.py b/sos_trades_api/tools/kubernetes/kubernetes_service.py index 7e4db0df..e037eb3c 100644 --- a/sos_trades_api/tools/kubernetes/kubernetes_service.py +++ b/sos_trades_api/tools/kubernetes/kubernetes_service.py @@ -346,12 +346,12 @@ def kubernetes_get_pod_info(pod_name, pod_namespace): pod_cpu = round(float("".join( filter(str.isdigit, pod_searched[0]["containers"][0]["usage"]["cpu"]))) / 1e9, 2) - # Retrieve memory usage and convert it to GB + # Retrieve memory usage and convert it to gigabit 
pod_memory_kib = round( float("".join(filter(str.isdigit, pod_searched[0]["containers"][0]["usage"]["memory"]))), 2) pod_memory_gib = pod_memory_kib / (1024 * 1024) - gigabyte = 1.073741824 - pod_memory_gb = pod_memory_gib * gigabyte + + pod_memory_gb = pod_memory_gib / 8 result["cpu"] = pod_cpu result["memory"] = round(pod_memory_gb, 2) From 8e4a7551e54b9259f707f7ac92d929f18127227e Mon Sep 17 00:00:00 2001 From: Geoffrey Delric <81676743+gdelric-capgemini@users.noreply.github.com> Date: Thu, 27 Jun 2024 15:35:14 +0200 Subject: [PATCH 35/73] [test update metric] count request execution metric --- .../tools/execution/execution_metrics.py | 3 ++ .../tools/kubernetes/kubernetes_service.py | 28 +------------------ 2 files changed, 4 insertions(+), 27 deletions(-) diff --git a/sos_trades_api/tools/execution/execution_metrics.py b/sos_trades_api/tools/execution/execution_metrics.py index c137a536..600e17f5 100644 --- a/sos_trades_api/tools/execution/execution_metrics.py +++ b/sos_trades_api/tools/execution/execution_metrics.py @@ -58,9 +58,11 @@ def __update_database(self): """ # Infinite loop # The database connection is kept open + count_retry = 0 while self.__started: # Add an exception manager to ensure that database eoor will not # shut down calculation + count_retry += 1 try: # Open a database context with app.app_context(): @@ -118,3 +120,4 @@ def __update_database(self): # Wait 2 seconds before next metrics if self.__started: time.sleep(2) + print(f"retry = {count_retry}") \ No newline at end of file diff --git a/sos_trades_api/tools/kubernetes/kubernetes_service.py b/sos_trades_api/tools/kubernetes/kubernetes_service.py index e037eb3c..a8917cd7 100644 --- a/sos_trades_api/tools/kubernetes/kubernetes_service.py +++ b/sos_trades_api/tools/kubernetes/kubernetes_service.py @@ -305,7 +305,7 @@ def kubernetes_get_pod_info(pod_name, pod_namespace): "cpu": "----", "memory": "----", } - max_wait_time = 15 # second + max_wait_time = 20 # second wait_time = 0 
polling_interval = 1 @@ -333,15 +333,10 @@ def kubernetes_get_pod_info(pod_name, pod_namespace): ) while not async_request.ready(): - print("Waiting metric...") time.sleep(1) - print(f"Status: {target_pod.status.phase} - Name container: {target_pod.status.container_statuses[0].name}") resources = async_request.get() - print(resources["items"]) - pod_searched = list(filter(lambda pod: pod["metadata"]["name"] == pod_name, resources["items"])) - print(pod_searched) if len(pod_searched) > 0: pod_cpu = round(float("".join( filter(str.isdigit, pod_searched[0]["containers"][0]["usage"]["cpu"]))) / 1e9, 2) @@ -368,27 +363,6 @@ def kubernetes_get_pod_info(pod_name, pod_namespace): else: print(f"{pod_name} pod not found") - # resources = api.list_namespaced_custom_object(group="metrics.k8s.io", version="v1beta1", - # namespace=pod_namespace, plural="pods") - # - # pod_searched = list(filter(lambda pod: pod["metadata"]["name"] == pod_name, resources["items"])) - # print(pod_searched) - # if len(pod_searched) > 0: - # pod_cpu = round(float("".join( - # filter(str.isdigit, pod_searched[0]["containers"][0]["usage"]["cpu"]))) / 1e9, 2) - # - # # Retrieve memory usage and convert it to GB - # - # pod_memory_kib = round(float("".join(filter(str.isdigit, pod_searched[0]["containers"][0]["usage"]["memory"]))),2) - # pod_memory_gib = pod_memory_kib / (1024 * 1024) - # gigabyte = 1.073741824 - # pod_memory_gb = pod_memory_gib * gigabyte - # - # result["cpu"] = pod_cpu - # result["memory"] = pod_memory_gb - # else: - # print(resources["items"]) - except Exception as error: message = f"Unable to retrieve pod metrics: {error}" app.logger.error(message) From dd888362c10b5f8dc3f376f9de470c97c48fd124 Mon Sep 17 00:00:00 2001 From: GOYON Guillaume Date: Thu, 27 Jun 2024 17:19:25 +0200 Subject: [PATCH 36/73] Removed old requirements file --- api.requirements.txt | 459 ------------------------------------------- 1 file changed, 459 deletions(-) delete mode 100644 api.requirements.txt diff --git 
a/api.requirements.txt b/api.requirements.txt deleted file mode 100644 index fdfddf28..00000000 --- a/api.requirements.txt +++ /dev/null @@ -1,459 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.9 -# by the following command: -# -# pip-compile --output-file=./sostrades-webapi/api.requirements.txt ./GreenProductPortfolio/requirements.in ./gemseo/requirements.txt ./industrial-zones/requirements.in ./portfolio-management/requirements.in ./sostrades-core/requirements.in ./sostrades-webapi/requirements.in ./witness-core/requirements.in ./witness-energy/requirements.in -# -alembic==1.13.1 - # via flask-migrate -autograd==1.6.2 - # via pymoo -bidict==0.23.1 - # via python-socketio -black==22.12.0 - # via -r ./sostrades-core/requirements.in -cachetools==5.3.3 - # via google-auth -certifi==2024.6.2 - # via - # kubernetes - # requests -cffi==1.16.0 - # via cryptography -chaospy==4.3.15 - # via -r ./sostrades-core/requirements.in -charset-normalizer==3.3.2 - # via requests -click==8.1.7 - # via - # -r ./sostrades-webapi/requirements.in - # black - # flask -cma==2.7.0 - # via - # -r ./sostrades-core/requirements.in - # pymoo -coverage[toml]==7.5.3 - # via pytest-cov -cryptography==42.0.8 - # via jwcrypto -custom-inherit==2.4.0 - # via -r ./gemseo/requirements.txt -cvxpy==1.1.18 - # via -r ./sostrades-core/requirements.in -cycler==0.12.1 - # via matplotlib -decorator==5.1.1 - # via networkx -defusedxml==0.7.1 - # via python3-saml -deprecation==2.1.0 - # via python-keycloak -dill==0.3.8 - # via openturns -dnspython==2.6.1 - # via eventlet -ecos==2.0.14 - # via cvxpy -et-xmlfile==1.1.0 - # via openpyxl -eventlet==0.33.3 - # via -r ./sostrades-webapi/requirements.in -exceptiongroup==1.2.1 - # via pytest -execnet==2.1.1 - # via pytest-xdist -fastjsonschema==2.15.1 ; python_version >= "3" - # via -r ./gemseo/requirements.txt -flask==1.1.1 - # via - # -r ./sostrades-webapi/requirements.in - # flask-jwt-extended - # flask-login - # flask-migrate - # 
flask-socketio - # flask-sqlalchemy -flask-jwt-extended==3.24.1 - # via -r ./sostrades-webapi/requirements.in -flask-login==0.5.0 - # via -r ./sostrades-webapi/requirements.in -flask-migrate==2.5.2 - # via -r ./sostrades-webapi/requirements.in -flask-socketio==5.3.6 - # via -r ./sostrades-webapi/requirements.in -flask-sqlalchemy==2.4.1 - # via - # -r ./sostrades-webapi/requirements.in - # flask-migrate -furl==2.1.3 - # via -r ./sostrades-webapi/requirements.in -future==1.0.0 - # via - # -r ./gemseo/requirements.txt - # autograd -genson==1.2.2 - # via -r ./gemseo/requirements.txt -gitdb==4.0.11 - # via gitpython -gitpython==3.1.43 - # via - # -r ./sostrades-core/requirements.in - # -r ./sostrades-webapi/requirements.in -google-auth==2.30.0 - # via kubernetes -graphviz==0.16 - # via - # -r ./gemseo/requirements.txt - # -r ./sostrades-webapi/requirements.in -greenlet==3.0.3 - # via eventlet -h5py==3.2.1 - # via -r ./gemseo/requirements.txt -idna==3.7 - # via requests -importlib-metadata==7.1.0 - # via - # chaospy - # numpoly - # python-arango -iniconfig==2.0.0 - # via pytest -isodate==0.6.1 - # via python3-saml -itsdangerous==2.0.1 - # via - # -r ./sostrades-webapi/requirements.in - # flask -jinja2==3.0.1 - # via - # -r ./gemseo/requirements.txt - # -r ./sostrades-webapi/requirements.in - # flask -joblib==1.4.2 - # via scikit-learn -jwcrypto==1.5.6 - # via python-keycloak -kiwisolver==1.4.5 - # via matplotlib -kubernetes==29.0.0 - # via -r ./sostrades-webapi/requirements.in -lxml==5.2.2 - # via xmlsec -mako==1.3.5 - # via alembic -markupsafe==2.1.5 - # via - # jinja2 - # mako -matplotlib==3.4.3 - # via - # -r ./gemseo/requirements.txt - # -r ./sostrades-core/requirements.in - # -r ./witness-core/requirements.in - # -r ./witness-energy/requirements.in - # pymoo - # seaborn -mpmath==1.3.0 - # via sympy -mypy-extensions==1.0.0 - # via black -mysqlclient==2.2.0 - # via -r ./sostrades-webapi/requirements.in -networkx==2.5 - # via -r ./gemseo/requirements.txt -nlopt==2.7.0 
; python_version >= "3" - # via -r ./gemseo/requirements.txt -numpoly==1.2.12 - # via chaospy -numpy==1.24.4 - # via - # -r ./GreenProductPortfolio/requirements.in - # -r ./gemseo/requirements.txt - # -r ./sostrades-core/requirements.in - # -r ./sostrades-webapi/requirements.in - # -r ./witness-core/requirements.in - # -r ./witness-energy/requirements.in - # autograd - # chaospy - # cvxpy - # ecos - # h5py - # matplotlib - # nlopt - # numpoly - # numpy-financial - # osqp - # pandas - # pdfo - # pydoe2 - # pymoo - # pyxdsm - # qdldl - # scikit-learn - # scipy - # scs - # seaborn -numpy-financial==1.0.0 - # via -r ./portfolio-management/requirements.in -oauthlib==3.2.2 - # via - # kubernetes - # requests-oauthlib -openpyxl==3.0.7 ; python_version >= "3" - # via -r ./gemseo/requirements.txt -openturns==1.18 - # via - # -r ./gemseo/requirements.txt - # -r ./sostrades-core/requirements.in -orderedmultidict==1.0.1 - # via furl -osqp==0.6.7 - # via cvxpy -packaging==21.3 - # via - # -r ./gemseo/requirements.txt - # deprecation - # pytest -pandas==2.2.2 - # via - # -r ./GreenProductPortfolio/requirements.in - # -r ./gemseo/requirements.txt - # -r ./portfolio-management/requirements.in - # -r ./sostrades-core/requirements.in - # -r ./sostrades-webapi/requirements.in - # -r ./witness-core/requirements.in - # -r ./witness-energy/requirements.in - # seaborn -pathspec==0.12.1 - # via black -pdfo==1.0 ; python_version >= "3" - # via -r ./gemseo/requirements.txt -pillow==10.3.0 - # via matplotlib -pint==0.23 - # via -r ./industrial-zones/requirements.in -platformdirs==4.2.2 - # via black -plotly==5.3.0 - # via - # -r ./GreenProductPortfolio/requirements.in - # -r ./sostrades-core/requirements.in - # -r ./sostrades-webapi/requirements.in - # -r ./witness-core/requirements.in - # -r ./witness-energy/requirements.in -pluggy==1.5.0 - # via pytest -psutil==5.9.5 - # via - # -r ./sostrades-webapi/requirements.in - # openturns -pyasn1==0.6.0 - # via - # pyasn1-modules - # python-ldap - 
# rsa -pyasn1-modules==0.4.0 - # via - # google-auth - # python-ldap -pycparser==2.22 - # via cffi -pycryptodome==3.19.1 - # via -r ./sostrades-core/requirements.in -pydoe2==1.3.0 ; python_version >= "3" - # via -r ./gemseo/requirements.txt -pyjwt==1.7.1 - # via - # -r ./sostrades-webapi/requirements.in - # flask-jwt-extended - # python-arango -pymoo==0.5.0 - # via -r ./gemseo/requirements.txt -pyparsing==3.1.2 - # via - # matplotlib - # packaging -pyside2==5.15.2 ; python_version >= "3" - # via -r ./gemseo/requirements.txt -pytest==7.4.3 - # via - # -r ./GreenProductPortfolio/requirements.in - # -r ./portfolio-management/requirements.in - # -r ./sostrades-core/requirements.in - # -r ./sostrades-webapi/requirements.in - # -r ./witness-core/requirements.in - # -r ./witness-energy/requirements.in - # pytest-cov - # pytest-durations - # pytest-xdist -pytest-cov==4.1.0 - # via - # -r ./GreenProductPortfolio/requirements.in - # -r ./portfolio-management/requirements.in - # -r ./sostrades-core/requirements.in - # -r ./sostrades-webapi/requirements.in - # -r ./witness-core/requirements.in - # -r ./witness-energy/requirements.in -pytest-durations==1.2.0 - # via - # -r ./GreenProductPortfolio/requirements.in - # -r ./portfolio-management/requirements.in - # -r ./sostrades-core/requirements.in - # -r ./sostrades-webapi/requirements.in - # -r ./witness-core/requirements.in - # -r ./witness-energy/requirements.in -pytest-xdist==3.4.0 - # via - # -r ./GreenProductPortfolio/requirements.in - # -r ./portfolio-management/requirements.in - # -r ./sostrades-core/requirements.in - # -r ./sostrades-webapi/requirements.in - # -r ./witness-core/requirements.in - # -r ./witness-energy/requirements.in -python-arango==7.5.8 - # via -r ./sostrades-core/requirements.in -python-dateutil==2.9.0.post0 - # via - # kubernetes - # matplotlib - # pandas -python-dotenv==0.12.0 - # via -r ./sostrades-webapi/requirements.in -python-engineio==4.5.1 - # via - # -r ./sostrades-webapi/requirements.in - # 
python-socketio -python-keycloak==4.0.0 - # via -r ./sostrades-webapi/requirements.in -python-ldap==3.4.0 ; platform_system != "Windows" - # via -r ./sostrades-webapi/requirements.in -python-socketio==5.8.0 - # via - # -r ./sostrades-webapi/requirements.in - # flask-socketio -python3-saml==1.9.0 - # via -r ./sostrades-webapi/requirements.in -pytz==2023.3.post1 - # via - # -r ./sostrades-webapi/requirements.in - # pandas -pyxdsm==2.2.0 ; python_version >= "3" - # via -r ./gemseo/requirements.txt -pyyaml==6.0.1 - # via - # -r ./sostrades-core/requirements.in - # -r ./sostrades-webapi/requirements.in - # kubernetes -qdldl==0.1.7.post3 - # via osqp -requests==2.31.0 - # via - # -r ./gemseo/requirements.txt - # -r ./sostrades-webapi/requirements.in - # kubernetes - # python-arango - # python-keycloak - # requests-oauthlib - # requests-toolbelt -requests-oauthlib==2.0.0 - # via kubernetes -requests-toolbelt==1.0.0 - # via - # python-arango - # python-keycloak -rsa==4.9 - # via google-auth -scikit-learn==1.0.1 - # via -r ./gemseo/requirements.txt -scipy==1.10.1 - # via - # -r ./GreenProductPortfolio/requirements.in - # -r ./gemseo/requirements.txt - # -r ./sostrades-core/requirements.in - # -r ./witness-core/requirements.in - # -r ./witness-energy/requirements.in - # chaospy - # cvxpy - # ecos - # osqp - # pydoe2 - # pymoo - # qdldl - # scikit-learn - # scs - # seaborn -scs==3.2.4.post3 - # via cvxpy -seaborn==0.9.0 - # via -r ./witness-core/requirements.in -shiboken2==5.15.2 - # via pyside2 -simplejson==3.19.2 - # via -r ./sostrades-webapi/requirements.in -six==1.16.0 - # via - # -r ./gemseo/requirements.txt - # -r ./sostrades-core/requirements.in - # eventlet - # flask-jwt-extended - # furl - # isodate - # kubernetes - # orderedmultidict - # plotly - # python-dateutil -smmap==5.0.1 - # via gitdb -sqlalchemy==1.3.13 - # via - # -r ./sostrades-webapi/requirements.in - # alembic - # flask-sqlalchemy -sympy==1.4 - # via - # -r ./gemseo/requirements.txt - # -r 
./sostrades-core/requirements.in -tenacity==8.4.1 - # via plotly -threadpoolctl==3.5.0 - # via scikit-learn -tomli==2.0.1 - # via - # black - # coverage - # pytest -tqdm==4.61.0 - # via - # -r ./gemseo/requirements.txt - # -r ./sostrades-core/requirements.in - # -r ./witness-core/requirements.in -typing-extensions==4.12.2 - # via - # alembic - # black - # jwcrypto - # pint -tzdata==2024.1 - # via pandas -urllib3==2.1.0 - # via - # -r ./sostrades-webapi/requirements.in - # kubernetes - # python-arango - # requests -websocket-client==1.8.0 - # via kubernetes -werkzeug==2.0.3 - # via - # -r ./sostrades-webapi/requirements.in - # flask - # flask-jwt-extended -xdsmjs==1.0.1 - # via -r ./gemseo/requirements.txt -xmlsec==1.3.14 - # via python3-saml -zipp==3.19.2 - # via importlib-metadata - -# The following packages are considered to be unsafe in a requirements file: -# setuptools From 7a8338783421c7bab5d46d75e9dacc6067f0384f Mon Sep 17 00:00:00 2001 From: GOYON Guillaume Date: Fri, 28 Jun 2024 09:36:42 +0200 Subject: [PATCH 37/73] Added default ruff rule selection --- ruff.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ruff.toml b/ruff.toml index 254b49e1..31ba521e 100644 --- a/ruff.toml +++ b/ruff.toml @@ -1,5 +1,5 @@ [lint] -select = ["I", "TCH", "PLC", "PLE"] +select = ["I", "TCH", "PLC", "PLE", "F", "E"] # extend-select = ["ALL"] ignore = ["E722", "F841", "E501"] From 3abf2f0ccc690255797e3820ad4b384533b16cd2 Mon Sep 17 00:00:00 2001 From: magueylard Date: Fri, 28 Jun 2024 14:20:31 +0200 Subject: [PATCH 38/73] fix notification import dataset message and mapping catch error --- .../controllers/sostrades_data/study_case_controller.py | 2 +- sos_trades_api/tools/loading/loading_study_and_engine.py | 7 +++++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/sos_trades_api/controllers/sostrades_data/study_case_controller.py b/sos_trades_api/controllers/sostrades_data/study_case_controller.py index 1a3334ca..32dcfb08 100644 --- 
a/sos_trades_api/controllers/sostrades_data/study_case_controller.py +++ b/sos_trades_api/controllers/sostrades_data/study_case_controller.py @@ -851,7 +851,7 @@ def create_new_notification_after_update_parameter(study_id, change_type, coedit # Determine the coedition message based on the type if change_type == StudyCaseChange.DATASET_MAPPING_CHANGE and user_coedition_action == UserCoeditionAction.SAVE: coedition_message = CoeditionMessage.IMPORT_DATASET - if change_type == StudyCaseChange.DATASET_MAPPING_EXPORT and user_coedition_action == UserCoeditionAction.EXPORT: + elif change_type == StudyCaseChange.DATASET_MAPPING_EXPORT and user_coedition_action == UserCoeditionAction.EXPORT: coedition_message = CoeditionMessage.EXPORT_DATASET else: coedition_message = CoeditionMessage.SAVE diff --git a/sos_trades_api/tools/loading/loading_study_and_engine.py b/sos_trades_api/tools/loading/loading_study_and_engine.py index 96921fc1..179c670b 100644 --- a/sos_trades_api/tools/loading/loading_study_and_engine.py +++ b/sos_trades_api/tools/loading/loading_study_and_engine.py @@ -26,6 +26,7 @@ import pandas from eventlet import sleep from numpy import ndarray +from sostrades_core.datasets.dataset_mapping import DatasetsMappingException from sostrades_core.execution_engine.proxy_discipline import ProxyDiscipline from sostrades_core.tools.rw.load_dump_dm_data import DirectLoadDump from sostrades_core.tools.tree.serializer import DataSerializer @@ -448,6 +449,12 @@ def study_case_manager_export_from_dataset_mapping(study_case_manager, datasets_ app.logger.exception( f"Error when exporting in background (from datasets mapping) {study_case_manager.study.name}: {ex}") + except DatasetsMappingException as ex: + study_case_manager.dataset_export_error_dict[notification_id] = f"{ex}" + study_case_manager.dataset_export_status_dict[notification_id] = LoadStatus.IN_ERROR + + app.logger.exception( + f"Error when exporting in background (from datasets mapping) 
{study_case_manager.study.name}: {ex}") From a16812fdf3c99ccd8e973ffd8cda8fee7811172a Mon Sep 17 00:00:00 2001 From: Geoffrey Delric <81676743+gdelric-capgemini@users.noreply.github.com> Date: Fri, 28 Jun 2024 14:46:24 +0200 Subject: [PATCH 39/73] [update metric] add functions to convert bit to byte --- sos_trades_api/tools/code_tools.py | 74 +++++++++++++++ .../tools/execution/execution_metrics.py | 39 ++++---- .../tools/kubernetes/kubernetes_service.py | 89 +++++++++---------- 3 files changed, 134 insertions(+), 68 deletions(-) diff --git a/sos_trades_api/tools/code_tools.py b/sos_trades_api/tools/code_tools.py index 034e09bd..cd91812e 100644 --- a/sos_trades_api/tools/code_tools.py +++ b/sos_trades_api/tools/code_tools.py @@ -17,6 +17,7 @@ import logging import os +import re from time import time from typing import Optional @@ -112,3 +113,76 @@ def file_tail(file_name, line_count, encoding="utf-8"): app.logger.debug(f"Done parsing logs {file_name}.") return lines + + +def convert_bit_into_byte(bit: float, unit_bit: str, unit_byte: str) -> float: + """ + Convert a given amount of bits into bytes based on specified units. + + :param bit: The amount of bits to convert. + :type bit: float + :param unit_bit: The unit of the input bit value. + :type unit_bit: str + :param unit_byte: The unit of the output byte value. + :type unit_byte: str + :return: The converted value in bytes. 
+ :rtype: float + """ + + byte = None + + # Conversion factors + kibibit_to_megabyte = 1 / (8 * 1024) + kibibit_to_gigabyte = 1 / (8 * 1024 * 1024) + megabit_to_megabyte = 1 / 8 + megabit_to_gigabyte = 1 / (8 * 1024) + gigabit_to_gigabyte = 1 / 8 + + if unit_bit.lower() == "mi" or unit_bit.lower() == "megabit": + + # Convert Megabit to Megabyte + if unit_byte.lower() == "mb" or unit_bit.lower() == "megabyte": + byte = bit * megabit_to_megabyte + + # Convert Megabit to Gigabyte + elif unit_byte == "gb" or unit_byte.lower() == "gigabyte": + byte = bit * megabit_to_gigabyte + + elif unit_bit.lower() == "gi" or unit_bit.lower() == "gigabit": + + # Convert Gigabit to Gigabyte + if unit_byte.lower() == "gb" or unit_byte.lower() == "gigabyte": + byte = bit * gigabit_to_gigabyte + + elif unit_bit.lower() == "ki" or unit_bit.lower() == "kibibit": + # Convert kibibit to Megabyte + if unit_byte.lower() == "mb" or unit_byte.lower() == "megabyte": + byte = bit * kibibit_to_megabyte + + # Convert kibibit to Gigabyte + elif unit_byte.lower() == "gb" or unit_byte.lower() == "gigabyte": + byte = bit * kibibit_to_gigabyte + + return byte + + +def extract_number_and_unit(input_string: str) -> tuple: + """ + Extracts the number and unit from a given string. + + Args: + input_string (str): The string from which to extract the number. + + :return: A tuple containing the number and the unit. 
+ :rtype: tuple (number: int, unit: str) + """ + + # Use a regular expression to extract the number and the unit + match = re.match(r"(\d+)\s*([a-zA-Z]+)", input_string.strip()) + if not match: + raise ValueError("The input string must contain both a number and a unit.") + + number = int(match.group(1)) + unit = match.group(2) + + return number, unit diff --git a/sos_trades_api/tools/execution/execution_metrics.py b/sos_trades_api/tools/execution/execution_metrics.py index 600e17f5..3f27381c 100644 --- a/sos_trades_api/tools/execution/execution_metrics.py +++ b/sos_trades_api/tools/execution/execution_metrics.py @@ -22,6 +22,7 @@ from sos_trades_api.config import Config from sos_trades_api.models.database_models import StudyCaseExecution, PodAllocation from sos_trades_api.server.base_server import app, db +from sos_trades_api.tools.code_tools import extract_number_and_unit, convert_bit_into_byte from sos_trades_api.tools.kubernetes.kubernetes_service import kubernetes_get_pod_info """ @@ -58,45 +59,46 @@ def __update_database(self): """ # Infinite loop # The database connection is kept open - count_retry = 0 while self.__started: # Add an exception manager to ensure that database eoor will not # shut down calculation - count_retry += 1 try: # Open a database context with app.app_context(): study_case_execution = StudyCaseExecution.query.filter(StudyCaseExecution.id.like(self.__study_case_execution_id)).first() config = Config() - print(f"config => : {config.execution_strategy}") if config.execution_strategy == Config.CONFIG_EXECUTION_STRATEGY_K8S: study_case_allocation = PodAllocation.query.filter(PodAllocation.identifier == study_case_execution.study_case_id).filter( PodAllocation.pod_type == PodAllocation.TYPE_EXECUTION, ).first() - print(f'pod allocation => : pod name {study_case_allocation.kubernetes_pod_name} + namespace {study_case_allocation.kubernetes_pod_namespace}') - # Retrieve memory and cpu from kubernetes - result = 
kubernetes_get_pod_info(study_case_allocation.kubernetes_pod_name, study_case_allocation.kubernetes_pod_namespace) - print(f'result from kubernetes => : cpu {result["cpu"]} + memory {result["memory"]}') # Retrieve limits of pod from config cpu_limits = 'Not found from configuration' memory_limits = 'Not found from configuration' + unit_byte_target = "GB" pod_execution_limit_from_config = app.config["CONFIG_FLAVOR_KUBERNETES"]["PodExec"][study_case_allocation.flavor]["limits"] - if pod_execution_limit_from_config is not None and pod_execution_limit_from_config["cpu"] is not None and pod_execution_limit_from_config["memory"]: - print(f"memory limit from config {pod_execution_limit_from_config['memory']}") - # Retrieve only numbers of limits + if pod_execution_limit_from_config is not None and pod_execution_limit_from_config["cpu"] is not None and pod_execution_limit_from_config["memory"]: + # CPU limits + cpu_limits = str(''.join(re.findall(r'\d+', pod_execution_limit_from_config["cpu"]))) + # Retrieve and convert memory limits memory_limits_from_config = pod_execution_limit_from_config["memory"] - if "Mi" in memory_limits_from_config: - convert_to_gb = 1024 - else: - convert_to_gb = 8 - memory_limits = str(int(''.join(re.findall(r'\d+', memory_limits_from_config))) / convert_to_gb) - cpu_limits = str(''.join(re.findall(r'\d+', pod_execution_limit_from_config["cpu"]))) + if "mi" in memory_limits_from_config.lower(): + unit_byte_target = "MB" + + # Retrieve and extract limit and its unit + memory_limits_bit, memory_limits_unit_bit = extract_number_and_unit(memory_limits_from_config) + memory_limits_byte_converted = convert_bit_into_byte(memory_limits_bit, memory_limits_unit_bit, + unit_byte_target) + if memory_limits_byte_converted is not None: + memory_limits = round(memory_limits_byte_converted, 2) + + # Retrieve memory and cpu from kubernetes + result = kubernetes_get_pod_info(study_case_allocation.kubernetes_pod_name, 
study_case_allocation.kubernetes_pod_namespace, unit_byte_target) cpu_metric = f'{result["cpu"]}/{cpu_limits}' - memory_metric = f'{result["memory"]}/{memory_limits} [GB]' + memory_metric = f'{result["memory"]}/{memory_limits} [{unit_byte_target}]' else: # Check environment info @@ -119,5 +121,4 @@ def __update_database(self): finally: # Wait 2 seconds before next metrics if self.__started: - time.sleep(2) - print(f"retry = {count_retry}") \ No newline at end of file + time.sleep(2) \ No newline at end of file diff --git a/sos_trades_api/tools/kubernetes/kubernetes_service.py b/sos_trades_api/tools/kubernetes/kubernetes_service.py index a8917cd7..155d0ea3 100644 --- a/sos_trades_api/tools/kubernetes/kubernetes_service.py +++ b/sos_trades_api/tools/kubernetes/kubernetes_service.py @@ -20,6 +20,7 @@ from kubernetes import client, config, watch from sos_trades_api.server.base_server import app +from sos_trades_api.tools.code_tools import convert_bit_into_byte, extract_number_and_unit """ Execution engine kubernete @@ -291,7 +292,7 @@ def kubernetes_load_kube_config(): raise ExecutionEngineKuberneteError(message) -def kubernetes_get_pod_info(pod_name, pod_namespace): +def kubernetes_get_pod_info(pod_name, pod_namespace, unit_byte_target: str): """ get pod usage info like cpu and memory :param pod_name: unique name of the pod => metadata.name @@ -299,6 +300,10 @@ def kubernetes_get_pod_info(pod_name, pod_namespace): :param pod_namespace: namespace where is the pod :type pod_namespace: str + + :param unit_byte_target: unit in byte targeted + :type unit_byte_target: str + :return: dict with cpu usage (number of cpu) and memory usage (Go) """ result = { @@ -312,56 +317,42 @@ def kubernetes_get_pod_info(pod_name, pod_namespace): # Create k8 api client object kubernetes_load_kube_config() try: - v1 = client.CoreV1Api() api = client.CustomObjectsApi() - pods = v1.list_namespaced_pod(pod_namespace) - target_pod = None - for pod in pods.items: - if pod.metadata.name == 
pod_name: - target_pod = pod - break - if target_pod: - if target_pod.status.phase == "Running": - while wait_time < max_wait_time: - async_request = api.list_namespaced_custom_object( - group="metrics.k8s.io", - version="v1beta1", - namespace=pod_namespace, - plural="pods", - async_req=True - ) - - while not async_request.ready(): - time.sleep(1) - - resources = async_request.get() - pod_searched = list(filter(lambda pod: pod["metadata"]["name"] == pod_name, resources["items"])) - if len(pod_searched) > 0: - pod_cpu = round(float("".join( - filter(str.isdigit, pod_searched[0]["containers"][0]["usage"]["cpu"]))) / 1e9, 2) - - # Retrieve memory usage and convert it to gigabit - pod_memory_kib = round( - float("".join(filter(str.isdigit, pod_searched[0]["containers"][0]["usage"]["memory"]))), 2) - pod_memory_gib = pod_memory_kib / (1024 * 1024) - - pod_memory_gb = pod_memory_gib / 8 - - result["cpu"] = pod_cpu - result["memory"] = round(pod_memory_gb, 2) - break - else: - time.sleep(polling_interval) - wait_time += polling_interval - - if wait_time >= max_wait_time: - print(f"Max wait time {max_wait_time}s exceeded. 
Pod not found or not running.") + while wait_time < max_wait_time: + async_request = api.list_namespaced_custom_object( + group="metrics.k8s.io", + version="v1beta1", + namespace=pod_namespace, + plural="pods", + async_req=True + ) + + while not async_request.ready(): + time.sleep(1) + + resources = async_request.get() + pod_searched = list(filter(lambda pod: pod["metadata"]["name"] == pod_name, resources["items"])) + if len(pod_searched) > 0: + + # Retrieve cpu (in nanocores) and unit and convert it in CPU + pod_cpu_nanocores, pod_cpu_unit = extract_number_and_unit(pod_searched[0]["containers"][0]["usage"]["cpu"]) + pod_cpu = round(pod_cpu_nanocores / 1e9, 2) + + # Retrieve memory usage and convert it to gigabit + pod_memory_kib, pod_memory_unit = extract_number_and_unit(pod_searched[0]["containers"][0]["usage"]["memory"]) + pod_memory_converted = convert_bit_into_byte(pod_memory_kib, pod_memory_unit, unit_byte_target) + + result["cpu"] = pod_cpu + result["memory"] = round(pod_memory_converted, 2) + break else: - print(f" {pod_name} is not running. 
Status: {target_pod.status.phase}") - else: - print(f"{pod_name} pod not found") + time.sleep(polling_interval) + wait_time += polling_interval + + if wait_time >= max_wait_time: + print(f"Max wait time {max_wait_time}s to load metrics exceeded") except Exception as error: message = f"Unable to retrieve pod metrics: {error}" @@ -431,13 +422,13 @@ def watch_pod_events(logger, namespace): event['object']['metadata']['name'].startswith('generation') : yield event - + logger.info("Finished namespace stream.") except urllib3.exceptions.ReadTimeoutError as exception: #time out, the watcher will be restarted pass - + def get_pod_name_from_event(event): return event["object"]["metadata"]["name"] From 388841897eb0b164b32148872f1d3f1c2a688f7e Mon Sep 17 00:00:00 2001 From: b4pm-devops Date: Fri, 28 Jun 2024 21:11:12 +0000 Subject: [PATCH 40/73] merge integration to validation From 6b4a742e7a4942c0b34972782c45ffc0dd074463 Mon Sep 17 00:00:00 2001 From: Geoffrey Delric <81676743+gdelric-capgemini@users.noreply.github.com> Date: Mon, 1 Jul 2024 09:36:42 +0200 Subject: [PATCH 41/73] [update metric] update variable --- sos_trades_api/config.py | 7 ++++--- .../tools/execution/execution_metrics.py | 16 ++++++++-------- .../tools/kubernetes/kubernetes_service.py | 8 ++++---- 3 files changed, 16 insertions(+), 15 deletions(-) diff --git a/sos_trades_api/config.py b/sos_trades_api/config.py index ba5aa765..1f6df1e7 100644 --- a/sos_trades_api/config.py +++ b/sos_trades_api/config.py @@ -64,6 +64,7 @@ class Config: CONFIG_LOCAL_FOLDER_PATH = "SOS_TRADES_LOCAL_FOLDER" CONFIG_FLAVOR_KUBERNETES = "CONFIG_FLAVOR_KUBERNETES" CONFIG_ACTIVATE_POD_WATCHER = "ACTIVATE_POD_WATCHER" + CONFIG_FLAVOR_POD_EXECUTION = "PodExec" def __init__(self): """ @@ -640,12 +641,12 @@ def kubernetes_flavor_config_for_exec(self): kubernetes_flavor = self.__server_config_file[self.CONFIG_FLAVOR_KUBERNETES] - if "PodExec" not in kubernetes_flavor.keys(): + if self.CONFIG_FLAVOR_POD_EXECUTION not in 
kubernetes_flavor.keys(): raise KeyError("PodExec is not in CONFIG_FLAVOR_KUBERNETES") - self.__validate_flavor(kubernetes_flavor["PodExec"]) + self.__validate_flavor(kubernetes_flavor[self.CONFIG_FLAVOR_POD_EXECUTION]) - self.__kubernetes_flavor_for_exec = kubernetes_flavor["PodExec"] + self.__kubernetes_flavor_for_exec = kubernetes_flavor[self.CONFIG_FLAVOR_POD_EXECUTION] return self.__kubernetes_flavor_for_exec diff --git a/sos_trades_api/tools/execution/execution_metrics.py b/sos_trades_api/tools/execution/execution_metrics.py index 3f27381c..bb01dbf9 100644 --- a/sos_trades_api/tools/execution/execution_metrics.py +++ b/sos_trades_api/tools/execution/execution_metrics.py @@ -73,10 +73,10 @@ def __update_database(self): ).first() # Retrieve limits of pod from config - cpu_limits = 'Not found from configuration' - memory_limits = 'Not found from configuration' - unit_byte_target = "GB" - pod_execution_limit_from_config = app.config["CONFIG_FLAVOR_KUBERNETES"]["PodExec"][study_case_allocation.flavor]["limits"] + cpu_limits = '----' + memory_limits = '----' + unit_byte_to_conversion = "GB" + pod_execution_limit_from_config = app.config[Config.CONFIG_FLAVOR_KUBERNETES][Config.CONFIG_FLAVOR_POD_EXECUTION][study_case_allocation.flavor]["limits"] if pod_execution_limit_from_config is not None and pod_execution_limit_from_config["cpu"] is not None and pod_execution_limit_from_config["memory"]: # CPU limits @@ -85,20 +85,20 @@ def __update_database(self): memory_limits_from_config = pod_execution_limit_from_config["memory"] if "mi" in memory_limits_from_config.lower(): - unit_byte_target = "MB" + unit_byte_to_conversion = "MB" # Retrieve and extract limit and its unit memory_limits_bit, memory_limits_unit_bit = extract_number_and_unit(memory_limits_from_config) memory_limits_byte_converted = convert_bit_into_byte(memory_limits_bit, memory_limits_unit_bit, - unit_byte_target) + unit_byte_to_conversion) if memory_limits_byte_converted is not None: memory_limits = 
round(memory_limits_byte_converted, 2) # Retrieve memory and cpu from kubernetes - result = kubernetes_get_pod_info(study_case_allocation.kubernetes_pod_name, study_case_allocation.kubernetes_pod_namespace, unit_byte_target) + result = kubernetes_get_pod_info(study_case_allocation.kubernetes_pod_name, study_case_allocation.kubernetes_pod_namespace, unit_byte_to_conversion) cpu_metric = f'{result["cpu"]}/{cpu_limits}' - memory_metric = f'{result["memory"]}/{memory_limits} [{unit_byte_target}]' + memory_metric = f'{result["memory"]}/{memory_limits} [{unit_byte_to_conversion}]' else: # Check environment info diff --git a/sos_trades_api/tools/kubernetes/kubernetes_service.py b/sos_trades_api/tools/kubernetes/kubernetes_service.py index 155d0ea3..a14d896d 100644 --- a/sos_trades_api/tools/kubernetes/kubernetes_service.py +++ b/sos_trades_api/tools/kubernetes/kubernetes_service.py @@ -292,7 +292,7 @@ def kubernetes_load_kube_config(): raise ExecutionEngineKuberneteError(message) -def kubernetes_get_pod_info(pod_name, pod_namespace, unit_byte_target: str): +def kubernetes_get_pod_info(pod_name, pod_namespace, unit_byte_to_conversion: str): """ get pod usage info like cpu and memory :param pod_name: unique name of the pod => metadata.name @@ -301,8 +301,8 @@ def kubernetes_get_pod_info(pod_name, pod_namespace, unit_byte_target: str): :param pod_namespace: namespace where is the pod :type pod_namespace: str - :param unit_byte_target: unit in byte targeted - :type unit_byte_target: str + :param unit_byte_to_conversion: unit in byte targeted + :type unit_byte_to_conversion: str :return: dict with cpu usage (number of cpu) and memory usage (Go) """ @@ -342,7 +342,7 @@ def kubernetes_get_pod_info(pod_name, pod_namespace, unit_byte_target: str): # Retrieve memory usage and convert it to gigabit pod_memory_kib, pod_memory_unit = extract_number_and_unit(pod_searched[0]["containers"][0]["usage"]["memory"]) - pod_memory_converted = convert_bit_into_byte(pod_memory_kib, 
pod_memory_unit, unit_byte_target) + pod_memory_converted = convert_bit_into_byte(pod_memory_kib, pod_memory_unit, unit_byte_to_conversion) result["cpu"] = pod_cpu result["memory"] = round(pod_memory_converted, 2) From 8f4a680b0faa9b22876a78d567fcf786944ce4e6 Mon Sep 17 00:00:00 2001 From: Geoffrey Delric <81676743+gdelric-capgemini@users.noreply.github.com> Date: Mon, 1 Jul 2024 10:11:02 +0200 Subject: [PATCH 42/73] Update fonction code tool --- sos_trades_api/tools/code_tools.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/sos_trades_api/tools/code_tools.py b/sos_trades_api/tools/code_tools.py index cd91812e..0a1f37d0 100644 --- a/sos_trades_api/tools/code_tools.py +++ b/sos_trades_api/tools/code_tools.py @@ -178,11 +178,12 @@ def extract_number_and_unit(input_string: str) -> tuple: """ # Use a regular expression to extract the number and the unit - match = re.match(r"(\d+)\s*([a-zA-Z]+)", input_string.strip()) + match = re.match(r"(\d+[\.,]?\d*)\s*([a-zA-Z]+)", input_string.strip()) if not match: raise ValueError("The input string must contain both a number and a unit.") - number = int(match.group(1)) + # Replace comma with a dot to handle decimal numbers correctly + number = float(match.group(1).replace(',', '.')) unit = match.group(2) return number, unit From f8d4dda47d6e70385edd2ab878e06611f5c1a823 Mon Sep 17 00:00:00 2001 From: b4pm-devops Date: Mon, 1 Jul 2024 21:11:00 +0000 Subject: [PATCH 43/73] Merge integration to validation and update version.info --- sos_trades_api/version.info | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sos_trades_api/version.info b/sos_trades_api/version.info index 841e00df..4720facc 100644 --- a/sos_trades_api/version.info +++ b/sos_trades_api/version.info @@ -1 +1 @@ -Fri Jun 28 21:11:12 UTC 2024 +Mon Jul 1 21:11:00 UTC 2024 From 6ea36c7e709550a8892f7d50ad712de53c40a75e Mon Sep 17 00:00:00 2001 From: b4pm-devops Date: Mon, 1 Jul 2024 21:11:00 +0000 Subject: [PATCH 44/73] merge 
integration to validation From 9d975f62e74b3ba0d440b0e25a96ce75c495c2fc Mon Sep 17 00:00:00 2001 From: GOYON Guillaume Date: Tue, 2 Jul 2024 15:14:41 +0200 Subject: [PATCH 45/73] Updated requests version --- requirements.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.in b/requirements.in index b3a360af..86ead867 100644 --- a/requirements.in +++ b/requirements.in @@ -31,7 +31,7 @@ python-socketio==5.8.0 # Must be added to freeze version, so message server wor python3-saml==1.9.0 pytz==2023.3.post1 PyYAML==6.0.1 -requests==2.31.0 +requests==2.32.3 simplejson==3.19.2 SQLAlchemy==1.3.13 urllib3==2.1.0 From f517051e61b456b301876021a0467ccbec1f01a4 Mon Sep 17 00:00:00 2001 From: b4pm-devops Date: Tue, 2 Jul 2024 21:10:56 +0000 Subject: [PATCH 46/73] merge integration to validation From ecbfdfb5da4d0f3786cc7e40fdf7431110ddcc00 Mon Sep 17 00:00:00 2001 From: GOYON Guillaume Date: Wed, 3 Jul 2024 10:49:30 +0200 Subject: [PATCH 47/73] Updated credits version --- CREDITS.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CREDITS.rst b/CREDITS.rst index 7417a095..63684b25 100644 --- a/CREDITS.rst +++ b/CREDITS.rst @@ -81,7 +81,7 @@ sostrades-webapi depends on software with compatible licenses that are listed be `PyYAML (6.0.1) `_ MIT -`requests (2.31.0) `_ +`requests (2.32.3) `_ Apache 2.0 `simplejson (3.19.2) `_ From a75da4501257d2e465410e8861b23361ccb79328 Mon Sep 17 00:00:00 2001 From: Geoffrey Delric <81676743+gdelric-capgemini@users.noreply.github.com> Date: Wed, 3 Jul 2024 10:54:16 +0200 Subject: [PATCH 48/73] [test update metric] update conversion and add count of retry --- sos_trades_api/tools/code_tools.py | 59 +++++----- .../tools/execution/execution_metrics.py | 23 ++-- .../tools/kubernetes/kubernetes_service.py | 101 ++++++++++++------ 3 files changed, 111 insertions(+), 72 deletions(-) diff --git a/sos_trades_api/tools/code_tools.py b/sos_trades_api/tools/code_tools.py index 0a1f37d0..02719ae7 100644 --- 
a/sos_trades_api/tools/code_tools.py +++ b/sos_trades_api/tools/code_tools.py @@ -115,66 +115,67 @@ def file_tail(file_name, line_count, encoding="utf-8"): return lines -def convert_bit_into_byte(bit: float, unit_bit: str, unit_byte: str) -> float: +def convert_byte_into_byte_unit_targeted(byte: float, unit_bibit: str, unit_byte: str) -> float: """ + :Summary: Convert a given amount of bits into bytes based on specified units. - :param bit: The amount of bits to convert. - :type bit: float - :param unit_bit: The unit of the input bit value. - :type unit_bit: str - :param unit_byte: The unit of the output byte value. - :type unit_byte: str - :return: The converted value in bytes. + :Args: + byte (float): The amount of bits to convert. + unit_bit (str): The unit of the input bit value. + unit_byte (str): The unit of the output byte value. + + :Return: The converted value in bytes. :rtype: float - """ + """ - byte = None + byte_converted = None # Conversion factors - kibibit_to_megabyte = 1 / (8 * 1024) - kibibit_to_gigabyte = 1 / (8 * 1024 * 1024) - megabit_to_megabyte = 1 / 8 - megabit_to_gigabyte = 1 / (8 * 1024) - gigabit_to_gigabyte = 1 / 8 + kibibit_to_megabit = 1 / 976.6 + kibibit_to_gigabit = 1 / 976600 + mebibit_to_megabit = 1.049 + mebibit_to_gigabit = 1 / 953.7 + gibibit_to_gigabit = 1.074 - if unit_bit.lower() == "mi" or unit_bit.lower() == "megabit": + if unit_bibit.lower() == "mi" or unit_bibit.lower() == "megabit": # Convert Megabit to Megabyte - if unit_byte.lower() == "mb" or unit_bit.lower() == "megabyte": - byte = bit * megabit_to_megabyte + if unit_byte.lower() == "mb" or unit_bibit.lower() == "megabyte": + byte_converted = byte * mebibit_to_megabit # Convert Megabit to Gigabyte elif unit_byte == "gb" or unit_byte.lower() == "gigabyte": - byte = bit * megabit_to_gigabyte + byte_converted = byte * mebibit_to_gigabit - elif unit_bit.lower() == "gi" or unit_bit.lower() == "gigabit": + elif unit_bibit.lower() == "gi" or unit_bibit.lower() == "gigabit": 
# Convert Gigabit to Gigabyte if unit_byte.lower() == "gb" or unit_byte.lower() == "gigabyte": - byte = bit * gigabit_to_gigabyte + byte_converted = byte * gibibit_to_gigabit - elif unit_bit.lower() == "ki" or unit_bit.lower() == "kibibit": + elif unit_bibit.lower() == "ki" or unit_bibit.lower() == "kibibit": # Convert kibibit to Megabyte if unit_byte.lower() == "mb" or unit_byte.lower() == "megabyte": - byte = bit * kibibit_to_megabyte + byte_converted = byte * kibibit_to_megabit # Convert kibibit to Gigabyte elif unit_byte.lower() == "gb" or unit_byte.lower() == "gigabyte": - byte = bit * kibibit_to_gigabyte + byte_converted = byte * kibibit_to_gigabit - return byte + return byte_converted def extract_number_and_unit(input_string: str) -> tuple: """ - Extracts the number and unit from a given string. + :Summary: + Extracts the number and unit from a given string. Args: - input_string (str): The string from which to extract the number. + input_string (str): The string from which to extract the number. - :return: A tuple containing the number and the unit. - :rtype: tuple (number: int, unit: str) + :Return: A tuple containing the number and the unit. 
+ :rtype: tuple (number: int, unit: str) """ # Use a regular expression to extract the number and the unit diff --git a/sos_trades_api/tools/execution/execution_metrics.py b/sos_trades_api/tools/execution/execution_metrics.py index bb01dbf9..9e7581f7 100644 --- a/sos_trades_api/tools/execution/execution_metrics.py +++ b/sos_trades_api/tools/execution/execution_metrics.py @@ -22,7 +22,7 @@ from sos_trades_api.config import Config from sos_trades_api.models.database_models import StudyCaseExecution, PodAllocation from sos_trades_api.server.base_server import app, db -from sos_trades_api.tools.code_tools import extract_number_and_unit, convert_bit_into_byte +from sos_trades_api.tools.code_tools import extract_number_and_unit, convert_byte_into_byte_unit_targeted from sos_trades_api.tools.kubernetes.kubernetes_service import kubernetes_get_pod_info """ @@ -59,11 +59,13 @@ def __update_database(self): """ # Infinite loop # The database connection is kept open + count_retry = 0 while self.__started: # Add an exception manager to ensure that database eoor will not # shut down calculation try: # Open a database context + count_retry += 1 with app.app_context(): study_case_execution = StudyCaseExecution.query.filter(StudyCaseExecution.id.like(self.__study_case_execution_id)).first() config = Config() @@ -76,20 +78,19 @@ def __update_database(self): cpu_limits = '----' memory_limits = '----' unit_byte_to_conversion = "GB" - pod_execution_limit_from_config = app.config[Config.CONFIG_FLAVOR_KUBERNETES][Config.CONFIG_FLAVOR_POD_EXECUTION][study_case_allocation.flavor]["limits"] + pod_exec_memory_limit_from_config = app.config[Config.CONFIG_FLAVOR_KUBERNETES][Config.CONFIG_FLAVOR_POD_EXECUTION][study_case_allocation.flavor]["limits"]["memory"] + pod_exec_cpu_limit_from_config = app.config[Config.CONFIG_FLAVOR_KUBERNETES][Config.CONFIG_FLAVOR_POD_EXECUTION][study_case_allocation.flavor]["limits"]["cpu"] - if pod_execution_limit_from_config is not None and 
pod_execution_limit_from_config["cpu"] is not None and pod_execution_limit_from_config["memory"]: + if pod_exec_memory_limit_from_config is not None and pod_exec_cpu_limit_from_config: # CPU limits - cpu_limits = str(''.join(re.findall(r'\d+', pod_execution_limit_from_config["cpu"]))) + cpu_limits = str(''.join(re.findall(r'\d+', pod_exec_cpu_limit_from_config))) # Retrieve and convert memory limits - memory_limits_from_config = pod_execution_limit_from_config["memory"] - - if "mi" in memory_limits_from_config.lower(): + if "mi" in pod_exec_memory_limit_from_config.lower(): unit_byte_to_conversion = "MB" # Retrieve and extract limit and its unit - memory_limits_bit, memory_limits_unit_bit = extract_number_and_unit(memory_limits_from_config) - memory_limits_byte_converted = convert_bit_into_byte(memory_limits_bit, memory_limits_unit_bit, + memory_limits_bit, memory_limits_unit_bit = extract_number_and_unit(pod_exec_memory_limit_from_config) + memory_limits_byte_converted = convert_byte_into_byte_unit_targeted(memory_limits_bit, memory_limits_unit_bit, unit_byte_to_conversion) if memory_limits_byte_converted is not None: memory_limits = round(memory_limits_byte_converted, 2) @@ -121,4 +122,6 @@ def __update_database(self): finally: # Wait 2 seconds before next metrics if self.__started: - time.sleep(2) \ No newline at end of file + time.sleep(2) + + print(f"retry = {count_retry}") \ No newline at end of file diff --git a/sos_trades_api/tools/kubernetes/kubernetes_service.py b/sos_trades_api/tools/kubernetes/kubernetes_service.py index a14d896d..4b24f857 100644 --- a/sos_trades_api/tools/kubernetes/kubernetes_service.py +++ b/sos_trades_api/tools/kubernetes/kubernetes_service.py @@ -20,7 +20,7 @@ from kubernetes import client, config, watch from sos_trades_api.server.base_server import app -from sos_trades_api.tools.code_tools import convert_bit_into_byte, extract_number_and_unit +from sos_trades_api.tools.code_tools import convert_byte_into_byte_unit_targeted, 
extract_number_and_unit """ Execution engine kubernete @@ -306,6 +306,7 @@ def kubernetes_get_pod_info(pod_name, pod_namespace, unit_byte_to_conversion: st :return: dict with cpu usage (number of cpu) and memory usage (Go) """ + result = { "cpu": "----", "memory": "----", @@ -317,42 +318,76 @@ def kubernetes_get_pod_info(pod_name, pod_namespace, unit_byte_to_conversion: st # Create k8 api client object kubernetes_load_kube_config() try: - api = client.CustomObjectsApi() - - while wait_time < max_wait_time: - async_request = api.list_namespaced_custom_object( - group="metrics.k8s.io", - version="v1beta1", - namespace=pod_namespace, - plural="pods", - async_req=True - ) - - while not async_request.ready(): - time.sleep(1) - - resources = async_request.get() - pod_searched = list(filter(lambda pod: pod["metadata"]["name"] == pod_name, resources["items"])) - if len(pod_searched) > 0: - # Retrieve cpu (in nanocores) and unit and convert it in CPU - pod_cpu_nanocores, pod_cpu_unit = extract_number_and_unit(pod_searched[0]["containers"][0]["usage"]["cpu"]) - pod_cpu = round(pod_cpu_nanocores / 1e9, 2) + v1 = client.CoreV1Api() + pods = v1.list_namespaced_pod(pod_namespace) - # Retrieve memory usage and convert it to gigabit - pod_memory_kib, pod_memory_unit = extract_number_and_unit(pod_searched[0]["containers"][0]["usage"]["memory"]) - - pod_memory_converted = convert_bit_into_byte(pod_memory_kib, pod_memory_unit, unit_byte_to_conversion) - - result["cpu"] = pod_cpu - result["memory"] = round(pod_memory_converted, 2) + target_pod = None + for pod in pods.items: + if pod.metadata.name == pod_name: + target_pod = pod break - else: - time.sleep(polling_interval) - wait_time += polling_interval + if target_pod: + print(f"pod '{target_pod.metadata.name}' is '{target_pod.status.phase}'") + if target_pod.status.phase == "Running": + + api = client.CustomObjectsApi() + async_request = api.list_namespaced_custom_object( + group="metrics.k8s.io", + version="v1beta1", + 
namespace=pod_namespace, + plural="pods", + async_req=True + ) + + resources = async_request.get() + print(f"Pods list :{resources['items']}") + pod_searched = list(filter(lambda pod: pod["metadata"]["name"] == pod_name, resources["items"])) + if len(pod_searched) > 0: + + # Retrieve cpu (in nanocores) and unit and convert it in CPU + pod_cpu_nanocores, pod_cpu_unit = extract_number_and_unit(pod_searched[0]["containers"][0]["usage"]["cpu"]) + pod_cpu = round(pod_cpu_nanocores / 1e9, 2) + + # Retrieve memory usage and convert it to gigabit + pod_memory_kib, pod_memory_unit = extract_number_and_unit(pod_searched[0]["containers"][0]["usage"]["memory"]) + + pod_memory_converted = convert_byte_into_byte_unit_targeted(pod_memory_kib, pod_memory_unit, unit_byte_to_conversion) + + result["cpu"] = pod_cpu + result["memory"] = round(pod_memory_converted, 2) + + + # # cgroup v1 + # memory_current_path = "/sys/fs/cgroup/memory.current" + # cpu_stat_path = "/sys/fs/cgroup/cpu.stat" + # + # # Commande pour lire le fichier memory.current + # command_memory = ["cat", memory_current_path] + # command_cpu = ["cat", cpu_stat_path] + # from kubernetes.stream import stream + # # Exécuter la commande dans le pod + # response_memory = stream(v1.connect_get_namespaced_pod_exec, + # pod_name, + # pod_namespace, + # command=command_memory, + # stderr=True, stdin=False, + # stdout=True, tty=False) + # memory_converted = convert_byte_into_byte_unit_targeted(int(response_memory), "octet", unit_byte_to_conversion) + # response_cpu = stream(v1.connect_get_namespaced_pod_exec, + # pod_name, + # pod_namespace, + # command=command_cpu, + # stderr=True, stdin=False, + # stdout=True, tty=False) + # x = create_stat_dict(response_cpu) + # usage_usec = x['usage_usec'] + # + # + # result["memory"] = round(memory_converted, 2) + # result["cpu"] = round(usage_usec / 1e6, 2) + - if wait_time >= max_wait_time: - print(f"Max wait time {max_wait_time}s to load metrics exceeded") except Exception as error: 
message = f"Unable to retrieve pod metrics: {error}" From e00c1a3db619ecf7e805ae03c15685192c28e3f4 Mon Sep 17 00:00:00 2001 From: Geoffrey Delric <81676743+gdelric-capgemini@users.noreply.github.com> Date: Wed, 3 Jul 2024 15:58:25 +0200 Subject: [PATCH 49/73] [test update metric] retrieve also metric from file system --- sos_trades_api/tools/code_tools.py | 92 +++++++++++------- .../tools/execution/execution_metrics.py | 2 +- sos_trades_api/tools/file_tools.py | 46 +++++++++ .../tools/kubernetes/kubernetes_service.py | 94 ++++++++++--------- 4 files changed, 153 insertions(+), 81 deletions(-) diff --git a/sos_trades_api/tools/code_tools.py b/sos_trades_api/tools/code_tools.py index 02719ae7..bc4d09e1 100644 --- a/sos_trades_api/tools/code_tools.py +++ b/sos_trades_api/tools/code_tools.py @@ -115,15 +115,15 @@ def file_tail(file_name, line_count, encoding="utf-8"): return lines -def convert_byte_into_byte_unit_targeted(byte: float, unit_bibit: str, unit_byte: str) -> float: +def convert_byte_into_byte_unit_targeted(bytes_to_convert: float, unit_source: str, unit_bytes: str) -> float: """ :Summary: Convert a given amount of bits into bytes based on specified units. :Args: - byte (float): The amount of bits to convert. - unit_bit (str): The unit of the input bit value. - unit_byte (str): The unit of the output byte value. + bytes_to_convert (float): The amount of bytes to convert. + unit_source (str): The unit of source bytes. + unit_bytes (str): The unit to convert bytes. :Return: The converted value in bytes. 
:rtype: float @@ -132,36 +132,59 @@ def convert_byte_into_byte_unit_targeted(byte: float, unit_bibit: str, unit_byte byte_converted = None # Conversion factors - kibibit_to_megabit = 1 / 976.6 - kibibit_to_gigabit = 1 / 976600 - mebibit_to_megabit = 1.049 - mebibit_to_gigabit = 1 / 953.7 - gibibit_to_gigabit = 1.074 - - if unit_bibit.lower() == "mi" or unit_bibit.lower() == "megabit": - - # Convert Megabit to Megabyte - if unit_byte.lower() == "mb" or unit_bibit.lower() == "megabyte": - byte_converted = byte * mebibit_to_megabit - - # Convert Megabit to Gigabyte - elif unit_byte == "gb" or unit_byte.lower() == "gigabyte": - byte_converted = byte * mebibit_to_gigabit - - elif unit_bibit.lower() == "gi" or unit_bibit.lower() == "gigabit": - - # Convert Gigabit to Gigabyte - if unit_byte.lower() == "gb" or unit_byte.lower() == "gigabyte": - byte_converted = byte * gibibit_to_gigabit - - elif unit_bibit.lower() == "ki" or unit_bibit.lower() == "kibibit": - # Convert kibibit to Megabyte - if unit_byte.lower() == "mb" or unit_byte.lower() == "megabyte": - byte_converted = byte * kibibit_to_megabit - - # Convert kibibit to Gigabyte - elif unit_byte.lower() == "gb" or unit_byte.lower() == "gigabyte": - byte_converted = byte * kibibit_to_gigabit + bytes_to_megabyte = 1 / (1024 * 1024) + bytes_to_gigabyte = 1 / (1024 * 1024 * 1024) + kibibytes_to_megabytes = 1024/1000/1000 + kibibytes_to_gigabytes = 1024/1000/1000/1000 + mebibytes_to_megabytes = 1024*1024/1000/1000 + mebibytes_to_gigabytes = 1024*1024/1000/1000/1000 + gibibytes_to_gigabytes = 1024*1024*1024/1000/1000/1000 + + if unit_source.lower() == "byte": + # Convert byte to Megabyte + if unit_bytes.lower() == "mb" or unit_bytes.lower() == "megabyte": + byte_converted = bytes_to_convert * bytes_to_megabyte + + # Convert v to Gigabyte + elif unit_bytes.lower() == "gb" or unit_bytes.lower() == "gigabyte": + byte_converted = bytes_to_convert * bytes_to_gigabyte + else: + raise ValueError(f'Unit {unit_bytes} is not handled') 
+ + elif unit_source.lower() == "ki" or unit_source.lower() == "kibibyte": + # Convert kibibyte to Megabyte + if unit_bytes.lower() == "mb" or unit_bytes.lower() == "megabyte": + byte_converted = bytes_to_convert * kibibytes_to_megabytes + + # Convert kibibyte to Gigabyte + elif unit_bytes.lower() == "gb" or unit_bytes.lower() == "gigabyte": + byte_converted = bytes_to_convert * kibibytes_to_gigabytes + else: + raise ValueError(f'Unit {unit_bytes} is not handled') + + elif unit_source.lower() == "mi" or unit_source.lower() == "mebibyte": + + # Convert Megabyte to Megabyte + if unit_bytes.lower() == "mb" or unit_bytes.lower() == "megabyte": + byte_converted = bytes_to_convert * mebibytes_to_megabytes + + # Convert Megabyte to Gigabyte + elif unit_bytes == "gb" or unit_bytes.lower() == "gigabyte": + byte_converted = bytes_to_convert * mebibytes_to_gigabytes + else: + raise ValueError(f'Unit {unit_bytes} is not handled') + + elif unit_source.lower() == "gi" or unit_source.lower() == "gibibyte": + + # Convert Gigabyte to Gigabyte + if unit_bytes.lower() == "gb" or unit_bytes.lower() == "gigabyte": + byte_converted = bytes_to_convert * gibibytes_to_gigabytes + else: + raise ValueError(f'Unit {unit_bytes} is not handled') + + + else: + raise ValueError(f'Unit {unit_source} is not handled') return byte_converted @@ -188,3 +211,4 @@ def extract_number_and_unit(input_string: str) -> tuple: unit = match.group(2) return number, unit + diff --git a/sos_trades_api/tools/execution/execution_metrics.py b/sos_trades_api/tools/execution/execution_metrics.py index 9e7581f7..1fc13dc5 100644 --- a/sos_trades_api/tools/execution/execution_metrics.py +++ b/sos_trades_api/tools/execution/execution_metrics.py @@ -96,7 +96,7 @@ def __update_database(self): memory_limits = round(memory_limits_byte_converted, 2) # Retrieve memory and cpu from kubernetes - result = kubernetes_get_pod_info(study_case_allocation.kubernetes_pod_name, study_case_allocation.kubernetes_pod_namespace, 
unit_byte_to_conversion) + result = kubernetes_get_pod_info(study_case_allocation.kubernetes_pod_name, study_case_allocation.kubernetes_pod_namespace, unit_byte_to_conversion, int(cpu_limits)) cpu_metric = f'{result["cpu"]}/{cpu_limits}' memory_metric = f'{result["memory"]}/{memory_limits} [{unit_byte_to_conversion}]' diff --git a/sos_trades_api/tools/file_tools.py b/sos_trades_api/tools/file_tools.py index 33f1cf33..a895be32 100644 --- a/sos_trades_api/tools/file_tools.py +++ b/sos_trades_api/tools/file_tools.py @@ -53,5 +53,51 @@ def read_object_in_json_file(file_path): return result +def retrieve_contain_from_file(file_path: str) -> list: + """ + :Summary: + Open a file and return its lines. + + :Args: + file_path (str): Path of file + + :Return: Lines of this file. + :rtype: list + """ + if file_path is not None and len(file_path.strip()) > 0: + if os.path.exists(file_path): + with open(file_path, 'r') as file: + lines = file.readlines() + if len(lines) > 0: + return lines + else: + raise FileExistsError(f"The file '{file_path}' is empty.") + else: + FileNotFoundError(f"The file '{file_path}' not found.") + else: + raise ValueError("The path cannot be none or empty.") + + +def get_cpu_usage_from_file(cpu_stat_path: str): + """ + :Summary: + Reads the CPU usage statistics from the specified cgroup CPU stat file. + This function opens the given file, reads its content, and extracts the + 'usage_usec' value which represents the total CPU time consumed by tasks + in this cgroup in microseconds. + + :Args: + cpu_stat_path (str): The file path to the cgroup CPU stat file. + + :Returns: + int: The total CPU usage in microseconds. 
+ + """ + cpu_line = retrieve_contain_from_file(cpu_stat_path) + cpu_stat = {} + for line in cpu_line: + key, value = line.split() + cpu_stat[key] = int(value) + return cpu_stat['usage_usec'] diff --git a/sos_trades_api/tools/kubernetes/kubernetes_service.py b/sos_trades_api/tools/kubernetes/kubernetes_service.py index 4b24f857..0cc59207 100644 --- a/sos_trades_api/tools/kubernetes/kubernetes_service.py +++ b/sos_trades_api/tools/kubernetes/kubernetes_service.py @@ -18,9 +18,11 @@ from functools import partial import urllib3 from kubernetes import client, config, watch +from datetime import datetime from sos_trades_api.server.base_server import app from sos_trades_api.tools.code_tools import convert_byte_into_byte_unit_targeted, extract_number_and_unit +from sos_trades_api.tools.file_tools import get_cpu_usage_from_file, retrieve_contain_from_file """ Execution engine kubernete @@ -292,28 +294,26 @@ def kubernetes_load_kube_config(): raise ExecutionEngineKuberneteError(message) -def kubernetes_get_pod_info(pod_name, pod_namespace, unit_byte_to_conversion: str): +def kubernetes_get_pod_info(pod_name: str, pod_namespace: str, unit_byte_to_conversion: str, cpu_limits: int) -> dict: """ - get pod usage info like cpu and memory - :param pod_name: unique name of the pod => metadata.name - :type pod_name: str - :param pod_namespace: namespace where is the pod - :type pod_namespace: str + :Summary: + Get pod usage info like cpu and memory - :param unit_byte_to_conversion: unit in byte targeted - :type unit_byte_to_conversion: str + :Args: + pod_name (str): unique name of the pod => metadata.name + pod_namespace (str): namespace where is the pod + unit_byte_to_conversion (str) : unit in byte targeted + cpu_limits (int) : limit of cpu from configuration - :return: dict with cpu usage (number of cpu) and memory usage (Go) + :return: + dict of cpu usage and memory usage """ result = { "cpu": "----", "memory": "----", } - max_wait_time = 20 # second - wait_time = 0 - 
polling_interval = 1 # Create k8 api client object kubernetes_load_kube_config() @@ -330,17 +330,14 @@ def kubernetes_get_pod_info(pod_name, pod_namespace, unit_byte_to_conversion: st if target_pod: print(f"pod '{target_pod.metadata.name}' is '{target_pod.status.phase}'") if target_pod.status.phase == "Running": - api = client.CustomObjectsApi() - async_request = api.list_namespaced_custom_object( + resources = api.list_namespaced_custom_object( group="metrics.k8s.io", version="v1beta1", namespace=pod_namespace, plural="pods", - async_req=True ) - resources = async_request.get() print(f"Pods list :{resources['items']}") pod_searched = list(filter(lambda pod: pod["metadata"]["name"] == pod_name, resources["items"])) if len(pod_searched) > 0: @@ -357,45 +354,50 @@ def kubernetes_get_pod_info(pod_name, pod_namespace, unit_byte_to_conversion: st result["cpu"] = pod_cpu result["memory"] = round(pod_memory_converted, 2) + return result + else: + cpu_stat_path = '/sys/fs/cgroup/cpu.stat' + # First measurement + start_time = datetime.now() + start_usage_usec = get_cpu_usage_from_file(cpu_stat_path) + + # Sleep for a short period + time.sleep(0.5) + + # Second measurement + end_time = datetime.now() + end_usage_usec = get_cpu_usage_from_file(cpu_stat_path) - # # cgroup v1 - # memory_current_path = "/sys/fs/cgroup/memory.current" - # cpu_stat_path = "/sys/fs/cgroup/cpu.stat" - # - # # Commande pour lire le fichier memory.current - # command_memory = ["cat", memory_current_path] - # command_cpu = ["cat", cpu_stat_path] - # from kubernetes.stream import stream - # # Exécuter la commande dans le pod - # response_memory = stream(v1.connect_get_namespaced_pod_exec, - # pod_name, - # pod_namespace, - # command=command_memory, - # stderr=True, stdin=False, - # stdout=True, tty=False) - # memory_converted = convert_byte_into_byte_unit_targeted(int(response_memory), "octet", unit_byte_to_conversion) - # response_cpu = stream(v1.connect_get_namespaced_pod_exec, - # pod_name, - # 
pod_namespace, - # command=command_cpu, - # stderr=True, stdin=False, - # stdout=True, tty=False) - # x = create_stat_dict(response_cpu) - # usage_usec = x['usage_usec'] - # - # - # result["memory"] = round(memory_converted, 2) - # result["cpu"] = round(usage_usec / 1e6, 2) + # Calculate elapsed time in seconds + elapsed_time_sec = (end_time - start_time).total_seconds() + # Calculate CPU usage in seconds + cpu_usage_seconds = (end_usage_usec - start_usage_usec) / 1e6 + # Calculate CPU usage percentage + cpu_usage = cpu_usage_seconds / elapsed_time_sec + + # Retrieve memory from file system + memory_path = "/sys/fs/cgroup/memory.current" + memory_lines = retrieve_contain_from_file(memory_path) + bytes_value = int(memory_lines[0]) + memory_converted = convert_byte_into_byte_unit_targeted(bytes_value, "byte", + unit_byte_to_conversion) + + result["memory"] = memory_converted + result["cpu"] = round(cpu_usage, 2) + + return result + else: + raise ExecutionEngineKuberneteError(f"Pod '{target_pod}' is not running. 
Status : {target_pod.status.phase}") + else: + raise ExecutionEngineKuberneteError(f"Pod '{pod_name}' not found") except Exception as error: message = f"Unable to retrieve pod metrics: {error}" app.logger.error(message) raise ExecutionEngineKuberneteError(message) - return result - def kubernetes_delete_deployment_and_service(pod_name, pod_namespace): """ delete service and deployment, this will kill the asssociated pod From fb3b896efaaa66d0fa96c4704fe9dc32bb68eae8 Mon Sep 17 00:00:00 2001 From: Geoffrey Delric <81676743+gdelric-capgemini@users.noreply.github.com> Date: Wed, 3 Jul 2024 16:58:30 +0200 Subject: [PATCH 50/73] [update metric] round the memory from file system --- .../tools/execution/execution_metrics.py | 16 +++++++--------- .../tools/kubernetes/kubernetes_service.py | 9 +++------ 2 files changed, 10 insertions(+), 15 deletions(-) diff --git a/sos_trades_api/tools/execution/execution_metrics.py b/sos_trades_api/tools/execution/execution_metrics.py index 1fc13dc5..257edf5b 100644 --- a/sos_trades_api/tools/execution/execution_metrics.py +++ b/sos_trades_api/tools/execution/execution_metrics.py @@ -59,13 +59,11 @@ def __update_database(self): """ # Infinite loop # The database connection is kept open - count_retry = 0 while self.__started: # Add an exception manager to ensure that database eoor will not # shut down calculation try: # Open a database context - count_retry += 1 with app.app_context(): study_case_execution = StudyCaseExecution.query.filter(StudyCaseExecution.id.like(self.__study_case_execution_id)).first() config = Config() @@ -95,11 +93,13 @@ def __update_database(self): if memory_limits_byte_converted is not None: memory_limits = round(memory_limits_byte_converted, 2) - # Retrieve memory and cpu from kubernetes - result = kubernetes_get_pod_info(study_case_allocation.kubernetes_pod_name, study_case_allocation.kubernetes_pod_namespace, unit_byte_to_conversion, int(cpu_limits)) + # Retrieve memory and cpu from kubernetes + result = 
kubernetes_get_pod_info(study_case_allocation.kubernetes_pod_name, study_case_allocation.kubernetes_pod_namespace, unit_byte_to_conversion) - cpu_metric = f'{result["cpu"]}/{cpu_limits}' - memory_metric = f'{result["memory"]}/{memory_limits} [{unit_byte_to_conversion}]' + cpu_metric = f'{result["cpu"]}/{cpu_limits}' + memory_metric = f'{result["memory"]}/{memory_limits} [{unit_byte_to_conversion}]' + else: + raise ValueError('Limit from configuration not found') else: # Check environment info @@ -122,6 +122,4 @@ def __update_database(self): finally: # Wait 2 seconds before next metrics if self.__started: - time.sleep(2) - - print(f"retry = {count_retry}") \ No newline at end of file + time.sleep(2) \ No newline at end of file diff --git a/sos_trades_api/tools/kubernetes/kubernetes_service.py b/sos_trades_api/tools/kubernetes/kubernetes_service.py index 0cc59207..58258f9e 100644 --- a/sos_trades_api/tools/kubernetes/kubernetes_service.py +++ b/sos_trades_api/tools/kubernetes/kubernetes_service.py @@ -294,7 +294,7 @@ def kubernetes_load_kube_config(): raise ExecutionEngineKuberneteError(message) -def kubernetes_get_pod_info(pod_name: str, pod_namespace: str, unit_byte_to_conversion: str, cpu_limits: int) -> dict: +def kubernetes_get_pod_info(pod_name: str, pod_namespace: str, unit_byte_to_conversion: str) -> dict: """ :Summary: @@ -310,10 +310,7 @@ def kubernetes_get_pod_info(pod_name: str, pod_namespace: str, unit_byte_to_conv dict of cpu usage and memory usage """ - result = { - "cpu": "----", - "memory": "----", - } + result = {} # Create k8 api client object kubernetes_load_kube_config() @@ -384,7 +381,7 @@ def kubernetes_get_pod_info(pod_name: str, pod_namespace: str, unit_byte_to_conv memory_converted = convert_byte_into_byte_unit_targeted(bytes_value, "byte", unit_byte_to_conversion) - result["memory"] = memory_converted + result["memory"] = round(memory_converted, 2) result["cpu"] = round(cpu_usage, 2) return result From 
079ac5ef949caa6d2e198ea40e841b357112c852 Mon Sep 17 00:00:00 2001 From: b4pm-devops Date: Wed, 3 Jul 2024 21:12:31 +0000 Subject: [PATCH 51/73] merge integration to validation From 64cdc4767c02e342d4de0d2b0dda69be981d78d8 Mon Sep 17 00:00:00 2001 From: Geoffrey Delric <81676743+gdelric-capgemini@users.noreply.github.com> Date: Thu, 4 Jul 2024 10:18:45 +0200 Subject: [PATCH 52/73] Get metrics (CPUI & RAM) from file system --- .../tools/execution/execution_metrics.py | 18 +++--- sos_trades_api/tools/file_tools.py | 60 ++++++++++++++++++- .../tools/kubernetes/kubernetes_service.py | 35 +---------- 3 files changed, 70 insertions(+), 43 deletions(-) diff --git a/sos_trades_api/tools/execution/execution_metrics.py b/sos_trades_api/tools/execution/execution_metrics.py index 257edf5b..331b8da2 100644 --- a/sos_trades_api/tools/execution/execution_metrics.py +++ b/sos_trades_api/tools/execution/execution_metrics.py @@ -23,7 +23,7 @@ from sos_trades_api.models.database_models import StudyCaseExecution, PodAllocation from sos_trades_api.server.base_server import app, db from sos_trades_api.tools.code_tools import extract_number_and_unit, convert_byte_into_byte_unit_targeted -from sos_trades_api.tools.kubernetes.kubernetes_service import kubernetes_get_pod_info +from sos_trades_api.tools.file_tools import get_metric_from_file_system """ Execution metric thread @@ -75,7 +75,7 @@ def __update_database(self): # Retrieve limits of pod from config cpu_limits = '----' memory_limits = '----' - unit_byte_to_conversion = "GB" + unit_byte_targeted = "GB" pod_exec_memory_limit_from_config = app.config[Config.CONFIG_FLAVOR_KUBERNETES][Config.CONFIG_FLAVOR_POD_EXECUTION][study_case_allocation.flavor]["limits"]["memory"] pod_exec_cpu_limit_from_config = app.config[Config.CONFIG_FLAVOR_KUBERNETES][Config.CONFIG_FLAVOR_POD_EXECUTION][study_case_allocation.flavor]["limits"]["cpu"] @@ -84,20 +84,22 @@ def __update_database(self): cpu_limits = str(''.join(re.findall(r'\d+', 
pod_exec_cpu_limit_from_config))) # Retrieve and convert memory limits if "mi" in pod_exec_memory_limit_from_config.lower(): - unit_byte_to_conversion = "MB" + unit_byte_targeted = "MB" # Retrieve and extract limit and its unit memory_limits_bit, memory_limits_unit_bit = extract_number_and_unit(pod_exec_memory_limit_from_config) memory_limits_byte_converted = convert_byte_into_byte_unit_targeted(memory_limits_bit, memory_limits_unit_bit, - unit_byte_to_conversion) + unit_byte_targeted) if memory_limits_byte_converted is not None: memory_limits = round(memory_limits_byte_converted, 2) - # Retrieve memory and cpu from kubernetes - result = kubernetes_get_pod_info(study_case_allocation.kubernetes_pod_name, study_case_allocation.kubernetes_pod_namespace, unit_byte_to_conversion) + # Retrieve memory and cpu from file system + memory_file_path = "/sys/fs/cgroup/memory.current" + cpu_file_path = "/sys/fs/cgroup/cpu.stat" + memory_usage, cpu_usage = get_metric_from_file_system(memory_file_path, cpu_file_path, unit_byte_targeted) - cpu_metric = f'{result["cpu"]}/{cpu_limits}' - memory_metric = f'{result["memory"]}/{memory_limits} [{unit_byte_to_conversion}]' + cpu_metric = f'{cpu_usage}/{cpu_limits}' + memory_metric = f'{memory_usage}/{memory_limits} [{unit_byte_targeted}]' else: raise ValueError('Limit from configuration not found') diff --git a/sos_trades_api/tools/file_tools.py b/sos_trades_api/tools/file_tools.py index a895be32..1f7df149 100644 --- a/sos_trades_api/tools/file_tools.py +++ b/sos_trades_api/tools/file_tools.py @@ -16,8 +16,11 @@ ''' import json import os +import time +from datetime import datetime from sos_trades_api.models.custom_json_encoder import CustomJsonEncoder +from sos_trades_api.tools.code_tools import convert_byte_into_byte_unit_targeted """ mode: python; py-indent-offset: 4; tab-width: 4; coding: utf-8 @@ -53,7 +56,60 @@ def read_object_in_json_file(file_path): return result -def retrieve_contain_from_file(file_path: str) -> list: +def 
get_metric_from_file_system(memory_file_path: str, cpu_file_path: str, unit_byte_to_conversion: str) -> tuple: + """ + :Summary: + Retrieves memory and CPU metrics from the file system and converts them to the specified unit. + + :Args: + memory_file_path (str): The file path to the memory stat file. + cpu_file_path (str): The file path to the CPU stat file. + unit_byte_to_conversion (str): The target unit for memory conversion. + + :Returns: + tuple: A tuple containing the converted memory usage and the CPU usage percentage. + + """ + if not memory_file_path or not memory_file_path.strip(): + raise ValueError("The memory file path cannot be none or empty.") + if not cpu_file_path or not cpu_file_path.strip(): + raise ValueError("The CPU file path cannot be none or empty.") + if not unit_byte_to_conversion or not unit_byte_to_conversion.strip(): + raise ValueError("The unit for memory conversion cannot be none or empty.") + + # Retrieve CPU usage from file system + # First measurement + start_time = datetime.now() + start_usage_usec = get_cpu_usage_from_file(cpu_file_path) + + # Sleep for a short period + time.sleep(0.5) + + # Second measurement + end_time = datetime.now() + end_usage_usec = get_cpu_usage_from_file(cpu_file_path) + + # Calculate elapsed time in seconds + elapsed_time_sec = (end_time - start_time).total_seconds() + + # Calculate CPU usage in seconds + cpu_usage_seconds = (end_usage_usec - start_usage_usec) / 1e6 + + # Calculate CPU usage percentage + cpu_usage = (cpu_usage_seconds / elapsed_time_sec) * 100 + + # Retrieve memory from file system + memory_lines = get_lines_from_file(memory_file_path) + if not memory_lines or not memory_lines[0].strip(): + raise FileExistsError(f"The file '{memory_file_path}' is empty or invalid.") + + bytes_value = int(memory_lines[0]) + memory_converted = convert_byte_into_byte_unit_targeted(bytes_value, "byte", unit_byte_to_conversion) + + return round(memory_converted, 2), round(cpu_usage, 2) + + +def 
get_lines_from_file(file_path: str) -> list: """ :Summary: Open a file and return its lines. @@ -93,7 +149,7 @@ def get_cpu_usage_from_file(cpu_stat_path: str): int: The total CPU usage in microseconds. """ - cpu_line = retrieve_contain_from_file(cpu_stat_path) + cpu_line = get_lines_from_file(cpu_stat_path) cpu_stat = {} for line in cpu_line: key, value = line.split() diff --git a/sos_trades_api/tools/kubernetes/kubernetes_service.py b/sos_trades_api/tools/kubernetes/kubernetes_service.py index 58258f9e..1b72a72f 100644 --- a/sos_trades_api/tools/kubernetes/kubernetes_service.py +++ b/sos_trades_api/tools/kubernetes/kubernetes_service.py @@ -353,42 +353,11 @@ def kubernetes_get_pod_info(pod_name: str, pod_namespace: str, unit_byte_to_conv return result else: - cpu_stat_path = '/sys/fs/cgroup/cpu.stat' - # First measurement - start_time = datetime.now() - start_usage_usec = get_cpu_usage_from_file(cpu_stat_path) - - # Sleep for a short period - time.sleep(0.5) - - # Second measurement - end_time = datetime.now() - end_usage_usec = get_cpu_usage_from_file(cpu_stat_path) - - # Calculate elapsed time in seconds - elapsed_time_sec = (end_time - start_time).total_seconds() - - # Calculate CPU usage in seconds - cpu_usage_seconds = (end_usage_usec - start_usage_usec) / 1e6 - - # Calculate CPU usage percentage - cpu_usage = cpu_usage_seconds / elapsed_time_sec - - # Retrieve memory from file system - memory_path = "/sys/fs/cgroup/memory.current" - memory_lines = retrieve_contain_from_file(memory_path) - bytes_value = int(memory_lines[0]) - memory_converted = convert_byte_into_byte_unit_targeted(bytes_value, "byte", - unit_byte_to_conversion) - - result["memory"] = round(memory_converted, 2) - result["cpu"] = round(cpu_usage, 2) - - return result + raise ExecutionEngineKuberneteError(f"Pod '{pod_name}' from CustomObjectsApi not found") else: raise ExecutionEngineKuberneteError(f"Pod '{target_pod}' is not running. 
Status : {target_pod.status.phase}") else: - raise ExecutionEngineKuberneteError(f"Pod '{pod_name}' not found") + raise ExecutionEngineKuberneteError(f"Pod '{pod_name}' from CoreV1Api not found") except Exception as error: message = f"Unable to retrieve pod metrics: {error}" From 8dc7e2300104a4f7214384847f277e1819336810 Mon Sep 17 00:00:00 2001 From: Geoffrey Delric <81676743+gdelric-capgemini@users.noreply.github.com> Date: Thu, 4 Jul 2024 10:19:39 +0200 Subject: [PATCH 53/73] remove import not used --- sos_trades_api/tools/kubernetes/kubernetes_service.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/sos_trades_api/tools/kubernetes/kubernetes_service.py b/sos_trades_api/tools/kubernetes/kubernetes_service.py index 1b72a72f..de5001d5 100644 --- a/sos_trades_api/tools/kubernetes/kubernetes_service.py +++ b/sos_trades_api/tools/kubernetes/kubernetes_service.py @@ -18,11 +18,9 @@ from functools import partial import urllib3 from kubernetes import client, config, watch -from datetime import datetime from sos_trades_api.server.base_server import app from sos_trades_api.tools.code_tools import convert_byte_into_byte_unit_targeted, extract_number_and_unit -from sos_trades_api.tools.file_tools import get_cpu_usage_from_file, retrieve_contain_from_file """ Execution engine kubernete From 5050494dc4a7bb34b255d0035ba7a69cc17e5395 Mon Sep 17 00:00:00 2001 From: Geoffrey Delric <81676743+gdelric-capgemini@users.noreply.github.com> Date: Thu, 4 Jul 2024 15:16:55 +0200 Subject: [PATCH 54/73] [update metrics ] Convert cpu limits in core if needed --- sos_trades_api/tools/execution/execution_metrics.py | 10 +++++++++- sos_trades_api/tools/file_tools.py | 2 +- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/sos_trades_api/tools/execution/execution_metrics.py b/sos_trades_api/tools/execution/execution_metrics.py index 331b8da2..61eb830e 100644 --- a/sos_trades_api/tools/execution/execution_metrics.py +++ b/sos_trades_api/tools/execution/execution_metrics.py @@ 
-81,7 +81,12 @@ def __update_database(self): if pod_exec_memory_limit_from_config is not None and pod_exec_cpu_limit_from_config: # CPU limits - cpu_limits = str(''.join(re.findall(r'\d+', pod_exec_cpu_limit_from_config))) + cpu_limits = pod_exec_cpu_limit_from_config + if "m" in cpu_limits: + cpu_millicore, cpu_limits_unit = extract_number_and_unit(pod_exec_cpu_limit_from_config) + # Convert cpu in core + cpu_limits = cpu_millicore / 1000 + # Retrieve and convert memory limits if "mi" in pod_exec_memory_limit_from_config.lower(): unit_byte_targeted = "MB" @@ -98,6 +103,9 @@ def __update_database(self): cpu_file_path = "/sys/fs/cgroup/cpu.stat" memory_usage, cpu_usage = get_metric_from_file_system(memory_file_path, cpu_file_path, unit_byte_targeted) + if memory_usage is None or cpu_usage is None: + raise ValueError('Metrics from file system not found') + cpu_metric = f'{cpu_usage}/{cpu_limits}' memory_metric = f'{memory_usage}/{memory_limits} [{unit_byte_targeted}]' else: diff --git a/sos_trades_api/tools/file_tools.py b/sos_trades_api/tools/file_tools.py index 1f7df149..9c39b838 100644 --- a/sos_trades_api/tools/file_tools.py +++ b/sos_trades_api/tools/file_tools.py @@ -96,7 +96,7 @@ def get_metric_from_file_system(memory_file_path: str, cpu_file_path: str, unit_ cpu_usage_seconds = (end_usage_usec - start_usage_usec) / 1e6 # Calculate CPU usage percentage - cpu_usage = (cpu_usage_seconds / elapsed_time_sec) * 100 + cpu_usage = (cpu_usage_seconds / elapsed_time_sec) # Retrieve memory from file system memory_lines = get_lines_from_file(memory_file_path) From dd71bd70247445041670d73ffd493d3daa3e06d2 Mon Sep 17 00:00:00 2001 From: Geoffrey Delric <81676743+gdelric-capgemini@users.noreply.github.com> Date: Thu, 4 Jul 2024 17:18:39 +0200 Subject: [PATCH 55/73] Add last metric recorded --- .../controllers/sostrades_main/study_case_controller.py | 2 ++ sos_trades_api/models/study_case_dto.py | 4 ++++ 2 files changed, 6 insertions(+) diff --git 
a/sos_trades_api/controllers/sostrades_main/study_case_controller.py b/sos_trades_api/controllers/sostrades_main/study_case_controller.py index b33f165a..d4bf7830 100644 --- a/sos_trades_api/controllers/sostrades_main/study_case_controller.py +++ b/sos_trades_api/controllers/sostrades_main/study_case_controller.py @@ -341,6 +341,8 @@ def load_study_case(study_id, study_access_right, user_id, reload=False): study_case_execution = StudyCaseExecution.query.filter(StudyCaseExecution.id == study_case.current_execution_id).first() loaded_study_case.study_case.execution_status = study_case_execution.execution_status + loaded_study_case.study_case.last_memory_usage = study_case_execution.memory_usage + loaded_study_case.study_case.last_cpu_usage = study_case_execution.cpu_usage app.logger.info(f"load_study_case {study_id}, get cache: {cache_duration}") diff --git a/sos_trades_api/models/study_case_dto.py b/sos_trades_api/models/study_case_dto.py index 6da6dfdb..a82c1ace 100644 --- a/sos_trades_api/models/study_case_dto.py +++ b/sos_trades_api/models/study_case_dto.py @@ -64,6 +64,8 @@ def __init__(self, study_case_instance=None, owner_group=None): self.study_pod_flavor = None self.execution_pod_flavor = None self.generation_pod_flavor = None + self.last_memory_usage = "" + self.last_cpu_usage = "" if study_case_instance is not None: self.id = study_case_instance.id @@ -151,6 +153,8 @@ def serialize(self): result.update({"study_pod_flavor": self.study_pod_flavor}) result.update({"execution_pod_flavor": self.execution_pod_flavor}) result.update({"generation_pod_flavor": self.generation_pod_flavor}) + result.update({"last_memory_usage": self.last_memory_usage}) + result.update({"last_cpu_usage": self.last_cpu_usage}) return result From 236969d4ede51ed206a7d58ef266c17bd7c23259 Mon Sep 17 00:00:00 2001 From: b4pm-devops Date: Thu, 4 Jul 2024 21:12:32 +0000 Subject: [PATCH 56/73] Merge integration to validation and update version.info --- sos_trades_api/version.info | 2 +- 1 
file changed, 1 insertion(+), 1 deletion(-) diff --git a/sos_trades_api/version.info b/sos_trades_api/version.info index c1b7ec9a..25df4eb5 100644 --- a/sos_trades_api/version.info +++ b/sos_trades_api/version.info @@ -1 +1 @@ -Wed Jul 3 21:12:31 UTC 2024 +Thu Jul 4 21:12:32 UTC 2024 From 71e7c2b57a5f705d8b40e0adf9624dd727bb0596 Mon Sep 17 00:00:00 2001 From: b4pm-devops Date: Thu, 4 Jul 2024 21:12:32 +0000 Subject: [PATCH 57/73] merge integration to validation From 68d81126a237054f00337cf552ff7dca858c6b76 Mon Sep 17 00:00:00 2001 From: magueylard Date: Fri, 5 Jul 2024 10:37:32 +0200 Subject: [PATCH 58/73] fix ruff errors --- sos_trades_api/tools/execution/execution_metrics.py | 9 ++++++--- sos_trades_api/tools/kubernetes/kubernetes_service.py | 6 +++++- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/sos_trades_api/tools/execution/execution_metrics.py b/sos_trades_api/tools/execution/execution_metrics.py index 61eb830e..4791d55a 100644 --- a/sos_trades_api/tools/execution/execution_metrics.py +++ b/sos_trades_api/tools/execution/execution_metrics.py @@ -16,13 +16,16 @@ ''' import threading import time -import re + import psutil from sos_trades_api.config import Config -from sos_trades_api.models.database_models import StudyCaseExecution, PodAllocation +from sos_trades_api.models.database_models import PodAllocation, StudyCaseExecution from sos_trades_api.server.base_server import app, db -from sos_trades_api.tools.code_tools import extract_number_and_unit, convert_byte_into_byte_unit_targeted +from sos_trades_api.tools.code_tools import ( + convert_byte_into_byte_unit_targeted, + extract_number_and_unit, +) from sos_trades_api.tools.file_tools import get_metric_from_file_system """ diff --git a/sos_trades_api/tools/kubernetes/kubernetes_service.py b/sos_trades_api/tools/kubernetes/kubernetes_service.py index de5001d5..66e3e66b 100644 --- a/sos_trades_api/tools/kubernetes/kubernetes_service.py +++ 
b/sos_trades_api/tools/kubernetes/kubernetes_service.py @@ -16,11 +16,15 @@ ''' import time from functools import partial + import urllib3 from kubernetes import client, config, watch from sos_trades_api.server.base_server import app -from sos_trades_api.tools.code_tools import convert_byte_into_byte_unit_targeted, extract_number_and_unit +from sos_trades_api.tools.code_tools import ( + convert_byte_into_byte_unit_targeted, + extract_number_and_unit, +) """ Execution engine kubernete From e6c9582392662273d401ed9d418205f1cae8d7b8 Mon Sep 17 00:00:00 2001 From: b4pm-devops Date: Fri, 5 Jul 2024 21:11:34 +0000 Subject: [PATCH 59/73] Merge integration to validation and update version.info --- sos_trades_api/version.info | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sos_trades_api/version.info b/sos_trades_api/version.info index 25df4eb5..44bb80fe 100644 --- a/sos_trades_api/version.info +++ b/sos_trades_api/version.info @@ -1 +1 @@ -Thu Jul 4 21:12:32 UTC 2024 +Fri Jul 5 21:11:34 UTC 2024 From 7751c1d8ba156428b37a47f079a62defbb68bc11 Mon Sep 17 00:00:00 2001 From: b4pm-devops Date: Fri, 5 Jul 2024 21:11:34 +0000 Subject: [PATCH 60/73] merge integration to validation From 32cc84491e0764b63bbc9870e71885b9a926e7c2 Mon Sep 17 00:00:00 2001 From: GOYON Guillaume Date: Mon, 8 Jul 2024 09:59:22 +0200 Subject: [PATCH 61/73] Update urllib to fix CVE --- CREDITS.rst | 2 +- requirements.in | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CREDITS.rst b/CREDITS.rst index 63684b25..f3c87948 100644 --- a/CREDITS.rst +++ b/CREDITS.rst @@ -90,7 +90,7 @@ sostrades-webapi depends on software with compatible licenses that are listed be `SQLAlchemy (1.3.13) `_ MIT -`urllib3 (2.1.0) `_ +`urllib3 (2.2.2) `_ MIT License `werkzeug (2.0.3) `_ diff --git a/requirements.in b/requirements.in index 86ead867..b2519d89 100644 --- a/requirements.in +++ b/requirements.in @@ -34,7 +34,7 @@ PyYAML==6.0.1 requests==2.32.3 simplejson==3.19.2 SQLAlchemy==1.3.13 
-urllib3==2.1.0 +urllib3==2.2.2 werkzeug==2.0.3 # Development requirements From 3b0ea253f9f8feaa4ac00499d3389754219a101c Mon Sep 17 00:00:00 2001 From: b4pm-devops Date: Mon, 8 Jul 2024 21:11:37 +0000 Subject: [PATCH 62/73] merge integration to validation From 535fd96f47447e4f32cd7805ec09fdc612ccf402 Mon Sep 17 00:00:00 2001 From: GOYON Guillaume Date: Tue, 9 Jul 2024 14:52:12 +0200 Subject: [PATCH 63/73] Updated version of some libraries --- CREDITS.rst | 6 +++--- requirements.in | 10 +++++----- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/CREDITS.rst b/CREDITS.rst index f3c87948..20cd43da 100644 --- a/CREDITS.rst +++ b/CREDITS.rst @@ -6,7 +6,7 @@ sostrades-webapi depends on software with compatible licenses that are listed be `click (8.1.7) `_ BSD-3-Clause -`python-dotenv (0.12.0) `_ +`python-dotenv (1.0.1) `_ BSD-3-Clause `eventlet (0.33.3) `_ @@ -54,7 +54,7 @@ sostrades-webapi depends on software with compatible licenses that are listed be `kubernetes (29.0.0) `_ Apache License Version 2.0 -`mysqlclient (2.2.0) `_ +`mysqlclient (2.2.4) `_ GNU General Public License v2 (GPLv2) `numpy (1.24.4) `_ @@ -66,7 +66,7 @@ sostrades-webapi depends on software with compatible licenses that are listed be `plotly (5.3.0) `_ MIT -`psutil (5.9.5) `_ +`psutil (6.0.0) `_ BSD-3-Clause `python-ldap (3.3.0) `_ diff --git a/requirements.in b/requirements.in index b2519d89..cb9d1f0a 100644 --- a/requirements.in +++ b/requirements.in @@ -16,20 +16,20 @@ graphviz==0.16 itsdangerous==2.0.1 # Necessary to be compatible with flask 1.1.1 jinja2==3.0.1 # Necessary to be compatible with flask 1.1.1 kubernetes==29.0.0 -mysqlclient==2.2.4 +mysqlclient==2.2.0 numpy==1.24.4 pandas==2.2.2 plotly==5.3.0 -psutil==5.9.5 +psutil==6.0.0 PyJWT==1.7.1 -python-dotenv==0.12.0 +python-dotenv==1.0.1 python-ldap==3.4.0; platform_system!='Windows' https://download.lfd.uci.edu/pythonlibs/archived/python_ldap-3.4.0-cp39-cp39-win_amd64.whl; platform_system=='Windows' -python-keycloak==4.0.0 
+python-keycloak==4.2.0 python-engineio==4.5.1 # Must be added to freeze version, so message server works python-socketio==5.8.0 # Must be added to freeze version, so message server works python3-saml==1.9.0 -pytz==2023.3.post1 +pytz==2024.1 PyYAML==6.0.1 requests==2.32.3 simplejson==3.19.2 From cd44ead77f7781e47e9ef68b6955172a6ae1e972 Mon Sep 17 00:00:00 2001 From: GOYON Guillaume Date: Tue, 9 Jul 2024 15:32:32 +0200 Subject: [PATCH 64/73] Rollback mysql version as latest has issues --- requirements.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.in b/requirements.in index cb9d1f0a..c48990d3 100644 --- a/requirements.in +++ b/requirements.in @@ -16,7 +16,7 @@ graphviz==0.16 itsdangerous==2.0.1 # Necessary to be compatible with flask 1.1.1 jinja2==3.0.1 # Necessary to be compatible with flask 1.1.1 kubernetes==29.0.0 -mysqlclient==2.2.4 +mysqlclient==2.2.3 numpy==1.24.4 pandas==2.2.2 plotly==5.3.0 From e866a816fdbc129c27eebf1d35d6e403c45c2f58 Mon Sep 17 00:00:00 2001 From: GOYON Guillaume Date: Tue, 9 Jul 2024 16:06:31 +0200 Subject: [PATCH 65/73] Rollback mysqlclient lib causing issues in connect --- requirements.in | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/requirements.in b/requirements.in index c48990d3..91268871 100644 --- a/requirements.in +++ b/requirements.in @@ -16,7 +16,7 @@ graphviz==0.16 itsdangerous==2.0.1 # Necessary to be compatible with flask 1.1.1 jinja2==3.0.1 # Necessary to be compatible with flask 1.1.1 kubernetes==29.0.0 -mysqlclient==2.2.3 +mysqlclient==2.2.0 # Issue when moving up due to https://stackoverflow.com/questions/76688014/error-connections-using-insecure-transport-are-prohibited-while-require-secur numpy==1.24.4 pandas==2.2.2 plotly==5.3.0 @@ -38,7 +38,9 @@ urllib3==2.2.2 werkzeug==2.0.3 # Development requirements -pytest==7.4.3 -pytest-cov==4.1.0 -pytest-xdist==3.4.0 +# breaking change in 8.2.0 
https://docs.pytest.org/en/stable/changelog.html#pytest-8-2-0-2024-04-27 +# Will be fixed in 8.3.0 https://github.com/pytest-dev/pytest/issues/12275#issuecomment-2108348204 +pytest==8.1.2 +pytest-cov==5.0.0 +pytest-xdist==3.6.1 pytest-durations==1.2.0 From a2f6244654781836953f03c9295f086c1608b71b Mon Sep 17 00:00:00 2001 From: GOYON Guillaume Date: Tue, 9 Jul 2024 16:12:58 +0200 Subject: [PATCH 66/73] Pytest version back to 7.4.3 for witness compatibility --- requirements.in | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/requirements.in b/requirements.in index 91268871..1b9b3eb0 100644 --- a/requirements.in +++ b/requirements.in @@ -38,9 +38,7 @@ urllib3==2.2.2 werkzeug==2.0.3 # Development requirements -# breaking change in 8.2.0 https://docs.pytest.org/en/stable/changelog.html#pytest-8-2-0-2024-04-27 -# Will be fixed in 8.3.0 https://github.com/pytest-dev/pytest/issues/12275#issuecomment-2108348204 -pytest==8.1.2 -pytest-cov==5.0.0 -pytest-xdist==3.6.1 +pytest==7.4.3 +pytest-cov==4.1.0 +pytest-xdist==3.4.0 pytest-durations==1.2.0 From 0b8774346a467ae57e77c1d82fcbe13bf88b4b2f Mon Sep 17 00:00:00 2001 From: b4pm-devops Date: Tue, 9 Jul 2024 21:11:33 +0000 Subject: [PATCH 67/73] merge integration to validation From 3e4a8cafd198b7d9493326e0a4468002f6451a46 Mon Sep 17 00:00:00 2001 From: GOYON Guillaume Date: Wed, 10 Jul 2024 10:37:26 +0200 Subject: [PATCH 68/73] Added information about the version of kubernetes --- requirements.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.in b/requirements.in index 1b9b3eb0..68c0c9f8 100644 --- a/requirements.in +++ b/requirements.in @@ -15,7 +15,7 @@ gitpython==3.1.43 graphviz==0.16 itsdangerous==2.0.1 # Necessary to be compatible with flask 1.1.1 jinja2==3.0.1 # Necessary to be compatible with flask 1.1.1 -kubernetes==29.0.0 +kubernetes==29.0.0 # Linked to kubernetes version deployed mysqlclient==2.2.0 # Issue when moving up due to 
https://stackoverflow.com/questions/76688014/error-connections-using-insecure-transport-are-prohibited-while-require-secur numpy==1.24.4 pandas==2.2.2 From 03bbd5a99754bf802c2340f90911e6765ceef461 Mon Sep 17 00:00:00 2001 From: b4pm-devops Date: Wed, 10 Jul 2024 13:05:37 +0000 Subject: [PATCH 69/73] merge integration to validation From a44448d9ccb340759110b09c2ac2cdc65c8f6e06 Mon Sep 17 00:00:00 2001 From: b4pm-devops Date: Thu, 11 Jul 2024 09:25:04 +0000 Subject: [PATCH 70/73] Merge integration to validation and update version.info --- sos_trades_api/version.info | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sos_trades_api/version.info b/sos_trades_api/version.info index 84e4c882..a6d671ba 100644 --- a/sos_trades_api/version.info +++ b/sos_trades_api/version.info @@ -1 +1 @@ -Wed Jul 10 13:05:37 UTC 2024 +Thu Jul 11 09:25:04 UTC 2024 From 8268fc9b3dfb9f9260e01009d538fc58ef11f260 Mon Sep 17 00:00:00 2001 From: b4pm-devops Date: Thu, 11 Jul 2024 09:25:04 +0000 Subject: [PATCH 71/73] merge integration to validation From 0ee865d3c85a7cda28f9c75f24962f68d00fdb15 Mon Sep 17 00:00:00 2001 From: b4pm-devops Date: Fri, 12 Jul 2024 12:57:46 +0000 Subject: [PATCH 72/73] Merge integration to validation and update version.info --- sos_trades_api/version.info | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sos_trades_api/version.info b/sos_trades_api/version.info index a6d671ba..094e0bc9 100644 --- a/sos_trades_api/version.info +++ b/sos_trades_api/version.info @@ -1 +1 @@ -Thu Jul 11 09:25:04 UTC 2024 +Fri Jul 12 12:57:46 UTC 2024 From e4a46bb05a42b6dc1cd5f9426ac942f086cd4893 Mon Sep 17 00:00:00 2001 From: b4pm-devops Date: Fri, 12 Jul 2024 12:57:46 +0000 Subject: [PATCH 73/73] merge integration to validation