From 939585fac329da75071079e263ae1e7a4e3f82a7 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?L=C3=A9on=20Kuchenbecker?=
Date: Wed, 8 Nov 2023 12:58:27 +0000
Subject: [PATCH] Python manual fixes

---
 .local/microservice-repository-template          |  1 +
 src/ghga_datasteward_kit/batch_s3_upload.py      |  6 ++++--
 src/ghga_datasteward_kit/models.py               |  1 +
 src/ghga_datasteward_kit/s3_upload/config.py     | 16 +++++-----------
 src/ghga_datasteward_kit/s3_upload/downloader.py |  4 ++--
 src/ghga_datasteward_kit/s3_upload/entrypoint.py |  4 +---
 src/ghga_datasteward_kit/s3_upload/uploader.py   |  4 ++--
 src/ghga_datasteward_kit/s3_upload/utils.py      |  2 ++
 tests/test_file_ingest.py                        |  4 +++-
 9 files changed, 21 insertions(+), 21 deletions(-)
 create mode 160000 .local/microservice-repository-template

diff --git a/.local/microservice-repository-template b/.local/microservice-repository-template
new file mode 160000
index 0000000..fef58b7
--- /dev/null
+++ b/.local/microservice-repository-template
@@ -0,0 +1 @@
+Subproject commit fef58b7eeac1ca8205c04b04b34f3d28400d28cc
diff --git a/src/ghga_datasteward_kit/batch_s3_upload.py b/src/ghga_datasteward_kit/batch_s3_upload.py
index 86fb370..ed6fa6f 100755
--- a/src/ghga_datasteward_kit/batch_s3_upload.py
+++ b/src/ghga_datasteward_kit/batch_s3_upload.py
@@ -115,11 +115,13 @@ def trigger_file_upload(
         return None
 
     logging.info("The upload of the file with alias '%s' has started.", file.alias)
-    return subprocess.Popen(command_line, shell=True, executable="/bin/bash")  # nosec
+    return subprocess.Popen(
+        command_line, shell=True, executable="/bin/bash"  # noqa: S602
+    )
 
 
 # pylint: disable=too-many-nested-blocks,too-many-branches, too-many-arguments
-def handle_file_uploads(  # noqa: R0913,C901
+def handle_file_uploads(  # noqa: PLR0913, PLR0912
     files: list[FileMetadata],
     output_dir: Path,
     config_path: Path,
diff --git a/src/ghga_datasteward_kit/models.py b/src/ghga_datasteward_kit/models.py
index 22294a3..44f78e4 100644
--- a/src/ghga_datasteward_kit/models.py
+++ b/src/ghga_datasteward_kit/models.py
@@ -36,6 +36,7 @@ def __init__(self):
         self.encrypted_sha256: list[str] = []
 
     def __repr__(self) -> str:
+        """Returns a human readable representation of the Checksums object."""
         return (
             f"Unencrypted: {self.unencrypted_sha256.hexdigest()}\n"
             + f"Encrypted MD5: {self.encrypted_md5}\n"
diff --git a/src/ghga_datasteward_kit/s3_upload/config.py b/src/ghga_datasteward_kit/s3_upload/config.py
index 0a885b3..253e8f2 100644
--- a/src/ghga_datasteward_kit/s3_upload/config.py
+++ b/src/ghga_datasteward_kit/s3_upload/config.py
@@ -23,8 +23,8 @@
 
 def expand_env_vars_in_path(path: Path) -> Path:
     """Expand environment variables in a Path."""
-    with subprocess.Popen(  # nosec
-        f"realpath {path}", shell=True, stdout=subprocess.PIPE
+    with subprocess.Popen(
+        f"realpath {path}", shell=True, stdout=subprocess.PIPE  # noqa: S602
     ) as process:
         if process.wait() != 0 or not process.stdout:
             raise RuntimeError(f"Parsing of path failed: {path}")
@@ -35,9 +35,7 @@ def expand_env_vars_in_path(path: Path) -> Path:
 
 
 class LegacyConfig(BaseSettings):
-    """
-    Required options for legacy file uploads.
-    """
+    """Required options for legacy file uploads."""
 
     s3_endpoint_url: SecretStr = Field(
         ..., description="URL of the local data hub's S3 server."
@@ -78,17 +76,13 @@ class LegacyConfig(BaseSettings):
     )
 
     @validator("output_dir")
-    def expand_env_vars_output_dir(
-        cls, output_dir: Path
-    ):  # pylint: disable=no-self-argument
+    def expand_env_vars_output_dir(cls, output_dir: Path):  # noqa: N805
         """Expand vars in path"""
         return expand_env_vars_in_path(output_dir)
 
 
 class Config(LegacyConfig):
-    """
-    Required options for file uploads.
-    """
+    """Required options for file uploads."""
 
     secret_ingest_pubkey: str = Field(
         ...,
diff --git a/src/ghga_datasteward_kit/s3_upload/downloader.py b/src/ghga_datasteward_kit/s3_upload/downloader.py
index 23bfc3a..7a5a06c 100644
--- a/src/ghga_datasteward_kit/s3_upload/downloader.py
+++ b/src/ghga_datasteward_kit/s3_upload/downloader.py
@@ -33,7 +33,7 @@
 class ChunkedDownloader:  # pylint: disable=too-many-instance-attributes
     """Handler class dealing with download functionality"""
 
-    def __init__(  # pylint: disable=too-many-arguments
+    def __init__(  # noqa: PLR0913
         self,
         config: LegacyConfig,
         file_id: str,
@@ -88,7 +88,7 @@ async def download(self):
 
     async def validate_checksums(self, checkums: models.Checksums):
         """Confirm checksums for upload and download match"""
-        if not self.target_checksums.get() == checkums.get():
+        if self.target_checksums.get() != checkums.get():
             message = (
                 "Checksum mismatch:\n"
                 + f"Upload:\n{checkums}\nDownload:\n{self.target_checksums}\n"
diff --git a/src/ghga_datasteward_kit/s3_upload/entrypoint.py b/src/ghga_datasteward_kit/s3_upload/entrypoint.py
index 2d832d9..e47e1b9 100755
--- a/src/ghga_datasteward_kit/s3_upload/entrypoint.py
+++ b/src/ghga_datasteward_kit/s3_upload/entrypoint.py
@@ -14,9 +14,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-"""
-Contains functionality to actually run the S3 upload.
-"""
+"""Contains functionality to actually run the S3 upload."""
 
 import asyncio
 import base64
diff --git a/src/ghga_datasteward_kit/s3_upload/uploader.py b/src/ghga_datasteward_kit/s3_upload/uploader.py
index d289323..703afc3 100644
--- a/src/ghga_datasteward_kit/s3_upload/uploader.py
+++ b/src/ghga_datasteward_kit/s3_upload/uploader.py
@@ -35,7 +35,7 @@
 class ChunkedUploader:  # pylint: disable=too-many-instance-attributes
     """Handler class dealing with upload functionality"""
 
-    def __init__(  # pylint: disable=too-many-arguments
+    def __init__(  # noqa: PLR0913
         self,
         input_path: Path,
         alias: str,
@@ -97,7 +97,7 @@ async def encrypt_and_upload(self):
 class MultipartUpload:
     """Context manager to handle init + complete/abort for S3 multipart upload"""
 
-    def __init__(  # pylint: disable=too-many-arguments
+    def __init__(  # noqa: PLR0913
         self,
         config: LegacyConfig,
         file_id: str,
diff --git a/src/ghga_datasteward_kit/s3_upload/utils.py b/src/ghga_datasteward_kit/s3_upload/utils.py
index 6a9b813..b522916 100644
--- a/src/ghga_datasteward_kit/s3_upload/utils.py
+++ b/src/ghga_datasteward_kit/s3_upload/utils.py
@@ -228,9 +228,11 @@ def __init__(self, *, config: LegacyConfig) -> None:
         self.storage = get_object_storage(config=config)
 
     async def __aenter__(self):
+        """The context manager enter function."""
         return self
 
     async def __aexit__(self, exc_t, exc_v, exc_tb):
+        """The context manager exit function."""
         # error handling while upload is still ongoing
         if isinstance(
             exc_v, (self.MultipartUploadCompletionError, self.PartUploadError)
diff --git a/tests/test_file_ingest.py b/tests/test_file_ingest.py
index 8a98aaf..40949b6 100644
--- a/tests/test_file_ingest.py
+++ b/tests/test_file_ingest.py
@@ -13,6 +13,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+"""File ingest tests."""
+
 import pytest
 import yaml
 from ghga_service_commons.utils.simple_token import generate_token
@@ -43,7 +45,7 @@ async def test_alias_to_accession(legacy_ingest_fixture: IngestFixture):  # noqa
         map_fields=legacy_ingest_fixture.config.map_files_fields,
         submission_store=submission_store,
     )
-    example_accession = list(
+    example_accession = list(  # noqa: RUF015
         EXAMPLE_SUBMISSION.accession_map[
             legacy_ingest_fixture.config.map_files_fields[0]
         ].values()