diff --git a/Dockerfile b/Dockerfile
index 046a642..edbebf4 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -4,9 +4,9 @@ ARG LAMBDA_TASK_ROOT=/var/task
 
 RUN apt-get update && apt-get upgrade -y && \
     apt-get install --no-install-recommends --yes wget ca-certificates && \
-    wget https://github.com/seqeralabs/tower-cli/releases/download/v0.5/tw-0.5-linux-x86_64 && \
-    chmod +x ./tw-0.5-linux-x86_64 && \
-    mv ./tw-0.5-linux-x86_64 /usr/local/bin/tw && \
+    wget https://github.com/seqeralabs/tower-cli/releases/download/v0.9.2/tw-linux-x86_64 && \
+    chmod +x ./tw-linux-x86_64 && \
+    mv ./tw-linux-x86_64 /usr/local/bin/tw && \
     rm -rf /var/lib/apt/lists/*
 
 WORKDIR ${LAMBDA_TASK_ROOT}
diff --git a/app.py b/app.py
index b3097f1..aa71f3a 100644
--- a/app.py
+++ b/app.py
@@ -107,7 +107,7 @@ def get_parameters(session=None, params_to_retrieve=None):
         underlying image or Lambda function code.
     Example:
         {'Parameter':
-            {'Name': '/lambda_tutorial/workspace_id',
+            {'Name': '/lambda_tutorial_adrian/workspace_id',
             'Type': 'String',
             'Value': '34830707738561',
             'Version': 3,
@@ -124,7 +124,7 @@ def get_parameters(session=None, params_to_retrieve=None):
         logger.debug(tw_params[param])
 
         # Update logging_level based on logging_level key.
-        if param == '/lambda_tutorial/logging_level':
+        if param == '/lambda_tutorial_adrian/logging_level':
             desired_level = tw_params[param]
             if desired_level.upper() != "DEBUG":
                 update_logging_level(desired_level=desired_level)
@@ -148,13 +148,13 @@ def get_secrets(session=None):
        for a simpler implementation.
     Example:
         {'tower_PAT':
-            {'ARN': 'arn:aws:secretsmanager:YOUR_REGION:YOUR_ACCOUNT:secret:lambda_tutorial/tower_PAT-Abcdef',
-            'Name': 'lambda_tutorial/tower_PAT',
+            {'ARN': 'arn:aws:secretsmanager:YOUR_REGION:YOUR_ACCOUNT:secret:lambda_tutorial_adrian/tower_PAT-Abcdef',
+            'Name': 'lambda_tutorial_adrian/tower_PAT',
             'SecretString': 'eyJ0.....',
             ...
     '''
     tw_secrets = {}
-    secret_name = "lambda_tutorial/tower_PAT"
+    secret_name = "lambda_tutorial_adrian/tower_PAT"
     secrets_client: SecretsManagerClient = session.client('secretsmanager')
 
     try:
@@ -205,7 +205,7 @@ def set_environment_variables(tw_params=None, tw_secrets=None):
        Set this before running tw transactions.
     '''
     os.environ['TOWER_ACCESS_TOKEN'] = tw_secrets['tower_PAT']
-    os.environ['TOWER_API_ENDPOINT'] = tw_params['/lambda_tutorial/tower_api_endpoint']
+    os.environ['TOWER_API_ENDPOINT'] = tw_params['/lambda_tutorial_adrian/tower_api_endpoint']
 
 
 def check_if_event_in_scope(event=None, tw_params=None):
@@ -216,7 +216,7 @@ def check_if_event_in_scope(event=None, tw_params=None):
         2) Boto3 S3 client list_objects requires the Prefix parameter to have a '/' at the end
            so we add this back in since the split function removes it
     Example of object key:
-        "lambda_tutorial/samplesheet_full.csv"
+        "lambda_tutorial_adrian/samplesheet_full.csv"
     '''
     # Check if event should be processed or ignored. Cease processing if:
     #   1) Notification isn't from designated prefix.
@@ -224,7 +224,7 @@ def check_if_event_in_scope(event=None, tw_params=None):
     event_key = event['Records'][0]['s3']['object']['key']
     filetype = event_key.rsplit('.', 1)[1]
 
-    if not event_key.startswith(tw_params['/lambda_tutorial/s3_root_prefix']):
+    if not event_key.startswith(tw_params['/lambda_tutorial_adrian/s3_root_prefix']):
         # Event is out of scope and should not be retried.
         log_error_and_raise_exception(
             errorstring=f"Event key: {event_key} does not match designated prefix. Cease processing.",
@@ -232,7 +232,7 @@ def check_if_event_in_scope(event=None, tw_params=None):
             retry_transaction=False
         )
 
-    if filetype not in tw_params['/lambda_tutorial/samplesheet_file_types'].split(','):
+    if filetype not in tw_params['/lambda_tutorial_adrian/samplesheet_file_types'].split(','):
         # Event is out of scope and should not be retried.
         log_error_and_raise_exception(
             errorstring=f"Event key: {event_key} not a trigger file type. Cease processing.",
@@ -253,7 +253,7 @@ def download_samplesheet(session=None, event=None):
     try:
         s3bucket = event['Records'][0]['s3']['bucket']['name']
         s3key = event['Records'][0]['s3']['object']['key']
-        # Example of key: "lambda_tutorial/complete.txt"
+        # Example of key: "lambda_tutorial_adrian/complete.txt"
         samplesheet_filename = s3key.rsplit('/')[1]
         dataset_name = samplesheet_filename.split('.')[0]
 
@@ -296,7 +296,7 @@ def create_tower_dataset(local_samplesheet=None, dataset_name=None, event=None,
     s3key = event['Records'][0]['s3']['object']['key']
     s3source = f"s3://{s3bucket}/{s3key}"
 
-    workspace_id = tw_params['/lambda_tutorial/workspace_id']
+    workspace_id = tw_params['/lambda_tutorial_adrian/workspace_id']
     description = f"Generated by Lambda {datetime.datetime.now().strftime('%Y-%m-%d %H:%M')} from {s3source}"
 
     # Python subprocess module works best with command split into array.
@@ -340,8 +340,8 @@ def launch_tower_pipeline(datasetid=None, tw_params=None):
        3) Invoke the target pipeline passing the parameter file with the defined input source.
     '''
     # Extract parameters for inclusion in tw commands
-    workspace_id = tw_params['/lambda_tutorial/workspace_id']
-    target_pipeline_name = tw_params['/lambda_tutorial/target_pipeline_name']
+    workspace_id = tw_params['/lambda_tutorial_adrian/workspace_id']
+    target_pipeline_name = tw_params['/lambda_tutorial_adrian/target_pipeline_name']
 
     # Generate command for dataset URL retrieval (required for subsequent pipeline launch command)
     command = f"tw -o json datasets url --workspace={workspace_id} --id={datasetid}"
@@ -469,7 +469,7 @@ def update_logging_level(desired_level=None):
         logger.warning(f"Modified logging level to {desired_level}")
     else:
         # Create logger alert and stick with default DEBUG
-        logger.error(f"SSM parameter 'lambda_tutorial/logging_level' value {desired_level} is not a valid logging level. Continuing with DEBUG.")
+        logger.error(f"SSM parameter 'lambda_tutorial_adrian/logging_level' value {desired_level} is not a valid logging level. Continuing with DEBUG.")
 
 
 def handler(event, context):
@@ -490,12 +490,12 @@ def handler(event, context):
     # Keynames are odd for a Python dictionary, but it works and aligns with required AWS set-up commands.
     # Keep logging_level as first entry to control logging behaviour of other values when retrieved.
     params_to_retrieve = [
-        '/lambda_tutorial/logging_level',
-        '/lambda_tutorial/workspace_id',
-        '/lambda_tutorial/s3_root_prefix',
-        '/lambda_tutorial/samplesheet_file_types',
-        '/lambda_tutorial/target_pipeline_name',
-        "/lambda_tutorial/tower_api_endpoint"
+        '/lambda_tutorial_adrian/logging_level',
+        '/lambda_tutorial_adrian/workspace_id',
+        '/lambda_tutorial_adrian/s3_root_prefix',
+        '/lambda_tutorial_adrian/samplesheet_file_types',
+        '/lambda_tutorial_adrian/target_pipeline_name',
+        '/lambda_tutorial_adrian/tower_api_endpoint'
     ]
     tw_params = get_parameters(session=session, params_to_retrieve=params_to_retrieve)
     logger.debug(f"Parameters are: {tw_params}")
diff --git a/entry_script.sh b/entry_script.sh
index b5095ac..1be996d 100755
--- a/entry_script.sh
+++ b/entry_script.sh
@@ -2,7 +2,7 @@
 # Reference: https://docs.aws.amazon.com/lambda/latest/dg/images-test.html
 
 if [ -z "${AWS_LAMBDA_RUNTIME_API}" ]; then
-    exec /usr/local/bin/aws-lambda-rie-x86_64 /usr/bin/python3 -m awslambdaric "$@"
+    exec /usr/local/bin/aws-lambda-rie-x86_64 /usr/local/bin/python -m awslambdaric "$@"
 else
-    exec /usr/bin/python3 -m awslambdaric "$@"
+    exec /usr/local/bin/python -m awslambdaric "$@"
 fi
diff --git a/samplesheet_full.csv b/samplesheet_full.csv
new file mode 100644
index 0000000..0fd83e7
--- /dev/null
+++ b/samplesheet_full.csv
@@ -0,0 +1,9 @@
+sample,fastq_1,fastq_2,strandedness
+GM12878_REP1,s3://nf-core-awsmegatests/rnaseq/input_data/SRX1603629_T1_1.fastq.gz,s3://nf-core-awsmegatests/rnaseq/input_data/SRX1603629_T1_2.fastq.gz,reverse
+GM12878_REP2,s3://nf-core-awsmegatests/rnaseq/input_data/SRX1603630_T1_1.fastq.gz,s3://nf-core-awsmegatests/rnaseq/input_data/SRX1603630_T1_2.fastq.gz,reverse
+K562_REP1,s3://nf-core-awsmegatests/rnaseq/input_data/SRX1603392_T1_1.fastq.gz,s3://nf-core-awsmegatests/rnaseq/input_data/SRX1603392_T1_2.fastq.gz,reverse
+K562_REP2,s3://nf-core-awsmegatests/rnaseq/input_data/SRX1603393_T1_1.fastq.gz,s3://nf-core-awsmegatests/rnaseq/input_data/SRX1603393_T1_2.fastq.gz,reverse
+MCF7_REP1,s3://nf-core-awsmegatests/rnaseq/input_data/SRX2370490_T1_1.fastq.gz,s3://nf-core-awsmegatests/rnaseq/input_data/SRX2370490_T1_2.fastq.gz,reverse
+MCF7_REP2,s3://nf-core-awsmegatests/rnaseq/input_data/SRX2370491_T1_1.fastq.gz,s3://nf-core-awsmegatests/rnaseq/input_data/SRX2370491_T1_2.fastq.gz,reverse
+H1_REP1,s3://nf-core-awsmegatests/rnaseq/input_data/SRX2370468_T1_1.fastq.gz,s3://nf-core-awsmegatests/rnaseq/input_data/SRX2370468_T1_2.fastq.gz,reverse
+H1_REP2,s3://nf-core-awsmegatests/rnaseq/input_data/SRX2370469_T1_1.fastq.gz,s3://nf-core-awsmegatests/rnaseq/input_data/SRX2370469_T1_2.fastq.gz,reverse
diff --git a/testing/test_event_good.json b/testing/test_event_good.json
index de94f10..7358249 100644
--- a/testing/test_event_good.json
+++ b/testing/test_event_good.json
@@ -3,7 +3,7 @@
     {
       "eventVersion": "2.0",
      "eventSource": "aws:s3",
-      "awsRegion": "YOUR_AWS_REGION",
+      "awsRegion": "us-east-1",
       "eventTime": "1970-01-01T00:00:00.000Z",
       "eventName": "ObjectCreated:Put",
       "userIdentity": {
@@ -20,14 +20,14 @@
         "s3SchemaVersion": "1.0",
         "configurationId": "testConfigRule",
         "bucket": {
-          "name": "YOUR_S3_BUCKET",
+          "name": "adrian-navarro-test",
           "ownerIdentity": {
             "principalId": "EXAMPLE"
           },
-          "arn": "arn:aws:s3:::example-bucket"
+          "arn": "arn:aws:s3:::adrian-navarro-test"
         },
         "object": {
-          "key": "lambda_tutorial/samplesheet_full.csv",
+          "key": "lambda_tutorial_adrian/samplesheet_full.csv",
           "size": 1024,
           "eTag": "0123456789abcdef0123456789abcdef",
           "sequencer": "0A1B2C3D4E5F678901"
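
Local smoke test: a minimal sketch, not part of the patch itself. The image tag lambda-tutorial and the app.handler argument are assumptions (they depend on how the Dockerfile names its CMD); the port mapping and invoke URL follow the AWS Lambda Runtime Interface Emulator docs referenced in entry_script.sh, which assumes the RIE binary is already present in the image.

    # Build the image; entry_script.sh falls back to the Runtime Interface Emulator
    # when AWS_LAMBDA_RUNTIME_API is unset, i.e. when running outside real Lambda.
    docker build -t lambda-tutorial .                      # tag is hypothetical
    docker run --rm -p 9000:8080 lambda-tutorial app.handler   # handler name assumed from app.py

    # From a second shell, post the sample S3 event to the RIE invoke endpoint.
    curl -X POST "http://localhost:9000/2015-03-31/functions/function/invocations" \
        -d @testing/test_event_good.json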