Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Increase tw-cli to v0.9.2 #3

Open
wants to merge 2 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,9 @@ ARG LAMBDA_TASK_ROOT=/var/task

RUN apt-get update && apt-get upgrade -y && \
apt-get install --no-install-recommends --yes wget ca-certificates && \
wget https://github.com/seqeralabs/tower-cli/releases/download/v0.5/tw-0.5-linux-x86_64 && \
chmod +x ./tw-0.5-linux-x86_64 && \
mv ./tw-0.5-linux-x86_64 /usr/local/bin/tw && \
wget https://github.com/seqeralabs/tower-cli/releases/download/v0.9.2/tw-linux-x86_64 && \
chmod +x ./tw-linux-x86_64 && \
mv ./tw-linux-x86_64 /usr/local/bin/tw && \
rm -rf /var/lib/apt/lists/*

WORKDIR ${LAMBDA_TASK_ROOT}
Expand Down
40 changes: 20 additions & 20 deletions app.py
Original file line number Diff line number Diff line change
Expand Up @@ -107,7 +107,7 @@ def get_parameters(session=None, params_to_retrieve=None):
underlying image or Lambda function code.

Example: {'Parameter':
{'Name': '/lambda_tutorial/workspace_id',
{'Name': '/lambda_tutorial_adrian/workspace_id',
'Type': 'String',
'Value': '34830707738561',
'Version': 3,
Expand All @@ -124,7 +124,7 @@ def get_parameters(session=None, params_to_retrieve=None):
logger.debug(tw_params[param])

# Update logging_level based on logging_level key.
if param == '/lambda_tutorial/logging_level':
if param == '/lambda_tutorial_adrian/logging_level':
desired_level = tw_params[param]
if desired_level.upper() != "DEBUG":
update_logging_level(desired_level=desired_level)
Expand All @@ -148,13 +148,13 @@ def get_secrets(session=None):
for a simpler implementation.

Example: {'tower_PAT':
{'ARN': 'arn:aws:secretsmanager:YOUR_REGION:YOUR_ACCOUNT:secret:lambda_tutorial/tower_PAT-Abcdef',
'Name': 'lambda_tutorial/tower_PAT',
{'ARN': 'arn:aws:secretsmanager:YOUR_REGION:YOUR_ACCOUNT:secret:lambda_tutorial_adrian/tower_PAT-Abcdef',
'Name': 'lambda_tutorial_adrian/tower_PAT',
'SecretString': 'eyJ0.....',
...
'''
tw_secrets = {}
secret_name = "lambda_tutorial/tower_PAT"
secret_name = "lambda_tutorial_adrian/tower_PAT"
secrets_client: SecretsManagerClient = session.client('secretsmanager')

try:
Expand Down Expand Up @@ -205,7 +205,7 @@ def set_environment_variables(tw_params=None, tw_secrets=None):
Set this before running tw transactions.
'''
os.environ['TOWER_ACCESS_TOKEN'] = tw_secrets['tower_PAT']
os.environ['TOWER_API_ENDPOINT'] = tw_params['/lambda_tutorial/tower_api_endpoint']
os.environ['TOWER_API_ENDPOINT'] = tw_params['/lambda_tutorial_adrian/tower_api_endpoint']


def check_if_event_in_scope(event=None, tw_params=None):
Expand All @@ -216,23 +216,23 @@ def check_if_event_in_scope(event=None, tw_params=None):
2) Boto3 S3 client list_objects requires the Prefix parameter to have a '/' at the end
so we add this back in since the split function removes it
Example of object key:
"lambda_tutorial/samplesheet_full.csv"
"lambda_tutorial_adrian/samplesheet_full.csv"
'''
# Check if event should be processed or ignored. Cease processing if:
# 1) Notification isn't from designated prefix.
# 2) Notification doesn't match file type trigger.
event_key = event['Records'][0]['s3']['object']['key']
filetype = event_key.rsplit('.', 1)[1]

if not event_key.startswith(tw_params['/lambda_tutorial/s3_root_prefix']):
if not event_key.startswith(tw_params['/lambda_tutorial_adrian/s3_root_prefix']):
# Event is out of scope and should not be retried.
log_error_and_raise_exception(
errorstring=f"Event key: {event_key} does not match designated prefix. Cease processing.",
e=None,
retry_transaction=False
)

if filetype not in tw_params['/lambda_tutorial/samplesheet_file_types'].split(','):
if filetype not in tw_params['/lambda_tutorial_adrian/samplesheet_file_types'].split(','):
# Event is out of scope and should not be retried.
log_error_and_raise_exception(
errorstring=f"Event key: {event_key} not a trigger file type. Cease processing.",
Expand All @@ -253,7 +253,7 @@ def download_samplesheet(session=None, event=None):
try:
s3bucket = event['Records'][0]['s3']['bucket']['name']
s3key = event['Records'][0]['s3']['object']['key']
# Example of key: "lambda_tutorial/complete.txt"
# Example of key: "lambda_tutorial_adrian/complete.txt"
samplesheet_filename = s3key.rsplit('/')[1]
dataset_name = samplesheet_filename.split('.')[0]

Expand Down Expand Up @@ -296,7 +296,7 @@ def create_tower_dataset(local_samplesheet=None, dataset_name=None, event=None,
s3key = event['Records'][0]['s3']['object']['key']
s3source = f"s3://{s3bucket}/{s3key}"

workspace_id = tw_params['/lambda_tutorial/workspace_id']
workspace_id = tw_params['/lambda_tutorial_adrian/workspace_id']
description = f"Generated by Lambda {datetime.datetime.now().strftime('%Y-%m-%d %H:%M')} from {s3source}"

# Python subprocess module works best with command split into array.
Expand Down Expand Up @@ -340,8 +340,8 @@ def launch_tower_pipeline(datasetid=None, tw_params=None):
3) Invoke the target pipeline passing the parameter file with the defined input source.
'''
# Extract parameters for inclusion in tw commands
workspace_id = tw_params['/lambda_tutorial/workspace_id']
target_pipeline_name = tw_params['/lambda_tutorial/target_pipeline_name']
workspace_id = tw_params['/lambda_tutorial_adrian/workspace_id']
target_pipeline_name = tw_params['/lambda_tutorial_adrian/target_pipeline_name']

    # Generate command for dataset URL retrieval (required for subsequent pipeline launch command)
command = f"tw -o json datasets url --workspace={workspace_id} --id={datasetid}"
Expand Down Expand Up @@ -469,7 +469,7 @@ def update_logging_level(desired_level=None):
logger.warning(f"Modified logging level to {desired_level}")
else:
# Create logger alert and stick with default DEBUG
logger.error(f"SSM parameter 'lambda_tutorial/logging_level' value {desired_level} is not a valid logging level. Continuing with DEBUG.")
logger.error(f"SSM parameter 'lambda_tutorial_adrian/logging_level' value {desired_level} is not a valid logging level. Continuing with DEBUG.")


def handler(event, context):
Expand All @@ -490,12 +490,12 @@ def handler(event, context):
# Keynames are odd for a Python dictionary, but it works and aligns with required AWS set-up commands.
# Keep logging_level as first entry to control logging behaviour of other values when retrieved.
params_to_retrieve = [
'/lambda_tutorial/logging_level',
'/lambda_tutorial/workspace_id',
'/lambda_tutorial/s3_root_prefix',
'/lambda_tutorial/samplesheet_file_types',
'/lambda_tutorial/target_pipeline_name',
"/lambda_tutorial/tower_api_endpoint"
'/lambda_tutorial_adrian/logging_level',
'/lambda_tutorial_adrian/workspace_id',
'/lambda_tutorial_adrian/s3_root_prefix',
'/lambda_tutorial_adrian/samplesheet_file_types',
'/lambda_tutorial_adrian/target_pipeline_name',
"/lambda_tutorial_adrian/tower_api_endpoint"
]
tw_params = get_parameters(session=session, params_to_retrieve=params_to_retrieve)
logger.debug(f"Parameters are: {tw_params}")
Expand Down
4 changes: 2 additions & 2 deletions entry_script.sh
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

# Reference: https://docs.aws.amazon.com/lambda/latest/dg/images-test.html
if [ -z "${AWS_LAMBDA_RUNTIME_API}" ]; then
exec /usr/local/bin/aws-lambda-rie-x86_64 /usr/bin/python3 -m awslambdaric "$@"
exec /usr/local/bin/aws-lambda-rie-x86_64 /usr/local/bin/python -m awslambdaric "$@"
else
exec /usr/bin/python3 -m awslambdaric "$@"
exec /usr/local/bin/python -m awslambdaric "$@"
fi
9 changes: 9 additions & 0 deletions samplesheet_full.csv
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
sample,fastq_1,fastq_2,strandedness
GM12878_REP1,s3://nf-core-awsmegatests/rnaseq/input_data/SRX1603629_T1_1.fastq.gz,s3://nf-core-awsmegatests/rnaseq/input_data/SRX1603629_T1_2.fastq.gz,reverse
GM12878_REP2,s3://nf-core-awsmegatests/rnaseq/input_data/SRX1603630_T1_1.fastq.gz,s3://nf-core-awsmegatests/rnaseq/input_data/SRX1603630_T1_2.fastq.gz,reverse
K562_REP1,s3://nf-core-awsmegatests/rnaseq/input_data/SRX1603392_T1_1.fastq.gz,s3://nf-core-awsmegatests/rnaseq/input_data/SRX1603392_T1_2.fastq.gz,reverse
K562_REP2,s3://nf-core-awsmegatests/rnaseq/input_data/SRX1603393_T1_1.fastq.gz,s3://nf-core-awsmegatests/rnaseq/input_data/SRX1603393_T1_2.fastq.gz,reverse
MCF7_REP1,s3://nf-core-awsmegatests/rnaseq/input_data/SRX2370490_T1_1.fastq.gz,s3://nf-core-awsmegatests/rnaseq/input_data/SRX2370490_T1_2.fastq.gz,reverse
MCF7_REP2,s3://nf-core-awsmegatests/rnaseq/input_data/SRX2370491_T1_1.fastq.gz,s3://nf-core-awsmegatests/rnaseq/input_data/SRX2370491_T1_2.fastq.gz,reverse
H1_REP1,s3://nf-core-awsmegatests/rnaseq/input_data/SRX2370468_T1_1.fastq.gz,s3://nf-core-awsmegatests/rnaseq/input_data/SRX2370468_T1_2.fastq.gz,reverse
H1_REP2,s3://nf-core-awsmegatests/rnaseq/input_data/SRX2370469_T1_1.fastq.gz,s3://nf-core-awsmegatests/rnaseq/input_data/SRX2370469_T1_2.fastq.gz,reverse
8 changes: 4 additions & 4 deletions testing/test_event_good.json
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
{
"eventVersion": "2.0",
"eventSource": "aws:s3",
"awsRegion": "YOUR_AWS_REGION",
"awsRegion": "us-east-1",
"eventTime": "1970-01-01T00:00:00.000Z",
"eventName": "ObjectCreated:Put",
"userIdentity": {
Expand All @@ -20,14 +20,14 @@
"s3SchemaVersion": "1.0",
"configurationId": "testConfigRule",
"bucket": {
"name": "YOUR_S3_BUCKET",
"name": "adrian-navarro-test",
"ownerIdentity": {
"principalId": "EXAMPLE"
},
"arn": "arn:aws:s3:::example-bucket"
"arn": "arn:aws:s3:::adrian-navarro-test"
},
"object": {
"key": "lambda_tutorial/samplesheet_full.csv",
"key": "lambda_tutorial/adrian/samplesheet_full.csv",
"size": 1024,
"eTag": "0123456789abcdef0123456789abcdef",
"sequencer": "0A1B2C3D4E5F678901"
Expand Down