Skip to content

trigger

trigger #43

Workflow file for this run

name: Liat Workflow

# NOTE: `on` is a YAML 1.1 truthy key; GitHub's loader handles it, but generic
# linters may flag it (suppress yamllint `truthy` for this key).
on:
  push:
    branches:
      - liats/wip/ci_cd

# id-token: write is required for the OIDC role assumption done by
# aws-actions/configure-aws-credentials below.
permissions:
  id-token: write
  contents: read

jobs:
  create-conda-env:
    runs-on: ubuntu-latest
    container:
      image: amazonlinux:2
    steps:
      # tar and gzip are required by actions/checkout inside this bare container.
      - name: Install tar
        run: yum install -y tar gzip

      - name: Checkout Code
        uses: actions/checkout@v3  # Checks out the repository under $GITHUB_WORKSPACE.

      - name: Setup conda
        uses: conda-incubator/setup-miniconda@v2
        with:
          miniconda-version: "latest"
          activate-environment: pheno
          # Quoted: an unquoted version number is parsed as a YAML float
          # (e.g. 3.10 -> 3.1) — always quote Python versions.
          python-version: "3.11"
          # environment-file: etc/example-environment.yml

      - name: Install pheno-utils
        # bash -el gives a login shell so the conda env activated by
        # setup-miniconda is on PATH for pip/conda.
        shell: bash -el {0}
        run: |
          cd "$GITHUB_WORKSPACE"
          pip install .
          conda list

      - name: Pack Conda
        shell: bash -el {0}
        run: |
          # Enable adding the kernel to JupyterLab
          pip install -q ipykernel
          python -m ipykernel install --user --name pheno --display-name "Pheno"
          pip install -q conda-pack
          conda pack --quiet -n pheno -o "$HOME/pheno.tar.gz"

      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v3
        with:
          role-to-assume: arn:aws:iam::081569964966:role/github_cicd
          aws-region: eu-west-1

      - name: Setup AWS CLI
        run: |
          echo "Installing AWS CLI"
          # The AWS CLI v2 bundle is a ZIP archive; GNU tar (the tar on
          # Amazon Linux 2) cannot extract ZIP files, so install and use unzip.
          yum install -y unzip
          curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip"
          unzip -q awscliv2.zip
          # The container runs as root and amazonlinux:2 does not ship sudo,
          # so invoke the installer directly.
          ./aws/install
          aws --version

      - name: Upload to S3
        run: |
          echo "Uploading artifact to S3"
          DATE=$(date +%Y-%m-%d)
          BRANCH=${GITHUB_REF#refs/heads/}
          aws s3 cp "$HOME/pheno.tar.gz" "s3://pheno-ds-github/conda_envs/pheno_${BRANCH}_${DATE}.tar.gz"