Workflow file for this run

name: Liat Workflow
on:
  push:
    branches:
      - liats/wip/ci_cd
permissions:
  id-token: write
  contents: read
jobs:
  create-conda-env:
    runs-on: ubuntu-latest
    container:
      image: amazonlinux:2
    steps:
      # Install tar and gzip for code checkout
      - name: Install tar
        run: yum install -y tar gzip
      - name: Checkout Code
        uses: actions/checkout@v3 # Checks out the repository under $GITHUB_WORKSPACE.
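      # Install Miniconda and create/activate the "pheno" conda environment with Python 3.11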
      - name: Setup conda
        uses: conda-incubator/setup-miniconda@v2
        with:
          activate-environment: pheno
          python-version: 3.11
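      # Use a login shell (bash -el) so the conda environment set up above is activated in run steps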
      - name: Conda info
        shell: bash -el {0}
        run: conda info
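      # Install the pheno-utils package into the active pheno environment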
      - name: Pheno Utils
        shell: bash -el {0}
        run: conda install pheno-utils
      # - name: Install conda
      #   continue-on-error: true
      #   run: |
      #     curl -sS -O https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh
      #     bash Miniconda3-latest-Linux-x86_64.sh -b -u -p $HOME/miniconda
      # - name: Create conda environment
      #   run: |
      #     export PATH=$HOME/miniconda/bin:$PATH
      #     conda init bash
      #     # conda create -q -n test-env python=3.10 > /dev/null
      #     # conda init bash > /dev/null
      #     # conda activate test-env
      #     # conda install pheno-utils
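      # Exchange the GitHub OIDC token (enabled by the id-token: write permission above) for temporary AWS credentials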
      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v3
        with:
          role-to-assume: arn:aws:iam::081569964966:role/github_cicd
          aws-region: eu-west-1
      - name: Create and Upload Artifact to S3
        run: |
          # Example of creating a tar.gz archive and uploading it
          echo "Uploading artifact to S3"
          # tar -czvf artifact.tar.gz /path/to/your/files
          # aws s3 cp artifact.tar.gz s3://YOUR_S3_BUCKET_NAME/path/to/upload/