# Skip to content
#
# Dump GTQ
#
# Dump GTQ #10
#
# Workflow file for this run
#
# Nightly dump of Grand Théâtre de Québec event data from Artsdata to S3.
# Flow: curl the Artsdata JSON-LD query endpoint -> timestamp the run ->
# upload the JSON to the public web-hosting bucket with a dated object key.
name: Dump GTQ JSON to S3 Production

on:
  # Manual trigger from the Actions UI.
  workflow_dispatch:
  # Nightly at 01:00 UTC.
  schedule:
    - cron: "0 1 * * *"

jobs:
  deploy:
    env:
      AWS_ACCESS_KEY_ID: ${{ secrets.S3_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_SECRET_ACCESS_KEY }}
      AWS_REGION: ca-central-1
      WEBHOSTING_BUCKET_NAME: huginn-data
    runs-on: ubuntu-latest
    steps:
      - name: Download events from Artsdata
        # --fail: exit non-zero on HTTP errors so a bad response fails the
        # job instead of silently uploading an error page as the dump.
        run: >-
          curl --fail --header 'Accept:application/json'
          'http://api.artsdata.ca/query.jsonld?limit=300&frame=lavitrine/events3&sparql=lavitrine/events3&graph=http://kg.artsdata.ca/culture-creates/huginn/derived-grandtheatre-qc-ca'
          > grandtheatre-qc-ca.json

      - name: Set current date as env variable
        id: version  # referenced below as steps.version.outputs.dumpdate
        # Original had a stray "- " before echo inside the block scalar,
        # which made the shell run the command "-" and fail the step.
        run: echo "dumpdate=$(date +'%Y-%m-%dT%H:%M:%S%z')" >> "$GITHUB_OUTPUT"

      - name: Upload to S3
        # Object key is suffixed with the run timestamp; --acl public-read
        # keeps the dump publicly downloadable from the web-hosting bucket.
        run: aws s3 cp grandtheatre-qc-ca.json s3://${{ env.WEBHOSTING_BUCKET_NAME }}/grandtheatre-qc-ca-${{ steps.version.outputs.dumpdate }}.json --acl public-read