crawl and commit #11

Workflow file for this run

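# Crawl course data on every push to the crawler branch and on a schedule,
# then publish the results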
name: Crawling
on:
  push:
    branches:
      - crawler
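  # Also run every 3 hours, on the hour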
  schedule:
    - cron: "0 */3 * * *"
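# Only one crawl runs at a time; additional runs queue behind it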
concurrency:
  group: crawling
jobs:
  crawling:
    runs-on: ubuntu-latest
    steps:
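      # Check out the crawler branch; persist-credentials: false keeps the
      # workflow token out of the local git config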
      - name: Checkout
        uses: actions/checkout@v2
        with:
          ref: crawler
          persist-credentials: false
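      # Check out the published data branch into ./data so the crawl can
      # update the existing dataset in place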
      - name: Checkout data
        uses: actions/checkout@v2
        with:
          persist-credentials: false
          ref: crawler-gh-pages
          path: ./data
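      # Install Node dependencies exactly as pinned in yarn.lock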
      - name: Install
        run: yarn install --frozen-lockfile
      - name: Pip
        uses: actions/setup-python@v4
        with:
          python-version: "3.9"
          cache: "pip" # caching pip dependencies
      - name: Pip Install
        run: pip install -r requirements.txt
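      # Run the crawler; settings are passed as environment variables, and
      # NODE_EXTRA_CA_CERTS makes Node trust the intermediate certificate
      # checked out with the repository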
      - name: Crawling
        run: yarn start
        env:
          LOG_FORMAT: json
          NUM_TERMS: 4
          ALWAYS_SCRAPE_CURRENT_TERM: 1
          DETAILS_CONCURRENCY: 256
          DATA_FOLDER: ./data
          NODE_EXTRA_CA_CERTS: ${{ github.workspace }}/intermediate.pem
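      # Post-process the crawled data with the repository's Revise.py script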
      - name: Revision
        run: python ./src/Revise.py
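      # Publish ./data back to the crawler-gh-pages branch as a single commit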
      - name: Upload
        uses: JamesIves/github-pages-deploy-action@releases/v4
        with:
          branch: crawler-gh-pages
          folder: ./data
          clean: true
          single-commit: true
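      # Mirror the crawled data into the gt-course-vis repository; pushing to
      # another repository typically requires a token with write access there,
      # supplied to the action via a secret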
      - name: Copy Course Data
        uses: nkoppel/push-files-to-another-repository@v1 # action inferred from its inputs; the original version pin was redacted
        with:
          source-files: './data'
          destination-username: 'rohan-bansal'
          destination-repository: 'https://github.com/rohan-bansal/gt-course-vis/'
          destination-branch: 'main'
          destination-directory: 'src/lib/data'
          commit-email: '[email protected]'