
Workflow file for this run

name: Update robots.txt

on:
  workflow_dispatch:
  schedule:
    # Runs weekly, Mondays at 06:13 UTC
    - cron: "13 6 * * 1"

jobs:
  update-robots-txt:
    name: Update robots.txt
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: write
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Create robots.txt
        uses: s-thom/create-robots-txt-action@v1
        with:
          output-file: public/robots.txt
          input-file: app/base-robots.txt
          append-allow-rule: false
          allowed-bot-names: |
            Chrome-Lighthouse
          cloudflare-api-token: ${{ secrets.CLOUDFLARE_RADAR_API_TOKEN }}
          cloudflare-categories: |
            AI Crawler
            AI Search
            AI Assistant
            Search Engine Optimization
            Advertising & Marketing
            Monitoring & Analytics
            Aggregator
            Social Media Marketing
          dark-visitors-api-token: ${{ secrets.DARK_VISITORS_API_TOKEN }}
          dark-visitors-categories: |
            AI Assistant
            AI Data Scraper
            AI Search Crawler

      - name: Create Pull Request
        uses: peter-evans/create-pull-request@v7
        with:
          add-paths: |
            public/robots.txt
          commit-message: "Update robots.txt"
          branch: robots-txt
          delete-branch: true
          author: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
          committer: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
          title: "Update robots.txt"
          body: |
            # Automated update of robots.txt

            Generated by [s-thom/create-robots-txt-action](https://github.com/s-thom/create-robots-txt-action)
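
For reference, a rough sketch of the kind of public/robots.txt a run might produce. This assumes the action carries the contents of app/base-robots.txt through to the output and then appends one block per bot returned by the Cloudflare Radar and Dark Visitors APIs for the configured categories; GPTBot and CCBot below are hypothetical placeholders (the real names depend on the API responses at run time), and Chrome-Lighthouse would be left out of the block list because it appears in allowed-bot-names:

    # ...contents of app/base-robots.txt copied here...

    User-agent: GPTBot
    Disallow: /

    User-agent: CCBot
    Disallow: /

Because the workflow also declares workflow_dispatch, it can be started by hand from the Actions tab or with the GitHub CLI, for example: gh workflow run "Update robots.txt".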