-
Notifications
You must be signed in to change notification settings - Fork 7
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
* Article checker * separate formatters file
- Loading branch information
Showing
9 changed files
with
401 additions
and
61 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,65 @@ | ||
name: Check articles are valid

on:
  workflow_call:
    inputs:
      datasource:
        type: string
        required: true
  workflow_dispatch: # allow manual triggering
    inputs:
      datasource:
        description: 'The datasource to process'
        type: choice
        options:
          - all
          - agentmodels
          - agisf
          - aisafety.info
          - alignment_newsletter
          - alignmentforum
          - arbital
          - arxiv
          - blogs
          - distill
          - eaforum
          - indices
          - lesswrong
          - special_docs
          - youtube
  schedule:
    - cron: "0 */4 * * *" # Every 4 hours

jobs:
  build-dataset:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        # v2 runs on a deprecated Node.js runtime; v4 is the supported release.
        uses: actions/checkout@v4

      - name: Setup Python environment
        # v2 is deprecated for the same reason; v5 is current.
        uses: actions/setup-python@v5
        with:
          python-version: '3.x'

      - name: Install Pandoc
        # NOTE(review): 'gdocs' is not among the workflow_dispatch choices above,
        # so manual runs can never trigger this install — confirm whether a
        # workflow_call caller passes datasource=gdocs.
        run: |
          if [ "${{ inputs.datasource }}" = "gdocs" ]; then
            sudo apt-get update
            sudo apt-get -y install pandoc
          fi

      - name: Install dependencies
        run: pip install -r requirements.txt

      - name: Process dataset
        env:
          CODA_TOKEN: ${{ secrets.CODA_TOKEN }}
          AIRTABLE_API_KEY: ${{ secrets.AIRTABLE_API_KEY }}
          YOUTUBE_API_KEY: ${{ secrets.YOUTUBE_API_KEY }}
          ARD_DB_USER: ${{ secrets.ARD_DB_USER }}
          ARD_DB_PASSWORD: ${{ secrets.ARD_DB_PASSWORD }}
          ARD_DB_HOST: ${{ secrets.ARD_DB_HOST }}
          ARD_DB_NAME: alignment_research_dataset
        run: python main.py fetch ${{ inputs.datasource }}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,60 @@ | ||
import re | ||
from typing import Any, Dict, List | ||
|
||
from align_data.settings import ARTICLE_MAIN_KEYS | ||
from align_data.sources.utils import merge_dicts | ||
|
||
|
||
def normalize_url(url: str | None) -> str | None: | ||
if not url: | ||
return url | ||
|
||
# ending '/' | ||
url = url.rstrip("/") | ||
|
||
# Remove http and use https consistently | ||
url = url.replace("http://", "https://") | ||
|
||
# Remove www | ||
url = url.replace("https://www.", "https://") | ||
|
||
# Remove index.html or index.htm | ||
url = re.sub(r'/index\.html?$', '', url) | ||
|
||
# Convert youtu.be links to youtube.com | ||
url = url.replace("https://youtu.be/", "https://youtube.com/watch?v=") | ||
|
||
# Additional rules for mirror domains can be added here | ||
|
||
# agisafetyfundamentals.com -> aisafetyfundamentals.com | ||
url = url.replace("https://agisafetyfundamentals.com", "https://aisafetyfundamentals.com") | ||
|
||
return url | ||
|
||
|
||
def normalize_text(text: str | None) -> str | None: | ||
return (text or '').replace('\n', ' ').replace('\r', '').strip() or None | ||
|
||
|
||
def format_authors(authors: List[str]) -> str:
    """Join *authors* into a comma-separated string of at most 1024 chars.

    When the joined string would overflow, whole trailing names are dropped
    so that no author name is ever stored half-truncated.
    """
    # TODO: Don't keep adding the same authors - come up with some way to reuse them
    joined = ",".join(authors)
    if len(joined) <= 1024:
        return joined
    # Keep only the complete names that fit inside the first 1024 characters.
    complete_names = joined[:1024].split(",")[:-1]
    return ",".join(complete_names)
|
||
|
||
def article_dict(data, **kwargs) -> Dict[str, Any]:
    """Merge *data* with *kwargs* and shape the result for Article construction.

    Returns a dict whose top-level keys are the columns listed in
    ``ARTICLE_MAIN_KEYS``, plus a ``meta`` dict collecting every other
    non-None value.
    """
    data = merge_dicts(data, kwargs)

    summaries = data.pop("summaries", [])
    summary = data.pop("summary", None)

    # Bug fix: the previous `summaries + [summary] if summary else []` parsed as
    # `(summaries + [summary]) if summary else []`, which threw away ALL
    # existing summaries whenever `summary` was falsy.
    data["summaries"] = summaries + ([summary] if summary else [])
    data["authors"] = format_authors(data.pop("authors", []))
    data["title"] = normalize_text(data.get("title"))

    return dict(
        meta={k: v for k, v in data.items() if k not in ARTICLE_MAIN_KEYS and v is not None},
        **{k: v for k, v in data.items() if k in ARTICLE_MAIN_KEYS},
    )
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,90 @@ | ||
import logging | ||
from datetime import datetime, timedelta | ||
from typing import Any, List | ||
|
||
from tqdm import tqdm | ||
from sqlalchemy.exc import IntegrityError | ||
from align_data.common.formatters import normalize_url, normalize_text, article_dict | ||
from align_data.db.session import make_session | ||
from align_data.db.models import Article | ||
from align_data.sources.articles.parsers import item_metadata | ||
from align_data.sources.articles.html import fetch | ||
|
||
|
||
logger = logging.getLogger(__name__) | ||
|
||
|
||
def update_article_field(article: Article, field: str, value: Any):
    """Merge `value` into `article.<field>` only when it looks like an improvement.

    Falsy values are ignored. URLs and titles that normalize to the same
    string as the stored value are left alone. For `meta`, each key is
    updated only when the new value compares greater than the stored one.
    For everything else, a larger string / later datetime wins.
    """
    if not value:
        return

    if field == 'url' and normalize_url(article.url) == normalize_url(value):
        # This is pretty much the same url, so don't modify it
        return
    if field == 'title' and normalize_text(article.title) == normalize_text(value):
        # If there are slight differences in the titles (e.g. punctuation), assume the
        # database version is more correct
        return
    if field == 'meta':
        article.meta = article.meta or {}
        # NOTE(review): in-place mutation of article.meta may not mark the JSON
        # column dirty unless the model uses MutableDict — confirm against the
        # Article model definition.
        for k, v in value.items():
            meta_val = article.meta.get(k)
            try:
                # `v > meta_val` may raise for incomparable types (e.g. str vs int);
                # that is what the try/except is for.
                if not meta_val or v > meta_val:
                    article.meta[k] = v
            except Exception as e:
                # Ignore exceptions here - the metadata isn't that important
                logger.info('Error checking metadata value for article %s: %s', article.url, value)
        return

    article_val = getattr(article, field, None)
    # Assume that if the provided value is larger (or later, in the case of dates), then it's
    # better. This might very well not hold, but it seems like a decent heuristic?
    if not article_val:
        setattr(article, field, value)
    elif isinstance(value, datetime) and value > article_val:
        setattr(article, field, value)
    elif isinstance(value, str) and len(normalize_text(value) or '') > len(normalize_text(article_val) or ''):
        # Strings are stored normalized (newlines flattened, trimmed).
        setattr(article, field, normalize_text(value))
|
||
|
||
def update_article(article: Article) -> Article:
    """Check whether there are better data for this article and whether its url is pointing somewhere decent.

    Re-fetches the article's source, merges any improved fields via
    update_article_field, marks the article unreachable on a 4xx response,
    and stamps `date_checked`. Returns the (mutated) article.
    """
    # Prefer the original source over a mirror/aggregator url, if recorded.
    source_url = article.meta.get('source_url') or article.url
    contents = {}
    if source_url:
        contents = item_metadata(source_url)

    # item_metadata signals failure by including an 'error' key rather than raising.
    if 'error' not in contents:
        for field, value in article_dict(contents).items():
            update_article_field(article, field, value)
    else:
        logger.info('Error getting contents for %s: %s', article, contents.get('error'))

    # 4xx means the url itself is bad; 5xx is treated as transient and ignored.
    # NOTE(review): fetch() is assumed to return a response-like object with a
    # status_code, and article.url is assumed non-None here — confirm.
    if 400 <= fetch(article.url).status_code < 500:
        logger.info('Could not get url for %s', article)
        article.status = 'Unreachable url'

    # Timestamp is naive UTC; check_articles compares against the same clock.
    article.date_checked = datetime.utcnow()

    return article
|
||
|
||
def check_articles(sources: List[str], batch_size=100):
    """Check `batch_size` articles with the given `sources` to see if they have better data.

    Picks articles that have never been checked (``date_checked`` IS NULL)
    or whose last check is more than four weeks old, refreshes each via
    update_article, and commits the batch in one transaction.
    """
    logger.info('Checking %s articles for %s', batch_size, ', '.join(sources))
    # Compare against naive UTC, matching the timestamps that update_article
    # writes with datetime.utcnow() (the old code used datetime.now(), which
    # drifts from the stored values on non-UTC hosts).
    cutoff = datetime.utcnow() - timedelta(weeks=4)
    with make_session() as session:
        articles = (
            session.query(Article)
            # Bug fix: NULL never satisfies `<` in SQL, so rows that had never
            # been checked were silently skipped forever. Include them explicitly.
            .filter((Article.date_checked.is_(None)) | (Article.date_checked < cutoff))
            .filter(Article.source.in_(sources))
            .limit(batch_size)
            .all()
        )
        for article in tqdm(articles):
            update_article(article)
            session.add(article)
        logger.debug('committing')
        try:
            session.commit()
        except IntegrityError as e:
            logger.error(e)
Oops, something went wrong.