-
Notifications
You must be signed in to change notification settings - Fork 19
/
fetch_news_sites.py
44 lines (39 loc) · 1.57 KB
/
fetch_news_sites.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
from bs4 import BeautifulSoup
import iso3166
import json
import requests
BASE_URL = 'http://abyznewslinks.com/'
def fetch_country_pages():
    """Fetch the map of ISO 3166 alpha-2 codes to per-country listing URLs.

    Scrapes the ABYZ all-countries index page and keeps only links whose
    text matches a recognized ISO 3166 country name.

    Returns:
        dict: {alpha2_code: absolute URL of that country's listing page}

    Raises:
        requests.HTTPError: if the index page request returns an error status.
    """
    country_map = {}
    country_listing = 'allco.htm'
    # Timeout so a stalled server can't hang the script indefinitely;
    # raise_for_status so we don't silently scrape an error page.
    response = requests.get(BASE_URL + country_listing, timeout=30)
    response.raise_for_status()
    soup = BeautifulSoup(response.content, 'html.parser')
    # The page has no ids or classes; the country links live in the 6th
    # table. Brittle, but it is the only anchor the markup offers.
    country_table = soup.findAll('table')[5]
    for country in country_table.findAll('a'):
        # Skip link text that is not an ISO country name (e.g. regional
        # groupings); compute the uppercase key once.
        name = country.text.upper()
        if name in iso3166.countries_by_name:
            alpha2 = iso3166.countries_by_name[name].alpha2
            country_map[alpha2] = BASE_URL + country.attrs['href']
    return country_map
def news_sites_for_country(country_page):
    """Given a URL to the listing of news sites in a country, return a list
    of news site URLs.

    Args:
        country_page: absolute URL of an ABYZ per-country listing page.

    Returns:
        list: every non-empty ``href`` found in the news-site tables.

    Raises:
        requests.HTTPError: if the page request returns an error status.
    """
    news_links = []
    # Timeout so a stalled server can't hang the script indefinitely;
    # raise_for_status so we don't silently scrape an error page.
    response = requests.get(country_page, timeout=30)
    response.raise_for_status()
    soup = BeautifulSoup(response.content, 'html.parser')
    # No semantic hooks in the markup: the first four tables are page
    # chrome and the last is a footer, so slice both off.
    for table in soup.findAll('table')[4:-1]:
        news_links.extend(
            link.attrs['href']
            for link in table.findAll('a')
            if link.attrs.get('href')
        )
    return news_links
if __name__ == '__main__':
    # Build {country_code: [news site URLs]} for every country we can
    # resolve, then persist the whole map as JSON next to the script.
    country_news_map = {
        code: news_sites_for_country(listing_url)
        for code, listing_url in fetch_country_pages().items()
    }
    with open('./country_news_map.json', 'w') as out_file:
        json.dump(country_news_map, out_file)