-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathbs_index_page.py
30 lines (28 loc) · 1.12 KB
/
bs_index_page.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
from bs4 import BeautifulSoup
import os
import requests
# Mirror the K2 data files from the NMSU astronomy server.
# Remote layout: DATA/<parent>/<child>/ is an Apache-style index page whose
# table rows link to the individual data files; each file is saved under
# /Devel/k2/data/<parent>/<child>/.
dirs = ['COOL', 'HOT']
parent_dirs = ['C4', 'C5', 'C6', 'C13']
proxies = {'http': '', 'https': ''}  # empty strings: bypass any env proxies
base_url = 'http://astronomy.nmsu.edu/jasonj/JOYCE/FIGURES/K2_FIGS/DATA/'
local_root = '/Devel/k2/data/'

for pdir in parent_dirs:
    for cdir in dirs:
        tail_url = pdir + '/' + cdir + '/'
        url = base_url + tail_url
        r = requests.get(url, proxies=proxies)
        if r.status_code != 200:
            # Don't try to parse an error page as a directory listing.
            print('skipping {} (HTTP {})'.format(url, r.status_code))
            continue
        # Pin the parser explicitly: bare BeautifulSoup(data) warns and may
        # choose different parsers on different machines.
        soup = BeautifulSoup(r.text, 'html.parser')
        for row in soup.find_all('tr'):
            # Header rows (<th>) hold column titles, not file links.
            if row.find_all('th'):
                continue
            for anchor in row.find_all('a'):
                fname = anchor.get('href')
                req = requests.get(url + fname, proxies=proxies)
                if req.status_code == 200:
                    new_path = os.path.join(local_root, tail_url)
                    if not os.path.exists(new_path):
                        os.makedirs(new_path)
                    # Binary mode is required: req.content is bytes, and
                    # text mode ('w') would raise TypeError on write().
                    with open(os.path.join(new_path, fname), 'wb') as fp:
                        fp.write(req.content)
                    print(fname)