scrap10.py
# Python program to scrape a website
# and save the quotes/paragraph text it contains
import requests
from bs4 import BeautifulSoup
# import csv


def scrape_paragraphs(url):
    # Fetch the page and return the text of every <p> element
    response = requests.get(url)
    soup = BeautifulSoup(response.content, "html.parser")
    paragraphs = soup.find_all('p')
    return [p.text for p in paragraphs]


url = "https://www.dittapotek.no/rad-for-bedre-helse"
with open("scr10.txt", "w", encoding="utf-8") as f:
    # Write each scraped paragraph to the output file, separated by blank lines
    for paragraph in scrape_paragraphs(url):
        f.write(paragraph + "\n\n")
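
# Optional hardening (a minimal sketch, not part of the original script):
# adding a timeout and raise_for_status() surfaces network/HTTP errors
# instead of silently writing an empty file. The function name and the
# 10-second timeout below are assumed placeholders.
# def scrape_paragraphs_safe(url):
#     response = requests.get(url, timeout=10)
#     response.raise_for_status()  # abort on 4xx/5xx responses
#     soup = BeautifulSoup(response.content, "html.parser")
#     return [p.text for p in soup.find_all('p')]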
# URL = "https://www.dittapotek.no/c/munn--og-tannpleie/a/A50002"
# r = requests.get(URL)
# soup = BeautifulSoup(r.content, 'html5lib')
# quotes = []  # a list to store quotes
# # table = soup.find('div')
# table = soup.find('div', attrs={'id': 'all_quotes'})
# for row in table.findAll('div',
#                          attrs={'class': 'js-shortcut-section'}):
#     quote = {}
#     quote['theme'] = row.h5.text
#     quote['url'] = row.a['href']
#     quote['img'] = row.img['src']
#     quote['lines'] = row.img['alt'].split(" #")[0]
#     quote['author'] = row.img['alt'].split(" #")[1]
#     quotes.append(quote)
# filename = 'munn10.csv'
# with open(filename, 'w', newline='') as f:
#     w = csv.DictWriter(f, ['theme', 'url', 'img', 'lines', 'author'])
#     w.writeheader()
#     for quote in quotes:
#         w.writerow(quote)