main.py
# -*- coding: utf-8 -*-
# author: Lenovo
# date: 2018/8/10
from scrapy.cmdline import execute
import sys
import os

# Add this file's directory (the project root) to sys.path so Scrapy can locate the project.
sys.path.append(os.path.dirname(os.path.abspath(__file__)))

# Run a spider as if invoked from the command line: scrapy crawl <spider_name>
# execute(['scrapy', 'crawl', 'jobble'])
# execute(['scrapy', 'crawl', 'zhihu'])
execute(['scrapy', 'crawl', 'lagou'])
# execute(['scrapy', 'crawl', 'tianyancha'])
# execute(['scrapy', 'crawl', 'zhilian'])
# execute(['scrapy', 'crawl', 'shixiseng'])
# execute(['scrapy', 'crawl', 'movie'])
# execute(['scrapy', 'crawl', 'meizi'])
# execute(['scrapy', 'crawl', 'chengren'])
# execute(['scrapy', 'crawl', 'qq'])
# execute(['scrapy', 'crawl', 'tc_cartoon'])
# execute(['scrapy', 'crawl', 'doutu'])
# execute(['scrapy', 'crawl', 'wangyiyun'])
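An alternative to `scrapy.cmdline.execute` is Scrapy's `CrawlerProcess`, which runs a spider in-process without emulating the command line. The sketch below is a minimal, hypothetical variant of this runner (it assumes the project settings module is importable and that a spider named `lagou` is registered, as in the original script); it is not part of the repository.

```python
# run_spider.py - sketch of running a spider in-process (assumes the 'lagou' spider exists)
from scrapy.crawler import CrawlerProcess
from scrapy.utils.project import get_project_settings

if __name__ == "__main__":
    # Load the project's settings.py so pipelines, middlewares, etc. are applied.
    process = CrawlerProcess(get_project_settings())
    # Schedule the spider by name, then start the reactor (blocks until the crawl finishes).
    process.crawl("lagou")
    process.start()
```

Unlike `execute(['scrapy', 'crawl', ...])`, this approach returns control to the script after the crawl ends, which can be more convenient when chaining multiple spiders or post-processing steps.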