forked from bda-research/node-crawler
-
Notifications
You must be signed in to change notification settings - Fork 0
/
package.json
64 lines (64 loc) · 1.59 KB
/
package.json
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
{
"name": "crawler",
"version": "0.2.5",
"description": "Crawler is a web spider written with Nodejs. It gives you the full power of jQuery on the server to parse a big number of pages as they are downloaded, asynchronously. Scraping should be simple and fun!",
"keywords": [
"dom",
"javascript",
"crawling",
"spider",
"scraper",
"scraping",
"jquery"
],
"maintainers": [
{
"name": "Sylvain Zimmer",
"email": "[email protected]",
"url": "http://sylvinus.org/"
}
],
"bugs": {
"email": "[email protected]",
"url": "http://github.com/sylvinus/node-crawler/issues"
},
"licenses": [
{
"type": "MIT",
"url": "http://github.com/sylvinus/node-crawler/blob/master/LICENSE.txt"
}
],
"repository": {
"type": "git",
"url": "https://github.com/sylvinus/node-crawler.git"
},
"dependencies": {
"htmlparser": "1.7.x",
"request": "2.27.x",
"jsdom": "0.8.x",
"generic-pool": "2.0.x",
"underscore": "1.5.x",
"jschardet": "1.1.x",
"iconv-lite": "0.2.x"
},
"optionalDependencies":{
"iconv": "2.0.x"
},
"devDependencies": {
"qunit": "0.5.x",
"express": "3.4.x",
"memwatch": "0.2.x"
},
"scripts": {
"test": "node test/testrunner.js"
},
"engines": {
"node": ">=0.6.x"
},
"directories": {
"lib": "lib"
},
"main": "./lib/crawler"
}