Added a basic Scrapy setup
This commit is contained in:
parent
94b1d24b35
commit
3cbaa99817
0
Scrapy/__init__.py
Normal file
0
Scrapy/__init__.py
Normal file
BIN
Scrapy/__init__.pyc
Normal file
BIN
Scrapy/__init__.pyc
Normal file
Binary file not shown.
11
Scrapy/items.py
Normal file
11
Scrapy/items.py
Normal file
@ -0,0 +1,11 @@
|
|||||||
|
# Define here the models for your scraped items
|
||||||
|
#
|
||||||
|
# See documentation in:
|
||||||
|
# http://doc.scrapy.org/en/latest/topics/items.html
|
||||||
|
|
||||||
|
from scrapy.item import Item, Field
|
||||||
|
|
||||||
|
class FourmiItem(Item):
    """Container for one scraped record produced by the Fourmi spiders.

    No fields are declared yet; add them as class attributes, e.g.::

        name = Field()

    See http://doc.scrapy.org/en/latest/topics/items.html
    """
    pass
|
8
Scrapy/pipelines.py
Normal file
8
Scrapy/pipelines.py
Normal file
@ -0,0 +1,8 @@
|
|||||||
|
# Define your item pipelines here
|
||||||
|
#
|
||||||
|
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
|
||||||
|
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
|
||||||
|
|
||||||
|
class FourmiPipeline(object):
    """Post-processing hook applied to every item the spiders yield.

    Registered via the ITEM_PIPELINES setting; see
    http://doc.scrapy.org/en/latest/topics/item-pipeline.html
    """

    def process_item(self, item, spider):
        """Return *item* unchanged — no processing is implemented yet."""
        return item
|
15
Scrapy/settings.py
Normal file
15
Scrapy/settings.py
Normal file
@ -0,0 +1,15 @@
|
|||||||
|
# Scrapy settings for Fourmi project
|
||||||
|
#
|
||||||
|
# For simplicity, this file contains only the most important settings by
|
||||||
|
# default. All the other settings are documented here:
|
||||||
|
#
|
||||||
|
# http://doc.scrapy.org/en/latest/topics/settings.html
|
||||||
|
#
|
||||||
|
|
||||||
|
# Name under which this crawler identifies itself to Scrapy.
BOT_NAME = 'Fourmi'

# Package searched for existing spiders, and where `scrapy genspider`
# places newly generated ones.
SPIDER_MODULES = ['Scrapy.spiders']
NEWSPIDER_MODULE = 'Scrapy.spiders'

# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'Fourmi (+http://www.yourdomain.com)'
|
BIN
Scrapy/settings.pyc
Normal file
BIN
Scrapy/settings.pyc
Normal file
Binary file not shown.
4
Scrapy/spiders/__init__.py
Normal file
4
Scrapy/spiders/__init__.py
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
# This package will contain the spiders of your Scrapy project
|
||||||
|
#
|
||||||
|
# Please refer to the documentation for information on how to create and manage
|
||||||
|
# your spiders.
|
BIN
Scrapy/spiders/__init__.pyc
Normal file
BIN
Scrapy/spiders/__init__.pyc
Normal file
Binary file not shown.
11
scrapy.cfg
Normal file
11
scrapy.cfg
Normal file
@ -0,0 +1,11 @@
|
|||||||
|
# Automatically created by: scrapy startproject
|
||||||
|
#
|
||||||
|
# For more information about the [deploy] section see:
|
||||||
|
# http://doc.scrapy.org/en/latest/topics/scrapyd.html
|
||||||
|
|
||||||
|
[settings]
default = Scrapy.settings

[deploy]
#url = http://localhost:6800/
project = Fourmi
|
Reference in New Issue
Block a user