diff --git a/Scrapy/__init__.py b/Scrapy/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/Scrapy/__init__.pyc b/Scrapy/__init__.pyc
new file mode 100644
index 0000000..f1096fd
Binary files /dev/null and b/Scrapy/__init__.pyc differ
diff --git a/Scrapy/items.py b/Scrapy/items.py
new file mode 100644
index 0000000..17b9d3d
--- /dev/null
+++ b/Scrapy/items.py
@@ -0,0 +1,11 @@
+# Define here the models for your scraped items
+#
+# See documentation in:
+# http://doc.scrapy.org/en/latest/topics/items.html
+
+from scrapy.item import Item, Field
+
+class FourmiItem(Item):
+    # define the fields for your item here like:
+    # name = Field()
+    pass
diff --git a/Scrapy/pipelines.py b/Scrapy/pipelines.py
new file mode 100644
index 0000000..3345787
--- /dev/null
+++ b/Scrapy/pipelines.py
@@ -0,0 +1,8 @@
+# Define your item pipelines here
+#
+# Don't forget to add your pipeline to the ITEM_PIPELINES setting
+# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
+
+class FourmiPipeline(object):
+    def process_item(self, item, spider):
+        return item
diff --git a/Scrapy/settings.py b/Scrapy/settings.py
new file mode 100644
index 0000000..e43aa2b
--- /dev/null
+++ b/Scrapy/settings.py
@@ -0,0 +1,15 @@
+# Scrapy settings for Fourmi project
+#
+# For simplicity, this file contains only the most important settings by
+# default. All the other settings are documented here:
+#
+# http://doc.scrapy.org/en/latest/topics/settings.html
+#
+
+BOT_NAME = 'Fourmi'
+
+SPIDER_MODULES = ['Scrapy.spiders']
+NEWSPIDER_MODULE = 'Scrapy.spiders'
+
+# Crawl responsibly by identifying yourself (and your website) on the user-agent
+#USER_AGENT = 'Fourmi (+http://www.yourdomain.com)'
diff --git a/Scrapy/settings.pyc b/Scrapy/settings.pyc
new file mode 100644
index 0000000..828e883
Binary files /dev/null and b/Scrapy/settings.pyc differ
diff --git a/Scrapy/spiders/__init__.py b/Scrapy/spiders/__init__.py
new file mode 100644
index 0000000..ebd689a
--- /dev/null
+++ b/Scrapy/spiders/__init__.py
@@ -0,0 +1,4 @@
+# This package will contain the spiders of your Scrapy project
+#
+# Please refer to the documentation for information on how to create and manage
+# your spiders.
diff --git a/Scrapy/spiders/__init__.pyc b/Scrapy/spiders/__init__.pyc
new file mode 100644
index 0000000..c2fd939
Binary files /dev/null and b/Scrapy/spiders/__init__.pyc differ
diff --git a/scrapy.cfg b/scrapy.cfg
new file mode 100644
index 0000000..6f432fb
--- /dev/null
+++ b/scrapy.cfg
@@ -0,0 +1,11 @@
+# Automatically created by: scrapy startproject
+#
+# For more information about the [deploy] section see:
+# http://doc.scrapy.org/en/latest/topics/scrapyd.html
+
+[settings]
+default = Scrapy.settings
+
+[deploy]
+#url = http://localhost:6800/
+project = Fourmi