Made all Python files PEP-8 Compatible

Jip J. Dekker 2014-03-28 14:11:36 +01:00
parent 5b17627504
commit 87d1041517
4 changed files with 31 additions and 23 deletions
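
For reference, a PEP-8 pass like the one in this commit can be checked locally with the pycodestyle checker (published under the name "pep8" at the time of this commit). A minimal sketch, not part of the commit itself:

    # Minimal PEP-8 check sketch; assumes the pycodestyle package is installed
    # (in 2014 the same API shipped as the "pep8" package).
    import pycodestyle

    # Check every Python file under the current directory against the default
    # 79-character line limit that this commit's line wrapping follows.
    style = pycodestyle.StyleGuide(max_line_length=79)
    report = style.check_files(['.'])
    print('PEP-8 violations found: %d' % report.total_errors)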

View File

@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 """
-Fourmi - An internet webcrawler searching for information on chemical compounds.
-[todo] - Add some more useful text here.
+Fourmi - An internet webcrawler searching for information on chemical
+compounds. [todo] - Add some more useful text here.
 """
 from twisted.internet import reactor
@@ -10,17 +10,19 @@ from scrapy import log, signals
 from FourmiCrawler.spiders.Fourmispider import FourmiSpider
 from scrapy.utils.project import get_project_settings

 def setup_crawler(searchable):
-    spider = FourmiSpider(compound=searchable) # [todo] - Do something smart to get the different spiders to work here.
+    spider = FourmiSpider(compound=searchable)
     settings = get_project_settings()
     crawler = Crawler(settings)
     crawler.configure()
     crawler.crawl(spider)
     crawler.start()

 def start():
     setup_crawler("Methane")
     log.start()
     reactor.run()

 start()
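
The file above wires a single FourmiSpider into a Scrapy Crawler and runs it under the Twisted reactor, with the compound hard-coded to "Methane" and a [todo] about getting several spiders to work. Purely as an illustration of where that [todo] could go (this is not code from the repository), one Scrapy 0.22-era pattern is to set up one Crawler per search term and stop the reactor once the last spider closes:

    # Illustrative sketch only, not part of this commit. Starts one crawler
    # per compound and stops the reactor once the last spider has closed.
    from twisted.internet import reactor
    from scrapy import log, signals
    from scrapy.crawler import Crawler
    from scrapy.utils.project import get_project_settings

    from FourmiCrawler.spiders.Fourmispider import FourmiSpider

    running = []


    def spider_closed(spider):
        # Invoked via Scrapy's spider_closed signal; stop the reactor when
        # the last running spider has finished.
        running.remove(spider)
        if not running:
            reactor.stop()


    def setup_crawler(searchable):
        spider = FourmiSpider(compound=searchable)
        crawler = Crawler(get_project_settings())
        crawler.signals.connect(spider_closed, signal=signals.spider_closed)
        crawler.configure()
        crawler.crawl(spider)
        crawler.start()
        running.append(spider)


    if __name__ == '__main__':
        for compound in ["Methane", "Ethanol"]:  # example search terms
            setup_crawler(compound)
        log.start()
        reactor.run()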

View File

@@ -5,6 +5,7 @@
 from scrapy.item import Item, Field

 class Result(Item):
     attribute = Field()
     value = Field()

View File

@@ -14,5 +14,7 @@ ITEM_PIPELINES = {
     'FourmiCrawler.pipelines.FourmiPipeline': 100
 }

-# Crawl responsibly by identifying yourself (and your website) on the user-agent
-#USER_AGENT = 'FourmiCrawler (+http://www.yourdomain.com)'
+# Crawl responsibly by identifying yourself (and your website) on the
+# user-agent
+# USER_AGENT = 'FourmiCrawler (+http://www.yourdomain.com)'

View File

@@ -1,12 +1,15 @@
 from scrapy.spider import Spider

 class FourmiSpider(Spider):
-    name="FourmiSpider"
+    name = "FourmiSpider"

     def __init__(self, compound=None, *args, **kwargs):
         super(FourmiSpider, self).__init__(*args, **kwargs)
-        # [TODO] - Initiate all parsers for the different websites and get allowed URLs.
+        # [TODO] - Initiate all parsers for the different websites and get
+        # allowed URLs.

     def parse(self, reponse):
-        # [TODO] - This function should delegate it's functionality to other parsers.
+        # [TODO] - This function should delegate it's functionality to other
+        # parsers.
         pass
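
The two [TODO] comments in this last file point at per-website parsers that the spider should eventually delegate to. As a purely hypothetical sketch of that idea (no such parser classes exist in the repository at this commit, and the parser interface is an assumption), parse() could hand the response to every registered parser and yield whatever Result items they produce:

    # Hypothetical sketch; the self.parsers list and the parser.parse
    # interface are assumptions, not code from this commit.
    from scrapy.spider import Spider


    class FourmiSpider(Spider):
        name = "FourmiSpider"

        def __init__(self, compound=None, *args, **kwargs):
            super(FourmiSpider, self).__init__(*args, **kwargs)
            self.compound = compound
            self.parsers = []  # per-website parsers registered here

        def parse(self, response):
            # Delegate the response to every registered parser and pass on
            # the Result items each of them yields.
            for parser in self.parsers:
                for result in parser.parse(response):
                    yield result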