
Added the functionality to add parsers and automatically use them.

Jip J. Dekker 2014-03-30 23:37:42 +02:00
parent 6e2df64fe4
commit 0cc1b23353
3 changed files with 33 additions and 23 deletions

View File

@@ -9,23 +9,25 @@ from scrapy.crawler import Crawler
 from scrapy import log, signals
 from FourmiCrawler.spider import FourmiSpider
 from scrapy.utils.project import get_project_settings
+from FourmiCrawler.parsers.parser import Parser
+


 def setup_crawler(searchable):
-    # [TODO] - Initiate all parsers for the different websites and get
-    # allowed URLs.
+    # [TODO] - Initiate all parsers for the different websites and get allowed URLs.
     spider = FourmiSpider(compound=searchable)
+    spider.add_parser(Parser())
     settings = get_project_settings()
     crawler = Crawler(settings)
     crawler.signals.connect(reactor.stop, signal=signals.spider_closed)
     crawler.configure()
     crawler.crawl(spider)
     crawler.start()


 def start():
     setup_crawler("Methane")
     log.start()
     reactor.run()

 start()
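Note: the new spider.add_parser(Parser()) call wires the stub parser into the spider before the crawl starts. Once per-website parsers exist (the remaining [TODO]), registration would naturally become a loop. A minimal sketch of that direction; the parser_classes parameter is illustrative and not part of this commit:

# Hypothetical follow-up to this commit's setup_crawler; only the
# parser_classes argument is new, everything else mirrors the diff above.
from twisted.internet import reactor
from scrapy import log, signals
from scrapy.crawler import Crawler
from scrapy.utils.project import get_project_settings

from FourmiCrawler.spider import FourmiSpider
from FourmiCrawler.parsers.parser import Parser


def setup_crawler(searchable, parser_classes=(Parser,)):
    spider = FourmiSpider(compound=searchable)
    for parser_class in parser_classes:
        # Each parser instance carries its own `website` pattern, so the
        # spider can route responses to the right parser by URL.
        spider.add_parser(parser_class())
    settings = get_project_settings()
    crawler = Crawler(settings)
    crawler.signals.connect(reactor.stop, signal=signals.spider_closed)
    crawler.configure()
    crawler.crawl(spider)
    crawler.start()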

View File: FourmiCrawler/parsers/parser.py

@@ -2,8 +2,11 @@ from scrapy import log


 class Parser:
+    '''
+    website should be an regular expression of websites you want to parse.
+    '''
     website = "http://localhost/*"

     def parse(self, reponse):
-        log.msg("The parse function of the empty parser was used.", level=log.Warning)
+        log.msg("The parse function of the empty parser was used.", level=log.WARNING)
         pass
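Note: the docstring spells out the contract this commit introduces: a concrete parser overrides `website` with a regular expression and implements `parse`. (The parameter is consistently misspelled `reponse` for `response`; since it is only ever passed positionally, this is harmless at runtime.) A minimal sketch of a concrete subclass; the class name and URL pattern are illustrative only and do not exist in the codebase:

from scrapy import log

from FourmiCrawler.parsers.parser import Parser


class ExampleParser(Parser):
    # The spider matches this with re.match against response.url, so the
    # pattern is anchored at the start of the URL; literal dots are escaped.
    website = r"http://www\.example\.com/.*"

    def parse(self, reponse):
        # A real parser would extract items from the response here;
        # this stub only logs which URL it was handed.
        log.msg("ExampleParser got: " + reponse.url, level=log.DEBUG)
        return None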

View File: FourmiCrawler/spider.py

@@ -1,19 +1,24 @@
 from scrapy.spider import Spider
+from scrapy import log
+import re


 class FourmiSpider(Spider):
     name = "FourmiSpider"
     start_urls = ["http://localhost/"]
+    parsers = []

     def __init__(self, compound=None, *args, **kwargs):
         super(FourmiSpider, self).__init__(*args, **kwargs)
         self.synonyms = [compound]

     def parse(self, reponse):
-        # [TODO] - This function should delegate it's functionality to other
-        # parsers.
-        pass
+        for parser in self.parsers:
+            if re.match(parser.website, reponse.url):
+                log.msg("Url: " + reponse.url + " -> Parser: " + parser.website, level=log.DEBUG)
+                return parser.parse(reponse)
+        return none

     def add_parser(self, parser):
-        self.parsers.add(parser)
+        self.parsers.append(parser)
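Note: two bugs survive in the new spider. `return none` raises a NameError whenever no parser matches (Python's null value is `None`), and the class-level `parsers = []` is shared by every FourmiSpider instance, so parsers registered on one spider leak into all others. Also note that `website` is matched as a regular expression, not a glob: in the default "http://localhost/*" the `/*` means "zero or more slashes", and matching only works because re.match accepts a prefix match. A corrected sketch with those fixes applied (and `reponse` respelled `response`); this is not the code in the commit:

import re

from scrapy.spider import Spider
from scrapy import log


class FourmiSpider(Spider):
    name = "FourmiSpider"
    start_urls = ["http://localhost/"]

    def __init__(self, compound=None, *args, **kwargs):
        super(FourmiSpider, self).__init__(*args, **kwargs)
        self.synonyms = [compound]
        # An instance attribute instead of a class-level list, so two
        # spiders never share one parser registry.
        self.parsers = []

    def parse(self, response):
        # Route the response to the first parser whose website pattern
        # matches the URL; fall through to None if nothing matches.
        for parser in self.parsers:
            if re.match(parser.website, response.url):
                log.msg("Url: " + response.url + " -> Parser: " + parser.website,
                        level=log.DEBUG)
                return parser.parse(response)
        # `None` (capitalised), not `none`, avoids the NameError.
        return None

    def add_parser(self, parser):
        self.parsers.append(parser)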