Added the functionality to add parsers and automatically use them.
commit 0cc1b23353
parent 6e2df64fe4
@@ -9,12 +9,13 @@ from scrapy.crawler import Crawler
 from scrapy import log, signals
 from FourmiCrawler.spider import FourmiSpider
 from scrapy.utils.project import get_project_settings
+from FourmiCrawler.parsers.parser import Parser


 def setup_crawler(searchable):
-    # [TODO] - Initiate all parsers for the different websites and get
-    # allowed URLs.
+    # [TODO] - Initiate all parsers for the different websites and get allowed URLs.
     spider = FourmiSpider(compound=searchable)
+    spider.add_parser(Parser())
     settings = get_project_settings()
     crawler = Crawler(settings)
     crawler.signals.connect(reactor.stop, signal=signals.spider_closed)
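For context: the new Parser import and the spider.add_parser(Parser()) call turn setup_crawler into the registration point that the [TODO] comment refers to. A minimal sketch of how that registration could look once per-website parsers exist; WikipediaParser, ChemSpiderParser, and their URL patterns are hypothetical names used only for illustration:

    # Sketch only, not part of this commit: hypothetical Parser subclasses.
    from FourmiCrawler.spider import FourmiSpider
    from FourmiCrawler.parsers.parser import Parser


    class WikipediaParser(Parser):
        website = "http://en.wikipedia.org/wiki/.*"  # hypothetical URL pattern


    class ChemSpiderParser(Parser):
        website = "http://www.chemspider.com/.*"     # hypothetical URL pattern


    def setup_parsers(spider):
        # Register one parser per supported website; the spider later matches
        # response URLs against each parser's 'website' pattern.
        for parser in [WikipediaParser(), ChemSpiderParser()]:
            spider.add_parser(parser)


    spider = FourmiSpider(compound="caffeine")
    setup_parsers(spider)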
@@ -28,4 +29,5 @@ def start():
     log.start()
     reactor.run()

+
 start()
@@ -2,8 +2,11 @@ from scrapy import log


 class Parser:
+    '''
+    website should be an regular expression of websites you want to parse.
+    '''
     website = "http://localhost/*"

     def parse(self, reponse):
-        log.msg("The parse function of the empty parser was used.", level=log.Warning)
+        log.msg("The parse function of the empty parser was used.", level=log.WARNING)
         pass
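The new docstring spells out the contract for concrete parsers: subclass Parser, set website to a regular expression describing the URLs the parser can handle, and override parse. A minimal sketch under those assumptions; ExampleParser and its URL pattern are hypothetical:

    # Sketch only, not part of this commit.
    from scrapy import log

    from FourmiCrawler.parsers.parser import Parser


    class ExampleParser(Parser):
        # Responses whose URL matches this pattern get routed to this parser.
        website = "http://example.com/chemical/.*"

        def parse(self, response):
            # A real parser would extract attributes with Scrapy selectors here;
            # returning an empty list keeps the sketch runnable.
            log.msg("ExampleParser got " + response.url, level=log.DEBUG)
            return []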
@@ -1,19 +1,24 @@
 from scrapy.spider import Spider
+from scrapy import log
+import re


 class FourmiSpider(Spider):
     name = "FourmiSpider"
+    start_urls = ["http://localhost/"]
+    parsers = []

     def __init__(self, compound=None, *args, **kwargs):
         super(FourmiSpider, self).__init__(*args, **kwargs)
         self.synonyms = [compound]

-
     def parse(self, reponse):
-        # [TODO] - This function should delegate it's functionality to other
-        # parsers.
-        pass
+        for parser in self.parsers:
+            if re.match(parser.website, reponse.url):
+                log.msg("Url: " + reponse.url + " -> Parser: " + parser.website, level=log.DEBUG)
+                return parser.parse(reponse)
+        return none


     def add_parser(self, parser):
-        self.parsers.add(parser)
+        self.parsers.append(parser)
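The rewritten FourmiSpider.parse now walks the registered parsers and delegates the response to the first one whose website pattern matches the response URL (re.match anchors at the start of the string). A rough, self-contained illustration of that routing; LocalParser, FakeResponse, and the compound name are illustration-only stand-ins, not part of the commit:

    # Sketch only, not part of this commit.
    from FourmiCrawler.spider import FourmiSpider
    from FourmiCrawler.parsers.parser import Parser


    class LocalParser(Parser):
        website = "http://localhost/.*"   # matches the spider's default start URL

        def parse(self, response):
            return "parsed " + response.url


    class FakeResponse(object):
        # Minimal stand-in for a Scrapy Response; the spider only reads .url.
        def __init__(self, url):
            self.url = url


    spider = FourmiSpider(compound="caffeine")
    spider.add_parser(LocalParser())

    # The URL matches LocalParser.website, so its parse() handles the response.
    print(spider.parse(FakeResponse("http://localhost/caffeine")))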