
Merge branch 'hotfix/No_TABs'

Jip J. Dekker 2014-04-02 14:21:34 +02:00
commit 9805bb5adb
4 changed files with 66 additions and 66 deletions


@@ -13,33 +13,33 @@ from scrapy.utils.project import get_project_settings

import os, inspect, re


def load_parsers(rel_dir="FourmiCrawler/parsers"):
    path = os.path.dirname(os.path.abspath(__file__))
    path += "/" + rel_dir
    parsers = []

    for py in [f[:-3] for f in os.listdir(path) if f.endswith('.py') and f != '__init__.py']:
        mod = __import__('.'.join([rel_dir.replace("/", "."), py]), fromlist=[py])
        classes = [getattr(mod, x) for x in dir(mod) if inspect.isclass(getattr(mod, x))]
        for cls in classes:
            if re.match(path + "/*", inspect.getfile(cls)):
                parsers.append(cls())  # [review] - Would we ever need arguments for the parsers?
    return parsers


def setup_crawler(searchables):
    spider = FourmiSpider(compounds=searchables)
    spider.add_parsers(load_parsers())
    settings = get_project_settings()
    crawler = Crawler(settings)
    crawler.signals.connect(reactor.stop, signal=signals.spider_closed)
    crawler.configure()
    crawler.crawl(spider)
    crawler.start()


def start():
    setup_crawler(["Methane"])
    log.start()
    reactor.run()


start()
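
For context, load_parsers() discovers parsers by importing every non-__init__ module in FourmiCrawler/parsers and instantiating, with no arguments, each class that is defined inside that directory. A minimal sketch of a module it would pick up; the file name and class name here are hypothetical and not part of this commit:

# Hypothetical file FourmiCrawler/parsers/exampleparser.py (illustrative only).
# Assumes the Parser base class shown in the next file lives in
# FourmiCrawler/parsers/parser.py.
from FourmiCrawler.parsers.parser import Parser


class ExampleParser(Parser):
    # load_parsers() calls cls() with no arguments, so the class must be
    # constructible without parameters; configuration lives in class attributes.
    website = "http://example.org/*"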


@@ -3,19 +3,19 @@ from scrapy import log


class Parser:
    '''
    website should be a regular expression matching the URLs of the requests the parser is able to parse.
    '''
    website = "http://something/*"
    __spider = None

    def parse(self, reponse):
        log.msg("The parse function of the empty parser was used.", level=log.WARNING)
        pass

    def new_compound_request(self, compound):
        # return Request(url=self.website[:-1] + compound, callback=self.parse)
        pass

    def set_spider(self, spider):
        self.__spider = spider
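
The commented-out line in new_compound_request hints at the intended contract: a concrete parser turns a compound name into a scrapy Request and lets the downloader call back into its parse method. A hedged sketch of such a subclass, with a hypothetical class name and site pattern (and assuming the same base-class module path as above):

from scrapy import log
from scrapy.http import Request

from FourmiCrawler.parsers.parser import Parser


class ExampleSourceParser(Parser):  # hypothetical subclass, for illustration only
    website = "http://data.example.org/*"

    def new_compound_request(self, compound):
        # Drop the trailing '*' from the pattern and append the compound name,
        # mirroring the commented-out example in the base class.
        return Request(url=self.website[:-1] + compound, callback=self.parse)

    def parse(self, reponse):
        log.msg("Parsing " + reponse.url, level=log.DEBUG)
        return []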


@@ -19,7 +19,7 @@ class FourmiPipeline(object):
        """
        value = item['attribute'], item['value']
        if value in self.known_values:
            raise DropItem("Duplicate item found: %s" % item)  # [todo] - append sources of first item.
        else:
            self.known_values.add(value)
            return item
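
The hunk above only shows the tail of process_item; the surrounding pieces (a no-argument __init__ that creates self.known_values as a set, and the process_item(self, item, spider) signature) are assumed in the small usage sketch below, which shows how a repeated (attribute, value) pair is rejected:

from scrapy.exceptions import DropItem

from FourmiCrawler.pipelines import FourmiPipeline

pipeline = FourmiPipeline()
item = {'attribute': 'boiling point', 'value': '-161.5 C'}  # illustrative item

pipeline.process_item(item, spider=None)             # accepted; the pair is remembered
try:
    pipeline.process_item(dict(item), spider=None)   # same (attribute, value) pair
except DropItem:
    pass  # the duplicate is dropped here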


@@ -4,40 +4,40 @@ import re


class FourmiSpider(Spider):
    name = "FourmiSpider"
    __parsers = []
    synonyms = []

    def __init__(self, compounds=None, *args, **kwargs):
        super(FourmiSpider, self).__init__(*args, **kwargs)
        if isinstance(compounds, list):
            self.synonyms.extend(compounds)
        else:
            self.synonyms.append(compounds)

    def parse(self, reponse):
        for parser in self.__parsers:
            if re.match(parser.website, reponse.url):
                log.msg("Url: " + reponse.url + " -> Parser: " + parser.website, level=log.DEBUG)
                return parser.parse(reponse)
        return None

    def get_synonym_requests(self, compound):
        requests = []
        for parser in self.__parsers:
            requests.append(parser.new_compound_request(compound))
        return requests

    def start_requests(self):
        requests = []
        for synonym in self.synonyms:
            requests.extend(self.get_synonym_requests(synonym))
        return requests

    def add_parsers(self, parsers):
        for parser in parsers:
            self.add_parser(parser)

    def add_parser(self, parser):
        self.__parsers.append(parser)
        parser.set_spider(self)
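
FourmiSpider.parse never extracts data itself; it only forwards the response to the first registered parser whose website pattern matches the response URL. A minimal sketch of that dispatch rule, using a stub parser and a stand-in response object (both hypothetical, for illustration only):

import re


class StubParser:  # mimics just the piece of the Parser interface the spider uses here
    website = "http://example.org/*"

    def parse(self, reponse):
        return ["parsed " + reponse.url]


class StubResponse:  # stand-in for a scrapy Response; only .url is needed
    def __init__(self, url):
        self.url = url


# The same matching rule FourmiSpider.parse applies to each registered parser:
parser = StubParser()
reponse = StubResponse("http://example.org/Methane")
if re.match(parser.website, reponse.url):
    print(parser.parse(reponse))  # ['parsed http://example.org/Methane']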