Archived
1
0

Small fix to ensure unique classes and load all parsers

This commit is contained in:
Jip J. Dekker 2014-04-08 11:43:32 +02:00
parent da17a149c0
commit 622dd4ad00

View File

@ -8,6 +8,7 @@ Version: v0.0.1 - Empty Application that could do something but all logic of web
from twisted.internet import reactor
from scrapy.crawler import Crawler
from scrapy import log, signals
from FourmiCrawler.parsers.parser import Parser
from FourmiCrawler.spider import FourmiSpider
from scrapy.utils.project import get_project_settings
import os, inspect, re
def load_parsers(rel_dir="FourmiCrawler/parsers"):
    """Discover, import, and instantiate every Parser subclass in *rel_dir*.

    Scans the directory (relative to this file) for ``*.py`` modules,
    imports each one, and collects one instance of every class that

      * is actually defined in a file under that directory, and
      * subclasses :class:`Parser`,

    de-duplicating so each parser class is instantiated at most once even
    if it is re-exported by several modules.

    :param rel_dir: directory of parser modules, relative to this file
    :return: list of freshly constructed parser instances
    """
    path = os.path.dirname(os.path.abspath(__file__))
    path += "/" + rel_dir
    parsers = []
    known_parsers = set()
    for py in [f[:-3] for f in os.listdir(path)
               if f.endswith('.py') and f != '__init__.py']:
        mod = __import__('.'.join([rel_dir.replace("/", "."), py]),
                         fromlist=[py])
        classes = [getattr(mod, x) for x in dir(mod)
                   if inspect.isclass(getattr(mod, x))]
        for cls in classes:
            # Only accept classes defined under `path`. A plain prefix test
            # replaces the old re.match(path + "/*", ...), which treated
            # regex metacharacters in the path (e.g. '.') as wildcards and
            # whose "/*" meant "zero or more slashes", not "slash + anything".
            if inspect.getfile(cls).startswith(path):
                if issubclass(cls, Parser) and cls not in known_parsers:
                    # [review] - Would we ever need arguments for the parsers?
                    parsers.append(cls())
                    known_parsers.add(cls)
    return parsers
def setup_crawler(searchables):