diff --git a/Fourmi.py b/Fourmi.py
index a0f54a4..7c3cf7d 100755
--- a/Fourmi.py
+++ b/Fourmi.py
@@ -8,6 +8,7 @@ Version: v0.0.1 - Empty Application that could do something but all logic of web
 from twisted.internet import reactor
 from scrapy.crawler import Crawler
 from scrapy import log, signals
+from FourmiCrawler.parsers.parser import Parser
 from FourmiCrawler.spider import FourmiSpider
 from scrapy.utils.project import get_project_settings
 import os, inspect, re
@@ -16,13 +17,15 @@ def load_parsers(rel_dir="FourmiCrawler/parsers"):
     path = os.path.dirname(os.path.abspath(__file__))
     path += "/" + rel_dir
     parsers = []
+    known_parser = set()
     for py in [f[:-3] for f in os.listdir(path) if f.endswith('.py') and f != '__init__.py']:
         mod = __import__('.'.join([rel_dir.replace("/", "."), py]), fromlist=[py])
         classes = [getattr(mod, x) for x in dir(mod) if inspect.isclass(getattr(mod, x))]
         for cls in classes:
-            if re.match(path + "/*", inspect.getfile(cls)):
+            if issubclass(cls, Parser) and cls not in known_parser:
                 parsers.append(cls()) # [review] - Would we ever need arguments for the parsers?
+                known_parser.add(cls)
     return parsers
 
 
 def setup_crawler(searchables):
diff --git a/FourmiCrawler/spider.py b/FourmiCrawler/spider.py
index 42f1e15..327de42 100644
--- a/FourmiCrawler/spider.py
+++ b/FourmiCrawler/spider.py
@@ -25,7 +25,9 @@ class FourmiSpider(Spider):
     def get_synonym_requests(self, compound):
         requests = []
         for parser in self.__parsers:
-            requests.append(parser.new_compound_request(compound))
+            parser_requests = parser.new_compound_request(compound)
+            if parser_requests is not None:
+                requests.append(parser_requests)
         return requests
 
     def start_requests(self):