From da17a149c04437e42246eb11a0b0f3c7d9a13e9b Mon Sep 17 00:00:00 2001 From: "Jip J. Dekker" Date: Tue, 8 Apr 2014 11:42:43 +0200 Subject: [PATCH 1/2] Spider is now able to handle None requests from parsers while handling new compounds --- FourmiCrawler/spider.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/FourmiCrawler/spider.py b/FourmiCrawler/spider.py index 42f1e15..327de42 100644 --- a/FourmiCrawler/spider.py +++ b/FourmiCrawler/spider.py @@ -25,7 +25,9 @@ class FourmiSpider(Spider): def get_synonym_requests(self, compound): requests = [] for parser in self.__parsers: - requests.append(parser.new_compound_request(compound)) + parser_requests = parser.new_compound_request(compound) + if parser_requests is not None: + requests.append(parser_requests) return requests def start_requests(self): From 622dd4ad002d9d9457bf3e55ad949eceb98710a2 Mon Sep 17 00:00:00 2001 From: "Jip J. Dekker" Date: Tue, 8 Apr 2014 11:43:32 +0200 Subject: [PATCH 2/2] Small fix to ensure unique classes and load all parsers --- Fourmi.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/Fourmi.py b/Fourmi.py index a0f54a4..7c3cf7d 100755 --- a/Fourmi.py +++ b/Fourmi.py @@ -8,6 +8,7 @@ Version: v0.0.1 - Empty Application that could do something but all logic of web from twisted.internet import reactor from scrapy.crawler import Crawler from scrapy import log, signals +from FourmiCrawler.parsers.parser import Parser from FourmiCrawler.spider import FourmiSpider from scrapy.utils.project import get_project_settings import os, inspect, re @@ -16,13 +17,15 @@ def load_parsers(rel_dir="FourmiCrawler/parsers"): path = os.path.dirname(os.path.abspath(__file__)) path += "/" + rel_dir parsers = [] + known_parser = set() for py in [f[:-3] for f in os.listdir(path) if f.endswith('.py') and f != '__init__.py']: mod = __import__('.'.join([rel_dir.replace("/", "."), py]), fromlist=[py]) classes = [getattr(mod, x) for x in dir(mod) if inspect.isclass(getattr(mod, 
x))] for cls in classes: - if re.match(path + "/*", inspect.getfile(cls)): + if issubclass(cls, Parser) and cls not in known_parser: parsers.append(cls()) # [review] - Would we ever need arguments for the parsers? + known_parser.add(cls) return parsers def setup_crawler(searchables):