Merge branch 'feature/code-cleanup' into develop

Commit 3c6412cc05
FourmiCrawler/items.py
@@ -1,6 +1,4 @@
-# Define here the models for your scraped items
-#
-# See documentation in:
+# For more information on item definitions, see the Scrapy documentation in:
 # http://doc.scrapy.org/en/latest/topics/items.html

 from scrapy.item import Item, Field
FourmiCrawler/pipelines.py
@@ -1,8 +1,7 @@
-# Define your item pipelines here
-#
-# Don't forget to add your pipeline to the ITEM_PIPELINES setting
-# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
+# For more information on item pipelines, see the Scrapy documentation in:
+# http://doc.scrapy.org/en/latest/topics/item-pipeline.html

 import re

 from scrapy.exceptions import DropItem
+
FourmiCrawler/sources/source.py
@@ -7,15 +7,32 @@ class Source:
     _spider = None

     def __init__(self):
+        """
+        Initiation of a new Source
+        """
         pass

-    def parse(self, reponse):
-        log.msg("The parse function of the empty parser was used.", level=log.WARNING)
+    def parse(self, response):
+        """
+        This function should be able to parse all Scrapy Response objects with a URL matching the website Regex.
+        :param response: A Scrapy Response object
+        :return: A list of Result items and new Scrapy Requests
+        """
+        log.msg("The parse function of the empty source was used.", level=log.WARNING)
         pass

     def new_compound_request(self, compound):
+        """
+        This function should return a Scrapy Request for the given compound request.
+        :param compound: A compound name.
+        :return: A new Scrapy Request
+        """
         # return Request(url=self.website[:-1] + compound, callback=self.parse)
         pass

     def set_spider(self, spider):
+        """
+        A Function to save the associated spider.
+        :param spider: A FourmiSpider object
+        """
         self._spider = spider
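For reference, not part of this commit: a minimal sketch of what a concrete source built on this interface could look like. The class name, URL and regex below are invented for illustration; only the `website` attribute and the `parse`/`new_compound_request` hooks come from the base class above.

    from scrapy.http import Request

    from FourmiCrawler.sources.source import Source


    class ExampleSource(Source):
        # Regex that the URLs of responses handled by this source should match
        # (FourmiSpider.parse uses re.match against this attribute).
        website = "http://www\\.example\\.com/.*"

        def parse(self, response):
            # Turn the response into Result items and/or follow-up Requests here.
            return []

        def new_compound_request(self, compound):
            # Build the search URL for a compound; the hypothetical endpoint is
            # only an example of the pattern hinted at in the base class.
            return Request(url="http://www.example.com/search/" + compound, callback=self.parse)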
FourmiCrawler/spider.py
@@ -1,43 +1,75 @@
+import re
+
 from scrapy.spider import Spider
 from scrapy import log
-import re


 class FourmiSpider(Spider):
+    """
+    A spider writen for the Fourmi Project which calls upon all available sources to request and scrape data.
+    """
     name = "FourmiSpider"
-    __parsers = []
+    __sources = []
     synonyms = []

     def __init__(self, compound=None, selected_attributes=[".*"], *args, **kwargs):
+        """
+        Initiation of the Spider
+        :param compound: compound that will be searched.
+        :param selected_attributes: A list of regular expressions that the attributes should match.
+        """
         super(FourmiSpider, self).__init__(*args, **kwargs)
         self.synonyms.append(compound)
         self.selected_attributes = selected_attributes;

-    def parse(self, reponse):
-        for parser in self.__parsers:
-            if re.match(parser.website, reponse.url):
-                log.msg("Url: " + reponse.url + " -> Source: " + parser.website, level=log.DEBUG)
-                return parser.parse(reponse)
+    def parse(self, response):
+        """
+        The function that is called when a response to a request is available. This function distributes this to a
+        source which should be able to handle parsing the data.
+        :param response: A Scrapy Response object that should be parsed
+        :return: A list of Result items and new Request to be handled by the scrapy core.
+        """
+        for source in self.__sources:
+            if re.match(source.website, response.url):
+                log.msg("Url: " + response.url + " -> Source: " + source.website, level=log.DEBUG)
+                return source.parse(response)
         return None

     def get_synonym_requests(self, compound):
+        """
+        A function that generates new Scrapy Request for each source given a new synonym of a compound.
+        :param compound: A compound name
+        :return: A list of Scrapy Request objects
+        """
         requests = []
-        for parser in self.__parsers:
+        for parser in self.__sources:
             parser_requests = parser.new_compound_request(compound)
             if parser_requests is not None:
                 requests.append(parser_requests)
         return requests

     def start_requests(self):
+        """
+        The function called by Scrapy for it's first Requests
+        :return: A list of Scrapy Request generated from the known synonyms using the available sources.
+        """
         requests = []
         for synonym in self.synonyms:
             requests.extend(self.get_synonym_requests(synonym))
         return requests

-    def add_parsers(self, parsers):
-        for parser in parsers:
-            self.add_parser(parser)
+    def add_sources(self, sources):
+        """
+        A function to add a new Parser objects to the list of available sources.
+        :param sources: A list of Source Objects.
+        """
+        for parser in sources:
+            self.add_source(parser)

-    def add_parser(self, parser):
-        self.__parsers.append(parser)
-        parser.set_spider(self)
+    def add_source(self, source):
+        """
+        A function add a new Parser object to the list of available parsers.
+        :param source: A Source Object
+        """
+        self.__sources.append(source)
+        source.set_spider(self)
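As a usage sketch (also not part of the diff), the renamed methods fit together roughly like this, assuming the illustrative ExampleSource from the previous sketch is importable:

    from FourmiCrawler.spider import FourmiSpider

    spider = FourmiSpider(compound="methane", selected_attributes=[".*"])
    spider.add_sources([ExampleSource()])  # each source gets set_spider(spider) called on it
    requests = spider.start_requests()     # one new_compound_request per source per known synonym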
fourmi.py (30 lines changed)
@@ -33,9 +33,16 @@ from FourmiCrawler.spider import FourmiSpider
 from sourceloader import SourceLoader


-def setup_crawler(searchable, settings, source_loader, attributes):
-    spider = FourmiSpider(compound=searchable, selected_attributes=attributes)
-    spider.add_parsers(source_loader.sources)
+def setup_crawler(compound, settings, source_loader, attributes):
+    """
+    This function prepares and start the crawler which starts the actual search on the internet
+    :param compound: The compound which should be searched
+    :param settings: A scrapy settings object
+    :param source_loader: A fully functional SourceLoader object which contains only the sources that should be used.
+    :param attributes: A list of regular expressions which the attribute names should match.
+    """
+    spider = FourmiSpider(compound=compound, selected_attributes=attributes)
+    spider.add_sources(source_loader.sources)
     crawler = Crawler(settings)
     crawler.signals.connect(reactor.stop, signal=signals.spider_closed)
     crawler.configure()
@@ -44,8 +51,13 @@ def setup_crawler(searchable, settings, source_loader, attributes):


 def scrapy_settings_manipulation(docopt_arguments):
+    """
+    This function manipulates the Scrapy settings that normally would be set in the settings file. In the Fourmi
+    project these are command line arguments.
+    :param docopt_arguments: A dictionary generated by docopt containing all CLI arguments.
+    """
     settings = get_project_settings()
-    # [todo] - add at least a warning for files that already exist
+
     if docopt_arguments["--output"] != 'result.*format*':
         settings.overrides["FEED_URI"] = docopt_arguments["--output"]
     elif docopt_arguments["--format"] == "jsonlines":
@@ -60,6 +72,10 @@ def scrapy_settings_manipulation(docopt_arguments):


 def start_log(docopt_arguments):
+    """
+    This function starts the logging functionality of Scrapy using the settings given by the CLI.
+    :param docopt_arguments: A dictionary generated by docopt containing all CLI arguments.
+    """
     if docopt_arguments["--log"] is not None:
         if docopt_arguments["--verbose"]:
             log.start(logfile=docopt_arguments["--log"], logstdout=False, loglevel=log.DEBUG)
@@ -73,12 +89,18 @@ def start_log(docopt_arguments):


 def search(docopt_arguments, source_loader):
+    """
+    The function that facilitates the search for a specific compound.
+    :param docopt_arguments: A dictionary generated by docopt containing all CLI arguments.
+    :param source_loader: An initiated SourceLoader object pointed at the directory with the sources.
+    """
     start_log(docopt_arguments)
     settings = scrapy_settings_manipulation(docopt_arguments)
     setup_crawler(docopt_arguments["<compound>"], settings, source_loader, docopt_arguments["--attributes"].split(','))
     reactor.run()


+# The start for the Fourmi Command Line interface.
 if __name__ == '__main__':
     arguments = docopt.docopt(__doc__, version='Fourmi - V0.3.0')
     loader = SourceLoader()
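A rough sketch of driving the same search flow from Python instead of the CLI, using only the docopt keys visible in this diff; the values are illustrative and search() may consult additional keys not shown here:

    from sourceloader import SourceLoader
    import fourmi

    arguments = {
        "<compound>": "methane",          # compound to search for
        "--attributes": ".*",             # keep every attribute
        "--output": "result.*format*",    # docopt default shown in the diff
        "--format": "jsonlines",
        "--log": None,
        "--verbose": False,
    }
    fourmi.search(arguments, SourceLoader())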
sourceloader.py
@@ -1,6 +1,7 @@
 import inspect
 import os
 import re

 from FourmiCrawler.sources.source import Source

+
@@ -8,6 +9,10 @@ class SourceLoader:
     sources = []

     def __init__(self, rel_dir="FourmiCrawler/sources"):
+        """
+        The initiation of a SourceLoader, selects and indexes a directory for usable sources.
+        :param rel_dir: A relative path to a directory.
+        """
         path = os.path.dirname(os.path.abspath(__file__))
         path += "/" + rel_dir
         known_parser = set()
@@ -21,18 +26,30 @@ class SourceLoader:
                     known_parser.add(cls)

     def include(self, source_names):
+        """
+        This function excludes all sources that don't match the given regular expressions.
+        :param source_names: A list of regular expression (strings)
+        """
         new = set()
         for name in source_names:
             new.update([src for src in self.sources if re.match(name, src.__class__.__name__)])
         self.sources = list(new)

     def exclude(self, source_names):
+        """
+        This function excludes all sources that match the given regular expressions.
+        :param source_names: A list of regular expression (strings)
+        """
         exclude = []
         for name in source_names:
             exclude.extend([src for src in self.sources if re.match(name, src.__class__.__name__)])
         self.sources = [src for src in self.sources if src not in exclude]

     def __str__(self):
+        """
+        This function returns a string with all sources currently available in the SourceLoader.
+        :return: a string with all available sources.
+        """
         string = ""
         for src in self.sources:
             string += "Source: " + src.__class__.__name__
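Finally, a small sketch of the include/exclude filtering documented above; the class-name patterns are invented for illustration:

    from sourceloader import SourceLoader

    loader = SourceLoader()         # scans FourmiCrawler/sources for Source subclasses
    loader.include(["Example.*"])   # keep only sources whose class name matches one of these patterns
    loader.exclude(["Broken.*"])    # then drop any remaining source whose class name matches these
    print(loader)                   # __str__ lists the sources that remain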