Archived
1
0

Added feed export functionality

This commit is contained in:
Jip J. Dekker 2014-04-15 19:40:54 +02:00
parent e65d3a6898
commit 61ca2520e3
2 changed files with 25 additions and 5 deletions

View File

@ -13,6 +13,9 @@ NEWSPIDER_MODULE = 'FourmiCrawler'
ITEM_PIPELINES = { ITEM_PIPELINES = {
'FourmiCrawler.pipelines.FourmiPipeline': 100 'FourmiCrawler.pipelines.FourmiPipeline': 100
} }
FEED_URI = 'results.json'
FEED_FORMAT = 'jsonlines'
# Crawl responsibly by identifying yourself (and your website) on the # Crawl responsibly by identifying yourself (and your website) on the
# user-agent # user-agent

View File

@ -14,7 +14,7 @@ Options:
--verbose Verbose logging output. --verbose Verbose logging output.
--log=<file>           Save log to a file. --log=<file>           Save log to a file.
-o <file> --output=<file> Output file [default: result.*format*] -o <file> --output=<file> Output file [default: result.*format*]
-f <format> --format=<format> Output formats [default: jsonlines | supported: csv, json, jsonlines, xml] -f <format> --format=<format> Output formats (supported: csv, json, jsonlines, xml) [default: jsonlines]
""" """
import os import os
@ -46,10 +46,9 @@ def load_parsers(rel_dir="FourmiCrawler/parsers"):
return parsers return parsers
def setup_crawler(searchable): def setup_crawler(searchable, settings):
spider = FourmiSpider(compound=searchable) spider = FourmiSpider(compound=searchable)
spider.add_parsers(load_parsers()) spider.add_parsers(load_parsers())
settings = get_project_settings()
crawler = Crawler(settings) crawler = Crawler(settings)
crawler.signals.connect(reactor.stop, signal=signals.spider_closed) crawler.signals.connect(reactor.stop, signal=signals.spider_closed)
crawler.configure() crawler.configure()
@ -57,6 +56,22 @@ def setup_crawler(searchable):
crawler.start() crawler.start()
def scrapy_settings_manipulation(arguments):
settings = get_project_settings()
if arguments["--output"] != 'result.*format*':
settings.overrides["FEED_URI"] = arguments["--output"]
elif arguments["--format"] == "jsonlines":
settings.overrides["FEED_URI"] = "results.json"
elif arguments["--format"] is not None:
settings.overrides["FEED_URI"] = "results." + arguments["--format"]
if arguments["--format"] is not None:
settings.overrides["FEED_FORMAT"] = arguments["--format"]
return settings
def start_log(arguments): def start_log(arguments):
if arguments["--log"] is not None: if arguments["--log"] is not None:
if arguments["--verbose"]: if arguments["--verbose"]:
@ -73,6 +88,8 @@ def start_log(arguments):
if __name__ == '__main__': if __name__ == '__main__':
arguments = docopt.docopt(__doc__, version='Fourmi - V0.0.1a') arguments = docopt.docopt(__doc__, version='Fourmi - V0.0.1a')
start_log(arguments) start_log(arguments)
setup_crawler([arguments["<compound>"]]) print arguments
settings = scrapy_settings_manipulation(arguments)
setup_crawler([arguments["<compound>"]], settings)
reactor.run() reactor.run()