CLI accessibility and cleanup
parent 12eb340042
commit 2cb6c14704

Changed file: fourmi.py (43)
fourmi.py

@@ -5,6 +5,7 @@ Fourmi, a web scraper build to search specific information for a given compound
 Usage:
     fourmi search <compound>
     fourmi [options] search <compound>
+    fourmi [options] [--include=<sourcename> | --exclude=<sourcename>] search <compound>
     fourmi list
     fourmi [--include=<sourcename> | --exclude=<sourcename>] list
     fourmi -h | --help
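The new usage pattern makes --include and --exclude available on search as well as on list. As a minimal sketch of how docopt turns such a pattern into the arguments dictionary fourmi.py consumes, consider the following; the trimmed-down docstring, the example argv and the option descriptions are illustrative stand-ins, not taken from the repository:

    # Minimal docopt sketch (illustrative; not part of this commit).
    from docopt import docopt

    usage = """Fourmi
    Usage:
        fourmi search <compound>
        fourmi [--include=<sourcename> | --exclude=<sourcename>] search <compound>

    Options:
        --include=<sourcename>  Only use sources matching this regular expression.
        --exclude=<sourcename>  Skip sources matching this regular expression.
    """

    arguments = docopt(usage, argv=["--include=ChemSpider", "search", "methane"])
    print(arguments["search"])      # True
    print(arguments["<compound>"])  # 'methane'
    print(arguments["--include"])   # 'ChemSpider'
    print(arguments["--exclude"])   # None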
@@ -31,9 +32,9 @@ from FourmiCrawler.spider import FourmiSpider
 from sourceloader import SourceLoader
 
 
-def setup_crawler(searchable, settings, loader):
+def setup_crawler(searchable, settings, source_loader):
     spider = FourmiSpider(compound=searchable)
-    spider.add_parsers(loader.sources)
+    spider.add_parsers(source_loader.sources)
     crawler = Crawler(settings)
     crawler.signals.connect(reactor.stop, signal=signals.spider_closed)
     crawler.configure()
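setup_crawler() follows the pre-1.0 Scrapy crawling recipe: build the spider, feed it the loader's sources as parsers, hook spider_closed up to reactor.stop so the process ends when the crawl does, then configure and start the crawler. Below is a condensed, hedged sketch of that flow end to end; the crawler.crawl(spider) call and the no-argument SourceLoader() constructor are assumptions, since neither appears in the hunks shown here:

    # Hedged sketch of the crawl flow (pre-1.0 Scrapy API, as used in fourmi.py).
    from scrapy import signals
    from scrapy.crawler import Crawler
    from scrapy.utils.project import get_project_settings
    from twisted.internet import reactor

    from FourmiCrawler.spider import FourmiSpider
    from sourceloader import SourceLoader

    def run_search(compound):
        source_loader = SourceLoader()          # assumed no-argument constructor
        settings = get_project_settings()
        spider = FourmiSpider(compound=compound)
        spider.add_parsers(source_loader.sources)
        crawler = Crawler(settings)
        # stop the Twisted reactor once the spider finishes
        crawler.signals.connect(reactor.stop, signal=signals.spider_closed)
        crawler.configure()
        crawler.crawl(spider)                    # assumed call, not visible in this diff
        crawler.start()
        reactor.run()                            # blocks until spider_closed fires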
@@ -41,39 +42,39 @@ def setup_crawler(searchable, settings, loader):
     crawler.start()
 
 
-def scrapy_settings_manipulation(arguments):
+def scrapy_settings_manipulation(docopt_arguments):
     settings = get_project_settings()
-    if arguments["--output"] != 'result.*format*':
-        settings.overrides["FEED_URI"] = arguments["--output"]
-    elif arguments["--format"] == "jsonlines":
+    # [todo] - add at least a warning for files that already exist
+    if docopt_arguments["--output"] != 'result.*format*':
+        settings.overrides["FEED_URI"] = docopt_arguments["--output"]
+    elif docopt_arguments["--format"] == "jsonlines":
         settings.overrides["FEED_URI"] = "results.json"
-    elif arguments["--format"] is not None:
-        settings.overrides["FEED_URI"] = "results." + arguments["--format"]
+    elif docopt_arguments["--format"] is not None:
+        settings.overrides["FEED_URI"] = "results." + docopt_arguments["--format"]
 
-    if arguments["--format"] is not None:
-        settings.overrides["FEED_FORMAT"] = arguments["--format"]
+    if docopt_arguments["--format"] is not None:
+        settings.overrides["FEED_FORMAT"] = docopt_arguments["--format"]
 
     return settings
 
 
-def start_log(arguments):
-    if arguments["--log"] is not None:
-        if arguments["--verbose"]:
-            log.start(logfile=arguments["--log"], logstdout=False, loglevel=log.DEBUG)
+def start_log(docopt_arguments):
+    if docopt_arguments["--log"] is not None:
+        if docopt_arguments["--verbose"]:
+            log.start(logfile=docopt_arguments["--log"], logstdout=False, loglevel=log.DEBUG)
         else:
-            log.start(logfile=arguments["--log"], logstdout=True, loglevel=log.WARNING)
+            log.start(logfile=docopt_arguments["--log"], logstdout=True, loglevel=log.WARNING)
     else:
-        if arguments["--verbose"]:
+        if docopt_arguments["--verbose"]:
             log.start(logstdout=False, loglevel=log.DEBUG)
         else:
             log.start(logstdout=True, loglevel=log.WARNING)
 
 
-def search(arguments, loader):
-    start_log(arguments)
-    settings = scrapy_settings_manipulation(arguments)
-    setup_crawler(arguments["<compound>"], settings, loader)
+def search(docopt_arguments, source_loader):
+    start_log(docopt_arguments)
+    settings = scrapy_settings_manipulation(docopt_arguments)
+    setup_crawler(docopt_arguments["<compound>"], settings, source_loader)
     reactor.run()
 
 
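The --output and --format flags end up as Scrapy feed-export settings. As a hedged sketch of how scrapy_settings_manipulation() resolves FEED_URI for a few argument combinations, the dictionaries below are hand-written stand-ins for real docopt output (settings.overrides is the pre-1.0 Scrapy settings API used above):

    # Hand-written stand-ins for docopt output, tracing the FEED_URI logic above.
    examples = [
        {"--output": "result.*format*", "--format": "csv"},        # default name + format
        {"--output": "result.*format*", "--format": "jsonlines"},  # jsonlines special case
        {"--output": "data.csv",        "--format": "csv"},        # explicit output file
    ]

    for args in examples:
        if args["--output"] != 'result.*format*':
            feed_uri = args["--output"]
        elif args["--format"] == "jsonlines":
            feed_uri = "results.json"
        elif args["--format"] is not None:
            feed_uri = "results." + args["--format"]
        print("%s -> FEED_URI=%s, FEED_FORMAT=%s" % (args["--format"], feed_uri, args["--format"]))
    # csv -> FEED_URI=results.csv, FEED_FORMAT=csv
    # jsonlines -> FEED_URI=results.json, FEED_FORMAT=jsonlines
    # csv -> FEED_URI=data.csv, FEED_FORMAT=csv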
sourceloader.py

@@ -21,7 +21,6 @@ class SourceLoader:
         known_parser.add(cls)
 
     def include(self, source_names):
-        print source_names
         new = set()
         for name in source_names:
             new.update([src for src in self.sources if re.match(name, src.__class__.__name__)])
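With the debug print gone, include() simply keeps a source when its class name matches one of the given regular expressions via re.match. A small, self-contained sketch of that filter; the source class names here are made-up stand-ins for Fourmi's real source classes:

    # Illustration of the include() filter above; ChemSpider, WikipediaParser
    # and NIST are illustrative class names, not taken from the repository.
    import re

    class ChemSpider(object): pass
    class WikipediaParser(object): pass
    class NIST(object): pass

    sources = [ChemSpider(), WikipediaParser(), NIST()]
    source_names = ["Chem.*", "NIST"]

    new = set()
    for name in source_names:
        new.update([src for src in sources if re.match(name, src.__class__.__name__)])

    print(sorted(src.__class__.__name__ for src in new))  # ['ChemSpider', 'NIST']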