Added CLI functionality to deal with attribute selection
parent 2fcec009bb
commit c705133194

fourmi.py (12 changed lines)
@@ -12,15 +12,17 @@ Usage:
     fourmi --version
 
 Options:
+    --attributes=<regex>          Include only attributes that match these regular expressions split by a comma. [default: .*]
     -h --help                     Show this screen.
     --version                     Show version.
     --verbose                     Verbose logging output.
     --log=<file>                  Save log to an file.
     -o <file> --output=<file>     Output file [default: result.*format*]
     -f <format> --format=<format> Output formats (supported: csv, json, jsonlines, xml) [default: jsonlines]
-    --include=<sourcenames>       Include only sources that match the regular these expressions split by a comma.
-    --exclude=<sourcenames>       Exclude the sources that match the regular these expressions split by a comma.
+    --include=<regex>             Include only sources that match these regular expressions split by a comma.
+    --exclude=<regex>             Exclude the sources that match these regular expressions split by a comma.
 """
 
+import re
 from twisted.internet import reactor
 from scrapy.crawler import Crawler
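With the new option, a run can be narrowed to the attributes of interest. A hypothetical invocation (the argument order and compound name are illustrative; only the --attributes flag and its [default: .*] come from this hunk):

    fourmi "methane" --attributes="melting point,density.*"

Leaving the flag off keeps the old behaviour, because the default pattern .* matches every attribute name.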
@@ -32,8 +34,8 @@ from FourmiCrawler.spider import FourmiSpider
 from sourceloader import SourceLoader
 
 
-def setup_crawler(searchable, settings, source_loader):
-    spider = FourmiSpider(compound=searchable)
+def setup_crawler(searchable, settings, source_loader, attributes):
+    spider = FourmiSpider(compound=searchable, selected_attributes=attributes)
     spider.add_parsers(source_loader.sources)
     crawler = Crawler(settings)
     crawler.signals.connect(reactor.stop, signal=signals.spider_closed)
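The crawler setup now forwards the pattern list to the spider as selected_attributes, but the spider's side of the contract is not part of this diff. A minimal sketch of the kind of filtering this enables, assuming a hypothetical matches_any helper and a scraped attribute dict (neither appears in this commit):

    import re

    def matches_any(name, patterns):
        # True when the attribute name fully matches at least one of the
        # user-supplied regular expressions; the default '.*' matches all.
        return any(re.match(pattern + '$', name) for pattern in patterns)

    selected_attributes = "molar mass,melting point".split(',')
    scraped = {"molar mass": "16.04 g/mol",
               "boiling point": "-161.5 C",
               "melting point": "-182.5 C"}
    filtered = dict((name, value) for name, value in scraped.items()
                    if matches_any(name, selected_attributes))
    print(filtered)  # {'molar mass': '16.04 g/mol', 'melting point': '-182.5 C'}

The import re added in the first hunk suggests the same regex-based matching was intended to back this option.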
@@ -74,7 +76,7 @@ def start_log(docopt_arguments):
 def search(docopt_arguments, source_loader):
     start_log(docopt_arguments)
     settings = scrapy_settings_manipulation(docopt_arguments)
-    setup_crawler(docopt_arguments["<compound>"], settings, source_loader)
+    setup_crawler(docopt_arguments["<compound>"], settings, source_loader, docopt_arguments["--attributes"].split(','))
     reactor.run()
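The wiring in search() is a plain comma split, so each element of the resulting list is treated as one regular expression. A small illustration of the data shape, with a hypothetical dict standing in for docopt's parsed arguments:

    # docopt returns the raw option string; split(',') yields the pattern list.
    docopt_arguments = {"--attributes": "molar mass,density.*"}  # assumed shape
    patterns = docopt_arguments["--attributes"].split(',')
    # patterns == ['molar mass', 'density.*']

A side effect of the simple split is that a comma can never occur inside a single pattern, so regex quantifiers such as {1,3} would be broken in two.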
|