Archived
1
0

Logging facility is working again.

This commit is contained in:
Jip J. Dekker 2014-06-15 20:09:40 +02:00
parent f604c3efcc
commit 3ea950b936

View File

@ -26,7 +26,7 @@ Options:
from twisted.internet import reactor
from scrapy.crawler import Crawler
from scrapy import signals, log
import docopt
from FourmiCrawler.spider import FourmiSpider
@ -61,6 +61,7 @@ def search(docopt_arguments, source_loader):
conf.start_log(docopt_arguments["--log"], docopt_arguments["-v"])
conf.set_output(docopt_arguments["--output"], docopt_arguments["--format"])
setup_crawler(docopt_arguments["<compound>"], conf.scrapy_settings, source_loader, docopt_arguments["--attributes"].split(','))
log.start(conf.scrapy_settings.get("LOG_FILE"), conf.scrapy_settings.get("LOG_LEVEL"), conf.scrapy_settings.get("LOG_STDOUT"))
reactor.run()