From 8b7cfac2dea5c92efaee14f1df4a5317232897c3 Mon Sep 17 00:00:00 2001
From: "Jip J. Dekker"
Date: Wed, 16 Apr 2014 09:33:07 +0200
Subject: [PATCH 01/15] Added a new command to the CLI, implementation will
 follow.

---
 fourmi.py | 14 +++++++++++---
 1 file changed, 11 insertions(+), 3 deletions(-)

diff --git a/fourmi.py b/fourmi.py
index d8bc427..dcd15f0 100755
--- a/fourmi.py
+++ b/fourmi.py
@@ -5,6 +5,7 @@ Fourmi, an web scraper build to search specific information for a given compound
 Usage:
     fourmi search <compound>
     fourmi [options] search <compound>
+    fourmi list
     fourmi -h | --help
     fourmi --version
 
@@ -84,11 +85,18 @@ def start_log(arguments):
     else:
         log.start(logstdout=True, loglevel=log.WARNING)
 
 
-
-if __name__ == '__main__':
-    arguments = docopt.docopt(__doc__, version='Fourmi - V0.1.0')
+def search(arguments):
     start_log(arguments)
     settings = scrapy_settings_manipulation(arguments)
     setup_crawler([arguments["<compound>"]], settings)
     reactor.run()
+
+if __name__ == '__main__':
+    arguments = docopt.docopt(__doc__, version='Fourmi - V0.1.0')
+    print arguments
+
+    if arguments["search"]:
+        search(arguments)
+    elif arguments["list"]:
+        load_parsers() #[todo] - this should just show the different sources.
\ No newline at end of file

From a06bf643f11ba38bbf66c4386d685ddd38a37081 Mon Sep 17 00:00:00 2001
From: "Jip J. Dekker"
Date: Wed, 16 Apr 2014 10:14:29 +0200
Subject: [PATCH 02/15] Made sourceloader a class and implemented the listing
 of all sources

---
 fourmi.py       |  6 ++++--
 sourceloader.py | 34 ++++++++++++++++++++++++++++++++++
 2 files changed, 38 insertions(+), 2 deletions(-)
 create mode 100644 sourceloader.py

diff --git a/fourmi.py b/fourmi.py
index dcd15f0..f421ba9 100755
--- a/fourmi.py
+++ b/fourmi.py
@@ -29,6 +29,7 @@ import docopt
 
 from FourmiCrawler.parsers.parser import Parser
 from FourmiCrawler.spider import FourmiSpider
+from sourceloader import SourceLoader
 
 
 def load_parsers(rel_dir="FourmiCrawler/parsers"):
@@ -94,9 +95,10 @@ def search(arguments):
 
 if __name__ == '__main__':
     arguments = docopt.docopt(__doc__, version='Fourmi - V0.1.0')
-    print arguments
+    loader = SourceLoader()
 
     if arguments["search"]:
         search(arguments)
     elif arguments["list"]:
-        load_parsers() #[todo] - this should just show the different sources.
\ No newline at end of file
+        print "-== Available Sources ==-"
+        print str(loader)
\ No newline at end of file
diff --git a/sourceloader.py b/sourceloader.py
new file mode 100644
index 0000000..abdb539
--- /dev/null
+++ b/sourceloader.py
@@ -0,0 +1,34 @@
+import inspect
+import os
+from FourmiCrawler.parsers.parser import Parser
+
+
+class SourceLoader:
+    sources = []
+
+    def __init__(self, rel_dir="FourmiCrawler/parsers"):
+        path = os.path.dirname(os.path.abspath(__file__))
+        path += "/" + rel_dir
+        known_parser = set()
+
+        for py in [f[:-3] for f in os.listdir(path) if f.endswith('.py') and f != '__init__.py']:
+            mod = __import__('.'.join([rel_dir.replace("/", "."), py]), fromlist=[py])
+            classes = [getattr(mod, x) for x in dir(mod) if inspect.isclass(getattr(mod, x))]
+            for cls in classes:
+                if issubclass(cls, Parser) and cls not in known_parser:
+                    self.sources.append(cls()) # [review] - Would we ever need arguments for the parsers?
+                    known_parser.add(cls)
+
+    def include(self, source_names):
+        pass # [todo] - implement source inclusion.
+
+    def exclude(self, source_names):
+        pass # [todo] - implement source exclusion.
+
+    def __str__(self):
+        string = ""
+        for src in self.sources:
+            string += "Source: " + src.__class__.__name__
+            string += " - "
+            string += "URI: " + src.website + "\n"
+        return string
\ No newline at end of file
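Reviewer note: patch 02's SourceLoader walks the parsers directory, imports every module, and instantiates each class that subclasses Parser, so a source registers itself simply by existing in that folder. A minimal sketch of driving the loader directly, assuming ChemSpider is the only source module checked in (as the file listing in patch 10 later suggests):

    from sourceloader import SourceLoader

    loader = SourceLoader()            # scans FourmiCrawler/parsers for Parser subclasses
    for src in loader.sources:
        print src.__class__.__name__   # e.g. ChemSpider
    print str(loader)                  # the same listing `fourmi list` prints

One caveat worth flagging: `sources = []` is a class attribute, so all SourceLoader instances share a single list, and constructing the loader twice would register every source twice.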
From 7b57d861783c473e43aa585127fb5b9577c49e5f Mon Sep 17 00:00:00 2001
From: "Jip J. Dekker"
Date: Wed, 16 Apr 2014 10:36:46 +0200
Subject: [PATCH 03/15] Removed redundant source loader

---
 fourmi.py | 30 +++++-------------------------
 1 file changed, 5 insertions(+), 25 deletions(-)

diff --git a/fourmi.py b/fourmi.py
index f421ba9..232f1f5 100755
--- a/fourmi.py
+++ b/fourmi.py
@@ -18,39 +18,19 @@ Options:
     -f --format=<format>      Output formats (supported: csv, json, jsonlines, xml) [default: jsonlines]
 """
 
-import os
-import inspect
-
 from twisted.internet import reactor
 from scrapy.crawler import Crawler
 from scrapy import log, signals
 from scrapy.utils.project import get_project_settings
 import docopt
 
-from FourmiCrawler.parsers.parser import Parser
 from FourmiCrawler.spider import FourmiSpider
 from sourceloader import SourceLoader
 
 
-def load_parsers(rel_dir="FourmiCrawler/parsers"):
-    path = os.path.dirname(os.path.abspath(__file__))
-    path += "/" + rel_dir
-    parsers = []
-    known_parser = set()
-
-    for py in [f[:-3] for f in os.listdir(path) if f.endswith('.py') and f != '__init__.py']:
-        mod = __import__('.'.join([rel_dir.replace("/", "."), py]), fromlist=[py])
-        classes = [getattr(mod, x) for x in dir(mod) if inspect.isclass(getattr(mod, x))]
-        for cls in classes:
-            if issubclass(cls, Parser) and cls not in known_parser:
-                parsers.append(cls()) # [review] - Would we ever need arguments for the parsers?
-                known_parser.add(cls)
-    return parsers
-
-
-def setup_crawler(searchable, settings):
+def setup_crawler(searchable, settings, loader):
     spider = FourmiSpider(compound=searchable)
-    spider.add_parsers(load_parsers())
+    spider.add_parsers(loader.sources)
     crawler = Crawler(settings)
     crawler.signals.connect(reactor.stop, signal=signals.spider_closed)
     crawler.configure()
@@ -86,10 +66,10 @@ def start_log(arguments):
     else:
         log.start(logstdout=True, loglevel=log.WARNING)
 
 
-def search(arguments):
+def search(arguments, loader):
     start_log(arguments)
     settings = scrapy_settings_manipulation(arguments)
-    setup_crawler([arguments["<compound>"]], settings)
+    setup_crawler([arguments["<compound>"]], settings, loader)
     reactor.run()
 
@@ -98,7 +78,7 @@ if __name__ == '__main__':
     loader = SourceLoader()
 
     if arguments["search"]:
-        search(arguments)
+        search(arguments, loader)
     elif arguments["list"]:
         print "-== Available Sources ==-"
         print str(loader)
\ No newline at end of file

From d823c105e65a1cc4fa42f0a3b7d8173949667ebd Mon Sep 17 00:00:00 2001
From: "Jip J. Dekker"
Date: Wed, 16 Apr 2014 10:48:29 +0200
Subject: [PATCH 04/15] Implemented source inclusion

---
 sourceloader.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/sourceloader.py b/sourceloader.py
index abdb539..26f164d 100644
--- a/sourceloader.py
+++ b/sourceloader.py
@@ -1,5 +1,6 @@
 import inspect
 import os
+import re
 from FourmiCrawler.parsers.parser import Parser
 
 
@@ -20,7 +21,10 @@ class SourceLoader:
                     known_parser.add(cls)
 
     def include(self, source_names):
-        pass # [todo] - implement source inclusion.
+        new = []
+        for name in source_names:
+            new.extend([src for src in self.sources if re.match(name, src.__class__.__name__)])
+        self.sources = new
 
     def exclude(self, source_names):
         pass # [todo] - implement source exclusion.
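Reviewer note: include() relies on re.match, which anchors at the beginning of the class name only, so each pattern behaves as a prefix match unless it is explicitly anchored with $. A small illustration (class names hypothetical):

    import re

    names = ['ChemSpider', 'Wikipedia', 'NIST']
    kept = [n for n in names if re.match('Chem', n)]
    # kept == ['ChemSpider']: 'Chem' matches at the start of the string
    # re.match('Spider', 'ChemSpider') is None -- there is no substring search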
Dekker" Date: Wed, 16 Apr 2014 11:03:59 +0200 Subject: [PATCH 05/15] Implemented source exclusion --- sourceloader.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/sourceloader.py b/sourceloader.py index 26f164d..68c1832 100644 --- a/sourceloader.py +++ b/sourceloader.py @@ -27,7 +27,10 @@ class SourceLoader: self.sources = new def exclude(self, source_names): - pass # [todo] - implement source exclusion. + exclude = [] + for name in source_names: + exclude.extend([src for src in self.sources if re.match(source_names,src.__class__.__name__)]) + self.sources = [src for src in self.sources if src not in exclude] def __str__(self): string = "" From 92a74de9e01ac2dbbeb4300af3ebf01b1a1ba46c Mon Sep 17 00:00:00 2001 From: "Jip J. Dekker" Date: Wed, 16 Apr 2014 11:17:48 +0200 Subject: [PATCH 06/15] Added the include and exclude options. --- fourmi.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/fourmi.py b/fourmi.py index 232f1f5..2b36b08 100755 --- a/fourmi.py +++ b/fourmi.py @@ -16,6 +16,8 @@ Options: --log= Save log to an file. -o --output= Output file [default: result.*format*] -f --format= Output formats (supported: csv, json, jsonlines, xml) [default: jsonlines] + --include= Include only sources that match the regular these expressions split by a comma. + --exclude= Exclude the sources that match the regular these expressions split by a comma. """ from twisted.internet import reactor From 02e6f293488332f269a1b4fec31041eb43358260 Mon Sep 17 00:00:00 2001 From: "Jip J. Dekker" Date: Wed, 23 Apr 2014 15:27:14 +0200 Subject: [PATCH 07/15] Include and Exclude fix after testing --- sourceloader.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/sourceloader.py b/sourceloader.py index 68c1832..bb0d81a 100644 --- a/sourceloader.py +++ b/sourceloader.py @@ -21,15 +21,16 @@ class SourceLoader: known_parser.add(cls) def include(self, source_names): - new = [] + print source_names + new = set() for name in source_names: - new.extend([src for src in self.sources if re.match(name, src.__class__.__name__)]) - self.sources = new + new.update([src for src in self.sources if re.match(name, src.__class__.__name__)]) + self.sources = list(new) def exclude(self, source_names): exclude = [] for name in source_names: - exclude.extend([src for src in self.sources if re.match(source_names,src.__class__.__name__)]) + exclude.extend([src for src in self.sources if re.match(name, src.__class__.__name__)]) self.sources = [src for src in self.sources if src not in exclude] def __str__(self): @@ -37,5 +38,5 @@ class SourceLoader: for src in self.sources: string += "Source: " + src.__class__.__name__ string += " - " - string += "URI: " + src.website + "\n" + string += "URI: " + src.website + "\n" return string \ No newline at end of file From 12eb340042b55e3d12aa4f554be9247c414608c4 Mon Sep 17 00:00:00 2001 From: "Jip J. 
Dekker" Date: Wed, 23 Apr 2014 15:27:51 +0200 Subject: [PATCH 08/15] Added the right commands to include and exclude a source --- fourmi.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/fourmi.py b/fourmi.py index 6840a5f..e2f702b 100755 --- a/fourmi.py +++ b/fourmi.py @@ -6,6 +6,7 @@ Usage: fourmi search fourmi [options] search fourmi list + fourmi [--include= | --exclude=] list fourmi -h | --help fourmi --version @@ -80,6 +81,11 @@ if __name__ == '__main__': arguments = docopt.docopt(__doc__, version='Fourmi - V0.1.0') loader = SourceLoader() + if arguments["--include"]: + loader.include(arguments["--include"].split(',')) + elif arguments["--exclude"]: + loader.exclude(arguments["--exclude"].split(',')) + if arguments["search"]: search(arguments, loader) elif arguments["list"]: From 2cb6c147044485e4ae3d1a354052814c1a24e1c5 Mon Sep 17 00:00:00 2001 From: "Jip J. Dekker" Date: Wed, 23 Apr 2014 15:42:18 +0200 Subject: [PATCH 09/15] CLI accessability and cleanup --- fourmi.py | 43 ++++++++++++++++++++++--------------------- sourceloader.py | 1 - 2 files changed, 22 insertions(+), 22 deletions(-) diff --git a/fourmi.py b/fourmi.py index e2f702b..19ed150 100755 --- a/fourmi.py +++ b/fourmi.py @@ -5,6 +5,7 @@ Fourmi, a web scraper build to search specific information for a given compound Usage: fourmi search fourmi [options] search + fourmi [options] [--include= | --exclude=] search fourmi list fourmi [--include= | --exclude=] list fourmi -h | --help @@ -31,9 +32,9 @@ from FourmiCrawler.spider import FourmiSpider from sourceloader import SourceLoader -def setup_crawler(searchable, settings, loader): +def setup_crawler(searchable, settings, source_loader): spider = FourmiSpider(compound=searchable) - spider.add_parsers(loader.sources) + spider.add_parsers(source_loader.sources) crawler = Crawler(settings) crawler.signals.connect(reactor.stop, signal=signals.spider_closed) crawler.configure() @@ -41,39 +42,39 @@ def setup_crawler(searchable, settings, loader): crawler.start() -def scrapy_settings_manipulation(arguments): +def scrapy_settings_manipulation(docopt_arguments): settings = get_project_settings() - - if arguments["--output"] != 'result.*format*': - settings.overrides["FEED_URI"] = arguments["--output"] - elif arguments["--format"] == "jsonlines": + # [todo] - add at least a warning for files that already exist + if docopt_arguments["--output"] != 'result.*format*': + settings.overrides["FEED_URI"] = docopt_arguments["--output"] + elif docopt_arguments["--format"] == "jsonlines": settings.overrides["FEED_URI"] = "results.json" - elif arguments["--format"] is not None: - settings.overrides["FEED_URI"] = "results." + arguments["--format"] + elif docopt_arguments["--format"] is not None: + settings.overrides["FEED_URI"] = "results." 
+ docopt_arguments["--format"] - if arguments["--format"] is not None: - settings.overrides["FEED_FORMAT"] = arguments["--format"] + if docopt_arguments["--format"] is not None: + settings.overrides["FEED_FORMAT"] = docopt_arguments["--format"] return settings -def start_log(arguments): - if arguments["--log"] is not None: - if arguments["--verbose"]: - log.start(logfile=arguments["--log"], logstdout=False, loglevel=log.DEBUG) +def start_log(docopt_arguments): + if docopt_arguments["--log"] is not None: + if docopt_arguments["--verbose"]: + log.start(logfile=docopt_arguments["--log"], logstdout=False, loglevel=log.DEBUG) else: - log.start(logfile=arguments["--log"], logstdout=True, loglevel=log.WARNING) + log.start(logfile=docopt_arguments["--log"], logstdout=True, loglevel=log.WARNING) else: - if arguments["--verbose"]: + if docopt_arguments["--verbose"]: log.start(logstdout=False, loglevel=log.DEBUG) else: log.start(logstdout=True, loglevel=log.WARNING) -def search(arguments, loader): - start_log(arguments) - settings = scrapy_settings_manipulation(arguments) - setup_crawler(arguments[""], settings, loader) +def search(docopt_arguments, source_loader): + start_log(docopt_arguments) + settings = scrapy_settings_manipulation(docopt_arguments) + setup_crawler(docopt_arguments[""], settings, source_loader) reactor.run() diff --git a/sourceloader.py b/sourceloader.py index bb0d81a..2eff6c1 100644 --- a/sourceloader.py +++ b/sourceloader.py @@ -21,7 +21,6 @@ class SourceLoader: known_parser.add(cls) def include(self, source_names): - print source_names new = set() for name in source_names: new.update([src for src in self.sources if re.match(name, src.__class__.__name__)]) From 662ee8f49013aa07ded2e3b89216a135a90b4f59 Mon Sep 17 00:00:00 2001 From: "Jip J. Dekker" Date: Wed, 23 Apr 2014 15:49:03 +0200 Subject: [PATCH 10/15] Renamed folder --- FourmiCrawler/{parsers => sources}/ChemSpider.py | 0 FourmiCrawler/{parsers => sources}/__init__.py | 0 FourmiCrawler/{parsers => sources}/parser.py | 0 3 files changed, 0 insertions(+), 0 deletions(-) rename FourmiCrawler/{parsers => sources}/ChemSpider.py (100%) rename FourmiCrawler/{parsers => sources}/__init__.py (100%) rename FourmiCrawler/{parsers => sources}/parser.py (100%) diff --git a/FourmiCrawler/parsers/ChemSpider.py b/FourmiCrawler/sources/ChemSpider.py similarity index 100% rename from FourmiCrawler/parsers/ChemSpider.py rename to FourmiCrawler/sources/ChemSpider.py diff --git a/FourmiCrawler/parsers/__init__.py b/FourmiCrawler/sources/__init__.py similarity index 100% rename from FourmiCrawler/parsers/__init__.py rename to FourmiCrawler/sources/__init__.py diff --git a/FourmiCrawler/parsers/parser.py b/FourmiCrawler/sources/parser.py similarity index 100% rename from FourmiCrawler/parsers/parser.py rename to FourmiCrawler/sources/parser.py From 1e24453a1152417a2636c042ff9ee61660f31a03 Mon Sep 17 00:00:00 2001 From: "Jip J. Dekker" Date: Wed, 23 Apr 2014 15:51:03 +0200 Subject: [PATCH 11/15] Renamed filename of basic source class --- FourmiCrawler/sources/{parser.py => source.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename FourmiCrawler/sources/{parser.py => source.py} (100%) diff --git a/FourmiCrawler/sources/parser.py b/FourmiCrawler/sources/source.py similarity index 100% rename from FourmiCrawler/sources/parser.py rename to FourmiCrawler/sources/source.py From e18e4b4b26b559e9e605065073d260a87756f408 Mon Sep 17 00:00:00 2001 From: "Jip J. 
Dekker" Date: Wed, 23 Apr 2014 15:55:38 +0200 Subject: [PATCH 12/15] Resolved all references to the old folder --- FourmiCrawler/sources/ChemSpider.py | 4 ++-- sourceloader.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/FourmiCrawler/sources/ChemSpider.py b/FourmiCrawler/sources/ChemSpider.py index 3273107..42abc22 100644 --- a/FourmiCrawler/sources/ChemSpider.py +++ b/FourmiCrawler/sources/ChemSpider.py @@ -1,4 +1,4 @@ -from parser import Parser +from source import Parser from scrapy import log from scrapy.http import Request from scrapy.selector import Selector @@ -18,7 +18,7 @@ class ChemSpider(Parser): """ def __init__(self): - pass + Parser.__init__(self) website = 'http://www.chemspider.com/*' diff --git a/sourceloader.py b/sourceloader.py index 2eff6c1..f380721 100644 --- a/sourceloader.py +++ b/sourceloader.py @@ -1,13 +1,13 @@ import inspect import os import re -from FourmiCrawler.parsers.parser import Parser +from FourmiCrawler.sources.source import Parser class SourceLoader: sources = [] - def __init__(self, rel_dir="FourmiCrawler/parsers"): + def __init__(self, rel_dir="FourmiCrawler/sources"): path = os.path.dirname(os.path.abspath(__file__)) path += "/" + rel_dir known_parser = set() From 90f03734a66235eb0bac9b4a7e9366ca6b2008fc Mon Sep 17 00:00:00 2001 From: "Jip J. Dekker" Date: Wed, 23 Apr 2014 15:57:10 +0200 Subject: [PATCH 13/15] Refractored classname --- FourmiCrawler/sources/ChemSpider.py | 6 +++--- FourmiCrawler/sources/source.py | 2 +- FourmiCrawler/spider.py | 2 +- sourceloader.py | 4 ++-- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/FourmiCrawler/sources/ChemSpider.py b/FourmiCrawler/sources/ChemSpider.py index 42abc22..a62f6dd 100644 --- a/FourmiCrawler/sources/ChemSpider.py +++ b/FourmiCrawler/sources/ChemSpider.py @@ -1,4 +1,4 @@ -from source import Parser +from source import Source from scrapy import log from scrapy.http import Request from scrapy.selector import Selector @@ -8,7 +8,7 @@ import re # [TODO] - Maybe clean up usage of '.extract()[0]', because of possible IndexError exception. 
From 90f03734a66235eb0bac9b4a7e9366ca6b2008fc Mon Sep 17 00:00:00 2001
From: "Jip J. Dekker"
Date: Wed, 23 Apr 2014 15:57:10 +0200
Subject: [PATCH 13/15] Refactored class name

---
 FourmiCrawler/sources/ChemSpider.py | 6 +++---
 FourmiCrawler/sources/source.py     | 2 +-
 FourmiCrawler/spider.py             | 2 +-
 sourceloader.py                     | 4 ++--
 4 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/FourmiCrawler/sources/ChemSpider.py b/FourmiCrawler/sources/ChemSpider.py
index 42abc22..a62f6dd 100644
--- a/FourmiCrawler/sources/ChemSpider.py
+++ b/FourmiCrawler/sources/ChemSpider.py
@@ -1,4 +1,4 @@
-from source import Parser
+from source import Source
 from scrapy import log
 from scrapy.http import Request
 from scrapy.selector import Selector
@@ -8,7 +8,7 @@ import re
 # [TODO] - Maybe clean up usage of '.extract()[0]', because of possible IndexError exception.
 
 
-class ChemSpider(Parser):
+class ChemSpider(Source):
     """ChemSpider scraper for synonyms and properties
 
     This parser will manage searching for chemicals through the
@@ -18,7 +18,7 @@ class ChemSpider(Source):
     """
 
     def __init__(self):
-        Parser.__init__(self)
+        Source.__init__(self)
 
     website = 'http://www.chemspider.com/*'
 
diff --git a/FourmiCrawler/sources/source.py b/FourmiCrawler/sources/source.py
index feb4535..3c51724 100644
--- a/FourmiCrawler/sources/source.py
+++ b/FourmiCrawler/sources/source.py
@@ -2,7 +2,7 @@ from scrapy import log
 # from scrapy.http import Request
 
 
-class Parser:
+class Source:
     website = "http://something/*"  # Regex of URI's the source is able to parse
     _spider = None
 
diff --git a/FourmiCrawler/spider.py b/FourmiCrawler/spider.py
index 77b2c11..9f92a84 100644
--- a/FourmiCrawler/spider.py
+++ b/FourmiCrawler/spider.py
@@ -15,7 +15,7 @@ class FourmiSpider(Spider):
     def parse(self, reponse):
         for parser in self.__parsers:
             if re.match(parser.website, reponse.url):
-                log.msg("Url: " + reponse.url + " -> Parser: " + parser.website, level=log.DEBUG)
+                log.msg("Url: " + reponse.url + " -> Source: " + parser.website, level=log.DEBUG)
                 return parser.parse(reponse)
         return None
 
diff --git a/sourceloader.py b/sourceloader.py
index f380721..9957a70 100644
--- a/sourceloader.py
+++ b/sourceloader.py
@@ -1,7 +1,7 @@
 import inspect
 import os
 import re
-from FourmiCrawler.sources.source import Parser
+from FourmiCrawler.sources.source import Source
 
 
 class SourceLoader:
@@ -16,7 +16,7 @@ class SourceLoader:
                 mod = __import__('.'.join([rel_dir.replace("/", "."), py]), fromlist=[py])
                 classes = [getattr(mod, x) for x in dir(mod) if inspect.isclass(getattr(mod, x))]
                 for cls in classes:
-                    if issubclass(cls, Parser) and cls not in known_parser:
+                    if issubclass(cls, Source) and cls not in known_parser:
                         self.sources.append(cls()) # [review] - Would we ever need arguments for the parsers?
                         known_parser.add(cls)

From e167c6f16759271b10a87e81865bb87518714cef Mon Sep 17 00:00:00 2001
From: "Jip J. Dekker"
Date: Wed, 23 Apr 2014 16:07:27 +0200
Subject: [PATCH 14/15] Added the MIT License

---
 LICENSE | 21 +++++++++++++++++++++
 1 file changed, 21 insertions(+)
 create mode 100644 LICENSE

diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..59c504f
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2014 Ivo B. Rietveld
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file
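Reviewer note: the spider.py hunk in patch 13 shows the dispatch rule: a response is handed to the first registered source whose website pattern matches the response URL, again through re.match. Because re.match only anchors at the start, the trailing '/*' in the pattern is effectively redundant, and the unescaped dots match any character; a quick check (URL hypothetical):

    import re

    website = 'http://www.chemspider.com/*'  # ChemSpider's pattern from patch 13
    url = 'http://www.chemspider.com/Chemical-Structure.2157.html'
    print bool(re.match(website, url))  # True: the URL starts with the site root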
Dekker" Date: Wed, 23 Apr 2014 16:09:09 +0200 Subject: [PATCH 15/15] Bumped the version number --- fourmi.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/fourmi.py b/fourmi.py index 19ed150..ad27520 100755 --- a/fourmi.py +++ b/fourmi.py @@ -79,7 +79,7 @@ def search(docopt_arguments, source_loader): if __name__ == '__main__': - arguments = docopt.docopt(__doc__, version='Fourmi - V0.1.0') + arguments = docopt.docopt(__doc__, version='Fourmi - V0.2.5') loader = SourceLoader() if arguments["--include"]: