diff --git a/FourmiCrawler/pipelines.py b/FourmiCrawler/pipelines.py
index e1dadbf..2c775f2 100644
--- a/FourmiCrawler/pipelines.py
+++ b/FourmiCrawler/pipelines.py
@@ -5,6 +5,22 @@
 import re
 
 from scrapy.exceptions import DropItem
+class RemoveNonePipeline(object):
+
+    def __init__(self):
+        self.known_values = set()
+
+    def process_item(self, item, spider):
+        """
+        Processing the item so None values are replaced by empty strings
+        :param item: The incoming item
+        :param spider: The spider which scraped the item
+        :return: the item with None values replaced by empty strings
+        """
+        for key in item:
+            if item[key] is None:
+                item[key] = ""
+        return item
 
 
 class DuplicatePipeline(object):
diff --git a/FourmiCrawler/settings.py b/FourmiCrawler/settings.py
index d7ac212..be7c451 100644
--- a/FourmiCrawler/settings.py
+++ b/FourmiCrawler/settings.py
@@ -11,8 +11,9 @@ BOT_NAME = 'FourmiCrawler'
 SPIDER_MODULES = ['FourmiCrawler']
 NEWSPIDER_MODULE = 'FourmiCrawler'
 ITEM_PIPELINES = {
-    'FourmiCrawler.pipelines.AttributeSelectionPipeline': 100,
-    'FourmiCrawler.pipelines.DuplicatePipeline': 200,
+    'FourmiCrawler.pipelines.RemoveNonePipeline': 100,
+    'FourmiCrawler.pipelines.AttributeSelectionPipeline': 200,
+    'FourmiCrawler.pipelines.DuplicatePipeline': 300,
 }
 FEED_URI = 'results.json'
 FEED_FORMAT = 'jsonlines'
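
For context, the priority numbers in ITEM_PIPELINES determine execution order in Scrapy: lower values run first, so RemoveNonePipeline (100) now cleans each item before AttributeSelectionPipeline (200) and DuplicatePipeline (300) see it. Below is a minimal sketch of the new pipeline's effect, assuming it is exercised outside Scrapy with a plain dict standing in for a scraped item and no real spider object; the field names are illustrative, not taken from the patch.

    from FourmiCrawler.pipelines import RemoveNonePipeline

    # Hypothetical item with missing values, as a spider might yield it.
    item = {'attribute': 'Melting Point', 'value': None, 'source': None}

    pipeline = RemoveNonePipeline()
    cleaned = pipeline.process_item(item, spider=None)
    print(cleaned)  # every None value has been replaced by an empty string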