Получение ошибки {KeyError}из self.files.pop (spider) в скрапе конвейера - PullRequest
0 голосов
/ 20 октября 2018

Когда я запускаю scrapy для экспорта нескольких csv из одного паука, я получаю ошибку {KeyError} из self.files.pop (spider).

Вот мой конвейер.

class PhysiciansPipeline(object):
    def __init__(self, spider):
        self.files = {}
        full_path = result_path(spider.result_path_type, spider.name)
        self.exporter1 = CsvItemExporter(fields_to_export=PhysiciansItem.fields.keys(),file=open(full_path + 'file1.csv','wb'))
        self.exporter2 = CsvItemExporter(fields_to_export=SpecialtiesItem.fields.keys(),file=open(full_path + 'file2.csv','wb'))
        self.exporter3 = CsvItemExporter(fields_to_export=LocationsItem.fields.keys(), file=open(full_path + 'file3.csv', 'wb'))

    @classmethod
    def from_crawler(cls, crawler):
        spider = crawler.spider
        pipeline = cls(spider)
        crawler.signals.connect(pipeline.spider_opened, signals.spider_opened)
        crawler.signals.connect(pipeline.spider_closed, signals.spider_closed)
        return pipeline

    def spider_opened(self, spider):
        self.exporter1.start_exporting()
        self.exporter2.start_exporting()
        self.exporter3.start_exporting()

    def spider_closed(self, spider):
        self.exporter1.finish_exporting()
        self.exporter2.finish_exporting()
        self.exporter3.finish_exporting()
        file = self.files.pop(spider)
        file.close()

    def process_item(self, item, spider):
        self.exporter1.export_item(item)
        self.exporter2.export_item(item)
        self.exporter3.export_item(item)
        return item

Также я добавил эту строку в settings.py

ITEM_PIPELINES = {
   'physicians.pipelines.PhysiciansPipeline': 300,
}

В чем проблема в этом коде?Спасибо.

1 Ответ

0 голосов
/ 20 октября 2018

Я не вижу никакого значения, добавляемого в self.files

Говоря об ошибке, это означает, что ключ spider не существует в self.files

Я думаю, выищите

self.files.pop(spider.name)

Редактировать:

class PhysiciansPipeline(object):
    def __init__(self, spider):
        self.files=[]
        self.full_path = result_path(spider.result_path_type, spider.name)
        file1 = open(full_path + 'physicians.csv','wb')
        self.files.extend([ file1 ])
        self.exporter1 = CsvItemExporter(fields_to_export=PhysiciansItem.fields.keys(),file=file1)

        file2= open(full_path + 'specialities.csv','wb')
        self.files.extend([ files2 ])
        self.exporter2 = CsvItemExporter(fields_to_export=SpecialtiesItem.fields.keys(),file=file2)

        file3 = open(full_path + 'locations.csv', 'wb')
        self.files.extend([ file3 ])
        self.exporter3 = CsvItemExporter(fields_to_export=LocationsItem.fields.keys(), file=file3)

    @classmethod
    def from_crawler(cls, crawler):
        spider = crawler.spider
        pipeline = cls(spider)
        crawler.signals.connect(pipeline.spider_opened, signals.spider_opened)
        crawler.signals.connect(pipeline.spider_closed, signals.spider_closed)
        return pipeline

    def spider_opened(self, spider):
        self.exporter1.start_exporting()
        self.exporter2.start_exporting()
        self.exporter3.start_exporting()

    def spider_closed(self, spider):
        self.exporter1.finish_exporting()
        self.exporter2.finish_exporting()
        self.exporter3.finish_exporting()
        for _file in self.files:
            _file.close()
        
        clean_csv(full_path)

    def process_item(self, item, spider):
        self.exporter1.export_item(item)
        self.exporter2.export_item(item)
        self.exporter3.export_item(item)
        return item
...