Using Scrapy to crawl Zhihu follower information and store it in MySQL.

pipeline.py source code:

import pymysql

class ZhihuuserPipeline(object):
    def process_item(self, item, spider):
        return item  # a pipeline's process_item should return the item (or raise DropItem), not yield it

class MysqlPipeline():
    def __init__(self, host, database, user, password, port):
        self.host = host
        self.database = database
        self.user = user
        self.password = password
        self.port = port

    @classmethod
    def from_crawler(cls, crawler):
        return cls(host=crawler.settings.get('MYSQL_HOST'),
                   database=crawler.settings.get('MYSQL_DATABASE'),
                   user=crawler.settings.get('MYSQL_USER'),
                   password=crawler.settings.get('MYSQL_PASSWORD'),
                   port=crawler.settings.get('MYSQL_PORT'),
                   )

    def open_spider(self, spider):
        self.db = pymysql.connect(self.host, self.user, self.password, self.database,
                                  charset='utf8', port=self.port)
        self.cursor = self.db.cursor()  # get a cursor

    def close_spider(self, spider):
        self.db.close()

    def process_item(self, item, spider):
        data = dict(item)
        keys = ', '.join(data.keys())
        # use join() to combine the column names into one comma-separated string
        values = ', '.join(['%s'] * len(data))
        sql = 'insert into %s (%s) values (%s)' % (item.table, keys, values)
        self.cursor.execute(sql, tuple(data.values()))
        self.db.commit()

        return item
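For reference, from_crawler reads the connection parameters from the crawler settings, so settings.py has to define them and register the pipeline. A minimal sketch with illustrative values (the module path zhihuuser.pipelines is an assumption based on the project name):

# settings.py (illustrative values; adjust to your environment)
MYSQL_HOST = 'localhost'
MYSQL_DATABASE = 'zhihu'
MYSQL_USER = 'root'
MYSQL_PASSWORD = 'secret'
MYSQL_PORT = 3306  # keep this an integer

ITEM_PIPELINES = {
    'zhihuuser.pipelines.MysqlPipeline': 300,
}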

The error raised is: ValueError: dictionary update sequence element #0 has length 6; 2 is required

How should I fix this?
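This ValueError is raised when dict() (or a dict update) receives a sequence of plain strings instead of key/value pairs. In this pipeline the most frequently reported trigger is the positional call to pymysql.connect() in open_spider, where on some PyMySQL versions a string argument can land in a parameter that expects a mapping; it can also appear if the object handed to dict(item) is not an Item or dict. Below is a minimal sketch of the usual workaround, connecting with keyword arguments only (placeholder values; inside open_spider this corresponds to host=self.host, user=self.user, password=self.password, database=self.database, port=int(self.port)):

import pymysql

# Every argument passed by keyword so nothing depends on positional order.
db = pymysql.connect(
    host='localhost',
    user='root',
    password='secret',
    database='zhihu',
    charset='utf8',
    port=3306,  # must be an int, not a string
)
cursor = db.cursor()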
