Creating the table works fine, but no data gets inserted at all. Does anyone know what the problem might be?

Here is my pipeline:

import sqlite3

class HelloPipeline(object):

    def open_spider(self, spider):
        self.conn = sqlite3.connect("test.sqlite")
        self.cur = self.conn.cursor()
        self.cur.execute("create table if not exists test(test1 text, test2 text, test3 text, test4 text, test5 text, test6 text, test7 text);")

    def close_spider(self, spider):
        self.conn.commit()
        self.conn.close()

    def process_item(self, item, spider):
        col = ",".join(item.keys())
        placeholders = ",".join(len(item) * "?")
        sql = "insert into test({}) values({})"
        self.cur.execute(sql.format(col, placeholders), item.values())
        return item

Here is the item:

import scrapy


class HelloItem(scrapy.Item):
    # define the fields for your item here like:
    test1 = scrapy.Field()
    ...
    test7 = scrapy.Field()

And here is the main program:

class crawler(scrapy.Spider):

    ...

    def parse(self, response):
        for data_house in jsondata["data"]["data"]:
            yield scrapy.Request(house_detail_domain.format(data_house["post_id"]), self.parse_house_detail)

    def parse_house_detail(self, response):
        ...

        testitem = HelloItem()

        testitem["test1"] = house_detail.select(".houseInfoTitle")
        ...
        testitem["test7"] = house_detail.select(".facility")[0].text
        return testitem

Let me know if any information is missing.

1 Answer

Cool, the log does show the problem:

Traceback (most recent call last):
  File "C:\Users\meow\Anaconda3\lib\site-packages\twisted\internet\defer.py", line 653, in _runCallbacks
    current.result = callback(current.result, *args, **kw)
  File "C:\Users\meow\Desktop\hello\hello\pipelines.py", line 27, in process_item
    self.cur.execute(sql.format(col, placeholders), item.values())
ValueError: parameters are of unsupported type
2018-02-23 19:13:05 [scrapy.core.engine] INFO: Closing spider (finished)

2018-02-23 19:13:05 [scrapy.core.engine] INFO: Spider closed (finished)
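
The traceback points at the execute() call in process_item: in Python 3, sqlite3 can only bind parameters from a sequence (or a dict when using named placeholders), and item.values() returns a dict view, which is exactly what raises ValueError: parameters are of unsupported type. A minimal sketch of the fix, keeping the rest of the pipeline as posted, is to materialize the view into a tuple before binding:

def process_item(self, item, spider):
    col = ",".join(item.keys())
    placeholders = ",".join(len(item) * "?")
    sql = "insert into test({}) values({})"
    # sqlite3 cannot bind a dict view; convert the values to a tuple first
    self.cur.execute(sql.format(col, placeholders), tuple(item.values()))
    return item

An equivalent option is to pass dict(item) with named placeholders. Note also that with commit() only in close_spider, rows reach the database only when the spider closes cleanly; calling self.conn.commit() after each insert is a safer variant if the crawl may be interrupted.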
answered 2018-02-23T11:22:58.200