# Open questions: (1) Where should the Selenium driver be closed (quit)?
# (2) Setting the proxy IP this way raises an error — why?
# (3) With a single spider, what is the difference between creating the driver
#     in the spider vs. in the middleware, and between creating it in a
#     single-threaded vs. multi-threaded context?
def process_request(self, request, spider):
    """Render the request's URL with Selenium and short-circuit the download.

    Called by Scrapy for each request passing through this downloader
    middleware. Returning a Response here means Scrapy uses it directly and
    skips its own downloader.

    Args:
        request: The scrapy Request being processed.
        spider: The spider that issued the request.

    Returns:
        HtmlResponse built from the Selenium-rendered page source.
    """
    # NOTE(review): request.meta['proxy'] only configures Scrapy's OWN
    # downloader, which this middleware bypasses — Selenium ignores it, and
    # that mismatch is why setting the proxy here "errors"/has no effect.
    # Configure the proxy on the webdriver options (e.g. Chrome
    # --proxy-server=...) where self.driver is created instead. The unused
    # self.get_ip() call that fed the commented-out proxy line was removed.
    self.driver.get(request.url)

    # Fixed delay to let JavaScript render. A WebDriverWait on a concrete
    # element condition would be more reliable than a blind sleep.
    sleep(2)

    # Do NOT quit the driver here: it is shared across all requests of the
    # spider. Close it once in a spider_closed signal handler (connect it in
    # from_crawler) so each request can keep reusing the same browser.
    return HtmlResponse(
        url=request.url,
        body=self.driver.page_source,
        encoding='utf8',
        request=request,
    )
