我正在抓取一个网站,每天严格用户下载页面,大约(1000页),之后,用户无法登录,直到明天0点。
所以我注册了许多帐户来面对它。网站确实使用了cookie。
以下是我的问题,如何在用户过期时重新登录帐户并继续抓取“堆栈”中的旧页面。 这是我的代码可以帮助您理解我的问题。
def start_requests(self):
return [Request(self.start_urls[0], meta = {'cookiejar' : 1}, callback = self.login,dont_filter=True)]
def login(self, response):
self.account = self.accounts[self.line_count].split(",")
self.line_count = self.line_count+1
if(len(self.accounts)<=self.line_count):
self.line_count = 0;
self.log('Preparing login:'+self.account[0]+":"+self.account[1].rstrip())
return [FormRequest.from_response(response,
meta = {'cookiejar' : response.meta['cookiejar']},
headers = self.headers,
formdata = {
'j_email': self.account[0],
'j_password': self.account[1].rstrip(),
'submit': 'Ok'
},
callback = self.parse_url,
dont_filter = True,
)]
当我面对下面的问题时,我将再次启动,但它不起作用或错过很多页面(与会话已过期相同的错误)。
def parse_page_imo(self, response):
hxs = Selector(response)
loginfail = hxs.xpath('//table[@class="tab"]/tbody/tr/td/div[@id="encart"]/li/text()').extract()
if loginfail==([u'Your login (e-mail) or/and password are unknown in Equasis. Please, try again']):
print "relogin"
self.start_requests()
if loginfail==([u'Your session has expired, please try to login again']):
print "relogin"
self.start_requests()
if loginfail==([u'You have been disconnected or your login/password is unknown in Equasis. Please, try again.']):
print "relogin"
self.start_requests()
if loginfail==([u'By security, your session has been cancelled.']):
print "relogin"
self.start_requests()
....codes to parse items....
这是我的孔码:
# -*- coding:utf-8 -*-
from scrapy.contrib.spiders import CrawlSpider
from scrapy.http import Request, FormRequest
from imo_dlcosco_ships.settings import URLS, COOKIES, HEADER
from imo_dlcosco_ships.items import ShipListItem
from scrapy.selector import Selector
import time
class EquasisSpider(CrawlSpider):
name = 'imo_202'
allowed_domains = ["www.equasis.org"]
start_urls = [
"http://www.equasis.org/EquasisWeb/public/HomePage",
]
def __init__(self):
self.headers = HEADER
self.cookies = COOKIES
self.urls = URLS
f = open("account.txt", "r")
self.accounts = f.readlines()
f.close()
self.line_count = 0
#login
def start_requests(self):
return [Request(self.start_urls[0], meta = {'cookiejar' : 1}, callback = self.login,dont_filter=True)]
def login(self, response):
self.account = self.accounts[self.line_count].split(",")
self.line_count = self.line_count+1
if(len(self.accounts)<=self.line_count):
self.line_count = 0;
self.log('Preparing login:'+self.account[0]+":"+self.account[1].rstrip())
return [FormRequest.from_response(response,
meta = {'cookiejar' : response.meta['cookiejar']},
headers = self.headers,
formdata = {
'j_email': self.account[0],
'j_password': self.account[1].rstrip(),
'submit': 'Ok'
},
callback = self.parse_url,
dont_filter = True,
)]
def parse_url(self, response):
return [FormRequest(url="http://www.equasis.org/EquasisWeb/restricted/ShipSearchAdvanced?fs=ShipSearch",
meta = {'cookiejar' : response.meta['cookiejar']},
headers = self.headers,
cookies = self.cookies,
formdata = {
'P_PAGE': '1'
},
dont_filter = True,
callback = self.parse_imo_url,
)]
def parse_imo_url(self, response):
return [FormRequest(url="http://www.equasis.org/EquasisWeb/restricted/ShipList?fs=ShipSearch",
meta = {'cookiejar' : response.meta['cookiejar']},
headers = self.headers,
cookies = self.cookies,
formdata = {
'P_CLASS_ST_rb':'HC',
'P_CLASS_rb':'HC',
'P_CatTypeShip':'6',
'P_CatTypeShip_p2':'6',
'P_CatTypeShip_rb':'CM',
'P_DW_GT':'250000',
'P_DW_LT':'999999',
'P_FLAG_rb':'HC',
'P_PAGE':'1',
'Submit':'SEARCH'
},
dont_filter = True,
callback = self.parse_page_num,
)]
def parse_page_num(self,response):
hxs = Selector(response)
loginfail = hxs.xpath('//table[@class="tab"]/tbody/tr/td/div[@id="encart"]/li/text()').extract()
if loginfail==([u'Your login (e-mail) or/and password are unknown in Equasis. Please, try again']):
print "relogin"
self.start_requests()
if loginfail==([u'Your session has expired, please try to login again']):
print "relogin"
self.start_requests()
if loginfail==([u'You have been disconnected or your login/password is unknown in Equasis. Please, try again.']):
print "relogin"
self.start_requests()
if loginfail==([u'By security, your session has been cancelled.']):
print "relogin"
self.start_requests()
htmlurl = response._url.split('?')[0]
f = open('page.html','a')
f.write(response.body)
f.close()
if(htmlurl=='http://www.equasis.org/EquasisWeb/restricted/ShipList'):
temp1 = hxs.xpath('//form[@name="form"]/table[@class="tab"]/tbody/tr/td[@align="right"]/span/a/@onclick').extract()
temp2 = temp1[len(temp1)-1].split(";document")[0]
PageNum = temp2.split("P_PAGE.value=")[1].encode("utf-8")
for h in range(int(PageNum)):
yield FormRequest(url="http://www.equasis.org/EquasisWeb/restricted/ShipList?fs=ShipList",
meta={'cookiejar' : response.meta['cookiejar'],'pageNum':str(h+1)},
headers = self.headers,
cookies = self.cookies,
formdata = {
'P_CALLSIGN':'',
'P_IMO':'',
'P_NAME':'',
'P_PAGE':'%d' %(h+1)
},
dont_filter = True,
callback = self.parse_page_imo
)
def parse_page_imo(self, response):
hxs = Selector(response)
loginfail = hxs.xpath('//table[@class="tab"]/tbody/tr/td/div[@id="encart"]/li/text()').extract()
if(loginfail==([u'Your login (e-mail) or/and password are unknown in Equasis. Please, try again'])):
print "relogin"
self.start_requests()
if(loginfail == [u'Your session has expired, please try to login again']):
print "relogin"
self.start_requests()
if(loginfail == [u'You have been disconnected or your login/password is unknown in Equasis. Please, try again.']):
print "relogin"
self.start_requests()
if(loginfail == [u'By security, your session has been cancelled.']):
print "relogin"
self.start_requests()
htmlurl = response._url.split('?')[0]
if(htmlurl=='http://www.equasis.org/EquasisWeb/restricted/ShipList'):
item = ShipListItem()
shipNameHtml = hxs.xpath('//form[@name="formShip"]/table[@class="tab"]/tbody/tr/td[1]').extract()
shipHtmlTitle = Selector(text=shipNameHtml[0]).xpath('//text()').extract()
if(shipHtmlTitle[0].find('Name of ship')>-1):
item['ship_name'] = hxs.xpath('//form[@name="formShip"]/table[@class="tab"]/tbody/tr/td[1]/a/text()').extract()
onclickValue = hxs.xpath('//form[@name="formShip"]/table[@class="tab"]/tbody/tr/td[1]/a/@onclick').extract()
for i in range(len(onclickValue)):
onclickValue2 = onclickValue[i].split(";document")[0]
onclickValue3 = onclickValue2.split("P_IMO.value=")[1].encode("utf-8")
onclickValue[i] = onclickValue3.strip('\'')
item['imo'] = onclickValue
for h in range(len(item['imo'])):
p_imo = item['imo'][h]
ShipName = item['ship_name'][h]
p_imo = p_imo.rstrip()
yield FormRequest("http://www.equasis.org/EquasisWeb/restricted/ShipInfo?fs=ShipList",
meta = {'cookiejar' : response.meta['cookiejar'],'P_imo':p_imo,'ShipName':ShipName},
headers = self.headers,
cookies = self.cookies,
formdata = {
'P_IMO': p_imo
},
dont_filter = True,
callback = self.parse_page_mmsi,
)
def parse_page_mmsi(self,response):
hxs = Selector(response)
loginfail = hxs.xpath('//table[@class="tab"]/tbody/tr/td/div[@id="encart"]/li/text()').extract()
if(loginfail==([u'Your login (e-mail) or/and password are unknown in Equasis. Please, try again'])):
print "relogin"
self.start_requests()
if(loginfail == [u'Your session has expired, please try to login again']):
print "relogin"
self.start_requests()
if(loginfail == [u'You have been disconnected or your login/password is unknown in Equasis. Please, try again.']):
print "relogin"
self.start_requests()
if(loginfail == [u'By security, your session has been cancelled.']):
print "relogin"
self.start_requests()
shipHtml = hxs.xpath('//table[@class="encart"]/tbody/tr').extract()
item=ShipListItem()
item['mmsi'] = [u'']
for j in range(len(shipHtml)):
shipHtmlTitle = Selector(text=shipHtml[j]).xpath('//td[1]/text()').extract()
if(shipHtmlTitle[0].find('MMSI :')>-1):
item['mmsi'] = Selector(text=shipHtml[j]).xpath('//td[2]/text()').extract()
item['imo'] = response.meta['P_imo']
item['ship_name'] = response.meta['ShipName']
yield item
答案 0 :(得分:1)
最终我解决了我的问题。我写了一个下载中间件来处理它。当登录错误发生时,我暂停了蜘蛛并排队所有下一个请求并重新登录,然后恢复蜘蛛。一切似乎都没问题。
答案 1 :(得分:0)
start_requests
方法应返回scrapy.Request
个可迭代对象。只需在响应回调parse_page_imo
中调用它,只会获得一个临时值。您至少应该返回或产生这些值,例如:
for req in self.start_requests():
yield req
已编辑:同样在您的回复回调login
中,返回值也应为请求对象(而非列表)。