抓取 ASPX 网站的页面

时间:2018-07-18 00:19:17

标签: asp.net python-3.x web-scraping python-requests

在过去一个月左右的时间内,我一直在尝试从aspx网站上阅读几页。我在站点上找到所有必需的项目都没有问题,但是我尝试的解决方案仍然无法正常工作。我读到某处必须显示所有标头详细信息,因此我添加了它们。我还在某处读到__EVENTTARGET必须设置为告诉aspx已按下哪个按钮的位置,因此我尝试了一些其他操作(请参见下文)。我还读到应该建立一个处理cookie的会话-因此我也实现了它。到目前为止,我的代码片段产生的信息与我使用Web开发工具分析发布请求时得到的信息完全相同(打印行已被注释掉)-但是此代码始终为我提供第一页。有谁知道该代码中缺少什么才能正常工作。我还应该指出,硒或机械化并不是该项目的真正选择。

import requests
from bs4 import BeautifulSoup
import time
import collections
import json

def SPAIN_STK_LIST(numpage):
    """Print company names from the Bolsa de Madrid listed-companies pages.

    Performs a GET for the first page, then POSTs the ASP.NET postback form
    (__VIEWSTATE / __EVENTVALIDATION harvested from the previous response)
    for each subsequent page.

    :param numpage: number of result pages to fetch (pages 1..numpage).
    """
    url = "http://www.bolsamadrid.es/ing/aspx/Empresas/Empresas.aspx"
    # Headers for the initial GET (a plain navigation request).
    get_header = {'Accept' : 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
          'Accept-Encoding' : 'gzip, deflate',
          'Accept-language' : 'en-US,en;q=0.9',
          'Cache-Control' : 'max-age=0',
          'Connection' : 'keep-alive',
          'Content-Type': 'text/html; charset=utf-8',
          'Host' : 'www.bolsamadrid.es',
          'Origin' : 'null',
          'Upgrade-Insecure-Requests' : '1',
          'User-Agent' : 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebkit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36'
          }
    # Headers for the form POSTs: only Content-Type differs from get_header.
    post_header = dict(get_header)
    post_header['Content-Type'] = 'application/x-www-form-urlencoded'

    # BUG FIX: one session for the whole run (the original created a fresh
    # session inside the loop, throwing away the ASP.NET cookies each time).
    ses = requests.session()
    payload = collections.OrderedDict()

    # BUG FIX: this loop was dedented to module level in the original, so it
    # referenced `numpage` outside the function and never ran correctly.
    for i in range(0, numpage):
        if i == 0:
            req = ses.get(url, headers=get_header)
        else:
            req = ses.post(url, headers=post_header, data=payload)
        soup = BeautifulSoup(req.text, "lxml")

        # Harvest the hidden ASP.NET state fields needed for the next postback.
        viewstate = soup.select("#__VIEWSTATE")[0]['value']
        eventval = soup.select("#__EVENTVALIDATION")[0]['value']

        payload = collections.OrderedDict()
        payload['__EVENTTARGET'] = ""
        payload['__EVENTARGUMENT'] = ""
        payload['__VIEWSTATE'] = viewstate
        payload['__VIEWSTATEGENERATOR'] = "65A1DED9"
        payload['__EVENTVALIDATION'] = eventval
        # BUG FIX: field name was misspelled 'ct100...' — the real ASP.NET
        # control prefix is 'ctl00' (lowercase L), as the table id below shows.
        payload['ctl00$Contenido$GoPag'] = i + 1

        table = soup.find("table", {"id" : "ctl00_Contenido_tblEmisoras"})
        for row in table.findAll("tr")[1:]:
            cells = row.findAll("td")
            print(cells[0].find("a").get_text().replace(",","").replace("S.A.", ""))
        time.sleep(1)  # be polite to the server between page requests


SPAIN_STK_LIST(6)

请注意，因为这是第一个请求，所以第一个标头的 Content-Type 设置为 "text/html"，但随后的所有请求的 Content-Type 均为 "application/x-www-form-urlencoded"。任何有关我下一步应该尝试的指点将不胜感激。 E。

2 个答案:

答案 0 :(得分:2)

最简单的方法如下所示。为什么要对那些 __EVENTTARGET、__VIEWSTATE 等字段进行硬编码？让脚本来处理这些问题：

import requests
from bs4 import BeautifulSoup

url = "http://www.bolsamadrid.es/ing/aspx/Empresas/Empresas.aspx"

# Initial plain GET to obtain the first page and its hidden ASP.NET fields.
res = requests.get(url,headers = {"User-Agent":"Mozilla/5.0"})
soup = BeautifulSoup(res.text,"lxml")

for page in range(7):
    # Rebuild the postback form straight from the page we just received:
    # every <input> inside #aspnetForm is echoed back, except the page-number
    # box, which we overwrite with the page we want next.
    formdata = {
        field.get("name"): (page if "ctl00$Contenido$GoPag" in field.get("name")
                            else field.get("value"))
        for field in soup.select("#aspnetForm input")
    }

    req = requests.post(url,data=formdata)
    soup = BeautifulSoup(req.text,"lxml")
    # Skip the header row, then print each data row as a list of cell texts.
    for row in soup.select("#ctl00_Contenido_tblEmisoras tr")[1:]:
        print([cell.get_text(strip=True) for cell in row.select("td")])

假设您需要将表格数据分布在多个页面上。

答案 1 :(得分:0)

您需要在请求前设置payload

import requests
from bs4 import BeautifulSoup
import time
import collections
import json

def SPAIN_STK_LIST(numpage):
    """Print company names from the Bolsa de Madrid listed-companies pages.

    Uses one persistent session: a GET for the first page, then an ASP.NET
    "next page" postback (__EVENTTARGET = ctl00$Contenido$SiguientesArr) for
    each following page, re-harvesting __VIEWSTATE / __EVENTVALIDATION from
    every response.

    :param numpage: number of result pages to fetch (pages 1..numpage).
    """
    url = "http://www.bolsamadrid.es/ing/aspx/Empresas/Empresas.aspx"
    # Headers for the initial GET (plain navigation request).
    get_header = {'Accept' : 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
          'Accept-Encoding' : 'gzip, deflate',
          'Accept-language' : 'en-US,en;q=0.9',
          'Cache-Control' : 'max-age=0',
          'Connection' : 'keep-alive',
          'Content-Type': 'text/html; charset=utf-8',
          'Host' : 'www.bolsamadrid.es',
          'Origin' : 'null',
          'Upgrade-Insecure-Requests' : '1',
          'User-Agent' : 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebkit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36'
          }
    # POST headers: identical except for the form-encoded Content-Type.
    post_header = dict(get_header)
    post_header['Content-Type'] = 'application/x-www-form-urlencoded'

    ses = requests.session()  # one session so the ASP.NET cookies persist

    viewstate = ""
    eventval = ""

    for i in range(0, numpage):
        if i == 0:
            req = ses.get(url, headers=get_header)
        else:
            # Postback that simulates clicking the "next page" arrow.
            payload = collections.OrderedDict()
            payload['__EVENTTARGET'] = "ctl00$Contenido$SiguientesArr"
            payload['__EVENTARGUMENT'] = ""
            payload['__VIEWSTATE'] = viewstate
            payload['__VIEWSTATEGENERATOR'] = "65A1DED9"
            payload['__EVENTVALIDATION'] = eventval
            # BUG FIX: key was misspelled 'ct100...'; the control prefix is
            # 'ctl00' (lowercase L). Left empty because __EVENTTARGET drives
            # the navigation here.
            payload['ctl00$Contenido$GoPag'] = ""
            req = ses.post(url, headers=post_header, data=payload)

        soup = BeautifulSoup(req.text, "lxml")
        # Harvest the hidden state fields needed for the next postback.
        viewstate = soup.select("#__VIEWSTATE")[0]['value']
        eventval = soup.select("#__EVENTVALIDATION")[0]['value']

        # BUG FIX: the original only extracted the table in the POST branch,
        # so the first page's companies were never printed. Parse every page.
        table = soup.find("table", {"id" : "ctl00_Contenido_tblEmisoras"})
        for row in table.findAll("tr")[1:]:
            cells = row.findAll("td")
            print( cells[0].find("a").get_text().replace(",","").replace("S.A.", "").encode('utf-8') )
        time.sleep(1)  # be polite to the server between page requests


SPAIN_STK_LIST(6)