在此页面上有一个显示为 Download CSV 的按钮:
http://www.nasdaqomxnordic.com/aktier/microsite?Instrument=CSE77855&name=Pandora&ISIN=DK0060252690
如何使用 Python 下载该文件?
页面的html读取:
<a class="floatRight exportTrades" id="exportIntradayTradesCSV">Download CSV</a>
<script>
// #*
// Fetches intraday trades for the current instrument via the Nordic RTI
// real-time API, renders them into #tradesForShareOutput as a sortable
// table, and provides the Excel export behind the "Download CSV" button.
var tradesForShare = {
// Builds the market-data query and subscribes it to the RTI updater.
load: function () {
// Query parameters sent to MarketData/GetMarketData.
var q = {
"SubSystem": "Prices",
"Action": "GetInstrument",
"inst.an": "nm",
"inst.e": "3",
"Exchange": "NMF",
"Instrument": webCore.getInstrument(),
"cache": "skip",
"app": location["pathname"],
"datasource": "prod",
"translationFile": "translation",
"DefaultDecimals": false
};
// Show a loading spinner while the data arrives.
$("#tradesForShareOutput").loading("/static/nordic/css/img/loading.gif");
var nordicRTI = NordicRTI.getInstance();
var url = window.webCore.getWebAPIURL("prod", "MarketData/GetMarketData", true);
// Each RTI update re-renders the table via parseData.
var tradesRTI = new RTIObject(url, q, function (data) {
tradesForShare.parseData(data);
console.log(tradesRTI);
});
nordicRTI.addRTIObject(tradesRTI);
// NOTE(review): "tradesForShareTable" lacks a leading '#', so this is a
// tag-name selector, and jQuery's .has() returns a jQuery object, which
// is always truthy -- the else branch looks unreachable; verify intent.
if($("tradesForShareTable").has("tr.odd")) {
$('.exportTrades').removeClass('disabled');
$('.exportTrades.disabled').css("pointer-events","auto");
} else {
$('.exportTrades').addClass('disabled');
$('.exportTrades').css("pointer-events","none");
}
/*webCore.getMarketData(q, function (data) {
tradesForShare.parseData(data);
}, true);*/
//var url = window.webCore.getWebAPIURL("prod", "MarketData/GetMarketData", true);
/*$.getJSON(url, q, function (data) {
tradesForShare.parseData(data);
});*/
/*$.ajax({
type: "get",
url: url,
data: q,
dataType: "jsonp",
cache: true,
success: function (data) {
tradesForShare.parseData(data);
},
jsonp: "callback"
});*/
//setTimeout ( tradesForShare.load, 1000*30 ); // update every minute
},
// Renders the trades payload as a sortable table (price, volume, name,
// buyer, seller, time) inside #tradesForShareOutput.
parseData: function (data) {
if(data.instruments != null) {
$("#tradesForShareOutput").empty();
var table = $("<table></table>").attr("id", "tradesForShareTable").addClass("tablesorter");
var thead = $("<thead></thead>");
var row = $("<tr></tr>");
// Header labels come from the API's translation tables.
var kurs = $("<th></th>").text(webCore.getTranslationFor("trades", "p", data));// data.attributeTranslations.trades.p.trans[window.currentLanguage]);
var vol = $("<th></th>").text(webCore.getTranslationFor("trades", "v", data));// data.attributeTranslations.trades.v.trans[window.currentLanguage]);
var name = $("<th></th>").text(webCore.getTranslationFor("trades", "nm", data));// data.attributeTranslations.trades.nm.trans[window.currentLanguage]);
var buyer = $("<th></th>").text(webCore.getTranslationFor("trades", "b", data));// data.attributeTranslations.trades.b.trans[window.currentLanguage]);
var seller = $("<th></th>").text(webCore.getTranslationFor("trades", "s", data));// data.attributeTranslations.trades.s.trans[window.currentLanguage]);
var time = $("<th></th>").text(webCore.getTranslationFor("trades", "t", data));// data.attributeTranslations.trades.t.trans[window.currentLanguage]);
row.append(kurs).append(vol).append(name).append(buyer).append(seller).append(time);
thead.append(row);
var tbody = $("<tbody></tbody>");
// One table row per trade for the current instrument.
$.each(data.instruments[webCore.getInstrument().toLowerCase()].trades, function (k, v) {
row = $("<tr></tr>");
kurs = $("<td></td>").text(webCore.formatNumeric(v.values.p, 3));
vol = $("<td></td>").text(window.webCore.formatNumeric(v.values.v, 0));
name = $("<td></td>").text(v.values.nm);
buyer = $("<td></td>").text(v.values.b);
seller = $("<td></td>").text(v.values.s);
time = $("<td></td>").text(webCore.getTimeFromDateString(v.values.t));
row.append(kurs).append(vol).append(name).append(buyer).append(seller).append(time);
tbody.append(row);
});
table.append(thead).append(tbody);
$("#tradesForShareOutput").append(table);
$("#tradesForShareTable").tablesorter({widgets: ['zebra']});
}
},
// Submits the hidden #xlsForm, POSTing an xmlquery to the proxy endpoint
// to trigger the Excel (.xls) download of today's trades.
excel: function () {
var instrument = null;
instrument = window.webCore.getInstrument();
// Today's date as YYYY-MM-DD (the replace() of '-' with '-' is a no-op).
var utc = new Date().toJSON().slice(0,10).replace(/-/g,'-');
$("#xlsForm").attr( "action", webCore.getProxyURL("prod"));
var xmlquery = webCore.createQuery( Utils.Constants.marketAction.getTrades, {}, {
t__a: "1,2,5,10,7,8,18",
FromDate : utc,
Instrument : instrument,
ext_contenttype : "application/vnd.ms-excel",
ext_contenttypefilename : "share_export.xls",
ext_xslt:"t_table_simple.xsl",
ext_xslt_lang: currentLanguage,
showall: "1"
});
console.log(xmlquery);
$("#xmlquery").val( xmlquery );
$("#xlsForm").submit();
}
};
// On DOM ready: load the trades table and wire the export button to the
// form-POST based Excel export.
$(function () {
tradesForShare.load();
$("#exportIntradayTradesCSV").on({
click: function (e) {
tradesForShare.excel();
//window.webCore.exportTableToCSVClickEvent($("#exportIntradayTradesCSV"), $("#tradesForShareOutput"), '_' + window.webCore.getInstrument() + '.csv');
}
});
});
</script>
我尝试在 Google Chrome 中使用 Inspect(检查)并查看 Event Listeners(事件监听器)。
单击按钮时,我得到以下输出:
<post>
<param name="SubSystem" value="Prices"/>
<param name="Action" value="GetTrades"/>
<param name="Exchange" value="NMF"/>
<param name="t__a" value="1,2,5,10,7,8,18"/>
<param name="FromDate" value="2018-08-29"/>
<param name="Instrument" value="CSE77855"/>
<param name="ext_contenttype" value="application/vnd.ms-excel"/>
<param name="ext_contenttypefilename" value="share_export.xls"/>
<param name="ext_xslt" value="/nordicV3/t_table_simple.xsl"/>
<param name="ext_xslt_lang" value="en"/>
<param name="showall" value="1"/>
<param name="app" value="/aktier/microsite"/>
</post>
所以我想我可以做些类似于以下的事情,但是不起作用;参见下面的输出。
# NOTE(review): this is the question's known-failing attempt -- the GET to
# the GetMarketData endpoint returns {'success': False, ...} (see the
# output that follows); the export parameters belong to a different
# POST-based endpoint.
import requests
url = 'http://www.nasdaqomxnordic.com/WebAPI/api/MarketData/GetMarketData'
# Parameters copied from the <post> block captured in the browser.
params = {
"SubSystem": "Prices",
"Action": "GetTrades",
"Exchange": "NMF",
"t__a": "1,2,5,10,7,8,18",
"FromDate": "2018-08-29",
"Instrument": "CSE77855",
"ext_contenttype": "application/vnd.ms-excel",
"ext_contenttypefilename": "share_export.xls",
"ext_xslt": "/nordicV3/t_table_simple.xsl",
"ext_xslt_lang": "en",
"showall": "1",
"app": "/aktier/microsite",
}
r = requests.get(url, params=params)
print(r.json())
我得到以下输出:
{'linkCall': 'SubSystem=Prices&Action=GetTrades&Exchange=NMF&t.a=1&t.a=2&t.a=5&t.a=10&t.a=7&t.a=8&t.a=18&FromDate=2018-08-29&Instrument=CSE77855&ext_contenttype=application%2fvnd.ms-excel&ext_contenttypefilename=share_export.xls&ext_xslt=%2fnordicV3%2ft_table_simple.xsl&ext_xslt_lang=en&showall=1&app=%2faktier%2fmicrosite', 'instruments': None, 'derivatives': None, 'warrants': None, 'attributeTranslations': {}, 'message': None, 'success': False}
如果可能,我想避免使用Selenium
。
答案 0(得分:2):
检查html,我注意到表单的动作是/webproxy/DataFeedProxy.aspx
,方法是post
。这意味着该表单是通过POST请求提交给http://www.nasdaqomxnordic.com/webproxy/DataFeedProxy.aspx
的。该表单具有一个名称为xmlquery
的字段,并在您的问题中使用html值。下面的代码应下载文件。
import requests

# The site's "Download CSV" button submits this xmlquery as a form field
# in a POST to the DataFeedProxy endpoint; replicate that request here.
url = 'http://www.nasdaqomxnordic.com/webproxy/DataFeedProxy.aspx'
xmlquery = '''<post>
<param name="SubSystem" value="Prices"/>
<param name="Action" value="GetTrades"/>
<param name="Exchange" value="NMF"/>
<param name="t__a" value="1,2,5,10,7,8,18"/>
<param name="FromDate" value="2018-08-29"/>
<param name="Instrument" value="CSE77855"/>
<param name="ext_contenttype" value="application/vnd.ms-excel"/>
<param name="ext_contenttypefilename" value="share_export.xls"/>
<param name="ext_xslt" value="/nordicV3/t_table_simple.xsl"/>
<param name="ext_xslt_lang" value="en"/>
<param name="showall" value="1"/>
<param name="app" value="/aktier/microsite"/>
</post>'''
# Use a timeout so the request cannot hang forever, and fail fast on
# HTTP error statuses instead of silently parsing an error page.
r = requests.post(url, data={'xmlquery': xmlquery}, timeout=30)
r.raise_for_status()
html = r.text
该文件不是 csv(我从浏览器下载得到的文件也不是)——它的扩展名为 .xls,但实际包含一个大型 HTML 表格。不过,您可以借助 BeautifulSoup 和 csv 模块从中创建一个 csv 文件。
from bs4 import BeautifulSoup
import csv

# Parse the returned HTML table and write it out as a CSV file.
soup = BeautifulSoup(html, 'html.parser')

# Column headers, plus an extra 'Name' column sourced from each data
# row's first-cell title attribute.
header_row = [th.text for th in soup.select('th')]
header_row.append('Name')

data_rows = []
for tr in soup.select('tr')[1:]:  # skip the header row
    cells = [td.text for td in tr.select('td')]
    cells.append(tr.td['title'].rstrip(' - '))
    data_rows.append(cells)

with open('file.csv', 'w', newline='') as f:
    writer = csv.writer(f)
    writer.writerow(header_row)
    writer.writerows(data_rows)
请注意,由于文件很大,BeautifulSoup 解析可能会花费一些时间。如果您使用的是 Python 2.x,open 不接受 newline 参数;在这种情况下,您必须以二进制模式打开文件,否则生成的 CSV 可能包含空行。
如tommy.carstensen所述,pandas
更适合此任务。它具有正确的工具(read_html
和to_csv
),并且比BeautifulSoup
更快。
import pandas as pd

# read_html returns a list of DataFrames; the trades table is the first.
tables = pd.read_html(htm_string, index_col='Time', parse_dates=True)
tables[0].to_csv(path)
Name 列不包含在该文件中,因为它不是表格的列,而是 title 属性的值。但是我们可以通过其他方式获取此列——例如从原始 URL 中获取。由于该列所有行的值都相同,我们只需使用查询字符串中 name 参数的值创建一个新的 Name 列即可。
import pandas as pd
from urllib.parse import urlparse, parse_qs

# The instrument name is not a table column, but it is available in the
# page URL's query string, so recover it from there.
url = 'http://www.nasdaqomxnordic.com/aktier/microsite?Instrument=CSE77855&name=Pandora&ISIN=DK0060252690'
query_params = parse_qs(urlparse(url).query)

df = pd.read_html(html, index_col='Time', parse_dates=True)[0]
df['Name'] = query_params['name'][0]
df.to_csv('file.csv')
答案 1(得分:0):
除了 t.m.adam 提出的出色解决方案之外,我还必须在标头中指定/模拟 User-Agent
才能从 post 调用中获得响应:
# Some servers refuse requests that lack a browser-like User-Agent header.
headers = {'User-Agent': 'Safari/526.5'}
r = requests.post(url, data={'xmlquery': xmlquery}, headers=headers)