I've just started using asyncio, and I'm trying to use it to scrape a website. I'm trying to parse 6 sections of the site (self.signals), each of which has N pages of tables, so basically I'm trying to make both the loop over the sections and the loop over the pages within each section asynchronous. Here's what I have so far.
import asyncio
import concurrent.futures
from urllib.request import urlopen
from urllib.error import URLError

from bs4 import BeautifulSoup


class FinViz():
    def __init__(self):
        self.url = 'https://finviz.com/screener.ashx?v=160&s='
        self.signals = {
            'Earnings_Before' : 'n_earningsbefore',
            'Earnings_After' : 'n_earningsafter',
            'Most_Active' : 'ta_mostactive',
            'Top_Gainers' : 'ta_topgainers',
            'Most_Volatile' : 'ta_mostvolatile',
            'News' : 'n_majornews',
            'Upgrade' : 'n_upgrades',
            'Unusual_Volume' : 'ta_unusualvolume'
        }
        self.ticks = []

    def _parseTable(self, data):
        i, signal = data
        url = self.signals[signal] if i == 0 else self.signals[signal] + '&r={}'.format(str(i * 20 + 1))
        soup = BeautifulSoup(urlopen(self.url + url, timeout = 3).read(), 'html5lib')
        table = soup.find('div', {'id' : 'screener-content'}).find('table',
            {'width' : '100%', 'cellspacing': '1', 'cellpadding' : '3', 'border' : '0', 'bgcolor' : '#d3d3d3'})
        for row in table.findAll('tr'):
            col = row.findAll('td')[1]
            if col.find('a'):
                self.ticks.append(col.find('a').text)

    async def parseSignal(self, signal):
        try:
            soup = BeautifulSoup(urlopen(self.url + self.signals[signal], timeout = 3).read(), 'html5lib')
            tot = int(soup.find('td', {'class' : 'count-text'}).text.split()[1])
            with concurrent.futures.ThreadPoolExecutor(max_workers = 20) as executor:
                loop = asyncio.get_event_loop()
                futures = []
                for i in range(tot // 20 + (tot % 20 > 0)):
                    futures.append(loop.run_in_executor(executor, self._parseTable, (i, signal)))
                for response in await asyncio.gather(*futures):
                    pass
        except URLError:
            pass

    async def getAll(self):
        with concurrent.futures.ThreadPoolExecutor(max_workers = 20) as executor:
            loop = asyncio.get_event_loop()
            futures = []
            for signal in self.signals:
                futures.append(await loop.run_in_executor(executor, self.parseSignal, signal))
            for response in await asyncio.gather(*futures):
                pass
        print(self.ticks)


if __name__ == '__main__':
    x = FinViz()
    loop = asyncio.get_event_loop()
    loop.run_until_complete(x.getAll())
This does get the job done, but it's actually slower than when I parsed without asyncio.
Any tips for an async newbie?
Edit: added the full code.
Answer 0 (score: 0)
Keep in mind that Python has a GIL, so threaded code will not help performance here. To potentially speed things up, use a ProcessPoolExecutor instead, but be aware of the overhead it introduces, chiefly serializing (pickling) the data passed to and from the worker processes.
You can avoid that serialization cost if you run in a fork-safe environment and store the data in global variables; see the sketch below.
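A minimal sketch of that suggestion, assuming the blocking fetch-and-parse step is pulled out into a module-level function (the names parse_page and parse_all and the simplified CSS selector are illustrative, not from the original code). Workers return their rows instead of appending to self.ticks, because each process only ever mutates its own copy of the object:

import asyncio
import concurrent.futures
from urllib.request import urlopen

from bs4 import BeautifulSoup


def parse_page(url):
    # Blocking fetch + parse; runs inside a worker process, so the
    # main process's GIL is not a bottleneck for the parsing work.
    soup = BeautifulSoup(urlopen(url, timeout=3).read(), 'html5lib')
    return [a.text for a in soup.select('#screener-content table a')]


async def parse_all(urls):
    loop = asyncio.get_event_loop()
    ticks = []
    with concurrent.futures.ProcessPoolExecutor() as executor:
        # Arguments and return values are pickled, so keep them small:
        # pass a URL in, get a plain list of strings back.
        futures = [loop.run_in_executor(executor, parse_page, u) for u in urls]
        for page_ticks in await asyncio.gather(*futures):
            ticks.extend(page_ticks)
    return ticks

The run_in_executor / gather structure from the question carries over unchanged; only the executor type and the return-a-value convention differ.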
You can also do things like shared memory-mapped files... sharing raw strings/bytes is the fastest.
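A rough illustration of the memory-mapped-file idea (the file name, chunk sizes, and the row-counting task are made up for the example, and this is not necessarily what the answer had in mind): the parent writes the raw bytes once, and each worker maps the same file read-only, so nothing but a pair of integers is pickled per task.

import mmap
import concurrent.futures

PATH = 'pages.bin'  # hypothetical file holding the raw scraped bytes


def count_rows(offset, length):
    # Each worker maps the shared file read-only; the OS page cache backs
    # the mapping, so the bytes themselves are never pickled.
    with open(PATH, 'rb') as f, mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ) as mm:
        return mm[offset:offset + length].count(b'<tr')


if __name__ == '__main__':
    with open(PATH, 'wb') as f:
        f.write(b'<tr>...</tr>' * 1000)   # parent writes the data once
    chunks = [(i * 1200, 1200) for i in range(10)]
    with concurrent.futures.ProcessPoolExecutor() as executor:
        offsets, lengths = zip(*chunks)
        print(sum(executor.map(count_rows, offsets, lengths)))

On Python 3.8+, multiprocessing.shared_memory offers a similar way to share raw bytes between processes without going through a file.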