Traceback (most recent call last):
File "filter_quotes.py", line 451, in <module>
stocks_filter.symbol_ids();
File "filter_quotes.py", line 216, in symbol_ids
self.symbol_ids_list = p.map(parallel_request, self.batch_result)
File "/usr/lib/python3.6/multiprocessing/pool.py", line 266, in map
return self._map_async(func, iterable, mapstar, chunksize).get()
File "/usr/lib/python3.6/multiprocessing/pool.py", line 644, in get
raise self._value
File "/usr/lib/python3.6/multiprocessing/pool.py", line 424, in _handle_tasks
put(task)
File "/usr/lib/python3.6/multiprocessing/connection.py", line 206, in send
self._send_bytes(_ForkingPickler.dumps(obj))
File "/usr/lib/python3.6/multiprocessing/reduction.py", line 51, in dumps
cls(buf, protocol).dump(obj)
AttributeError: Can't pickle local object 'FilterStocks.symbol_ids.<locals>.parallel_request'
我正在努力使用以下类方法。我以这样的方式编写了函数parallel_request()
,我可以将它与Pool()
类一起使用。但是,我得到了上面的追溯,我不知道如何解决它。我如何在类parallel_request()
()中使用method symbol_ids
函数,以便它将修复之前的AttributeError?
from multiprocessing import Pool
def symbol_ids(self):
p = Pool()
def parallel_request(self, symbols):
response = requests.get(''.join((self.uri, symbols)), headers=self.headers)
return response.json()
self.symbol_ids_list = p.map(parallel_request, self.batch_result)
self.symbol_ids_result = [element['symbolId'] for element in self.symbol_ids_list]
上述功能源于底部测试代码。
我使用以下类从Questrade API(http://www.questrade.com/api/documentation/rest-operations/market-calls/markets-quotes-id)请求信息。我有超过11,000个股票代码,我请求Questrade API批量为100个符号。
from multiprocessing import Pool
import requests
import logging
import ast
import re
TOKEN_FILE = '../../token.txt'
class Test:
def __init__(self, access_token, api_server):
self.access_token = access_token
self.api_server = api_server
self.uri = '{}v1/symbols?names='.format(self.api_server)
self.headers = {'Authorization': 'Bearer {}'.format(self.access_token)}
def parallel_request(self, symbols):
print(symbols)
response = requests.get(''.join((self.uri, symbols)), headers=self.headers)
return response.json()
def process(self):
with open('Output.txt', 'r') as f:
batch_result = ast.literal_eval(f.read())
# symbol_ids_list = []
# for symbols in batch_result:
# response = requests.get(''.join((self.uri, symbols)), headers=headers)
# symbol_ids_list.extend(response.json().get('symbols'))
p = Pool()
test = p.map(self.parallel_request, batch_result)
if __name__ == '__main__':
with open(TOKEN_FILE, "r") as token_json:
token_json = ast.literal_eval(re.search('{.+}', token_json.read()).group(0))
access_token = token_json["access_token"]
api_server = token_json["api_server"].replace("\\", "")
obj = Test(access_token, api_server)
obj.process()