我有一个线程池执行器,它有 5 个工人,它们向 API 发起发布请求, API 每 60 秒可以处理 5 个用户,我试图让池等待 60 秒直到下一个 API 发布请求,但它不起作用
with futures.ThreadPoolExecutor(self.bulk_size) as executor:
future_to_id = {
executor.submit(
self.post_request, dict_item): self.get_id_and_origin_list(dict_item) for dict_item in Grouped_bach
}
time.sleep(60) # I am trying to make it sleep here but the API keeps blocking me as if it takes more than the limit
for future in futures.as_completed(future_to_id):
user_id,origin_list = future_to_id[future]
data = future.result()
if not raw_results:
data = self.response_converter(user_id,origin_list, data)
self.total_response += data