I want to make this job, which currently runs synchronously, run asynchronously. I use the document ID from Couchbase as the key in Redis, and I use the value read from Redis to upsert the document back into Couchbase. When it runs synchronously it works fine, but when it runs asynchronously it fails.
from couchbase.cluster import Cluster
from couchbase.cluster import PasswordAuthenticator
from couchbase.n1ql import N1QLQuery
from couchbase.n1ql import N1QLRequest
from elasticsearch import Elasticsearch
from time import sleep
from urllib import request
from wikidata.client import Client
from openpyxl import Workbook
import base64
import json
import osmapi
import re
import redis
import urllib
import hashlib
import time
import requests
import csv
from functools import reduce
wikid_client = Client()
es_client = Elasticsearch("--")
cluster = Cluster('couchbase://--')
authenticator = PasswordAuthenticator('ATLAS-DEV','ATLAS-DEV')
cluster.authenticate(authenticator)
bucket_test = cluster.open_bucket('ATLAS-DEV')
redis_db = redis.StrictRedis(host='--', port=6379, db=10, decode_responses=True)
map_api = osmapi.OsmApi()
N1QLQuery.timeout = 3600
def deep_get(dictionary, keys, default=None):
    # Walk a dotted key path ("a.b.c") through nested dicts, returning default when a key is missing.
    return reduce(lambda d, key: d.get(key, default) if isinstance(d, dict) else default, keys.split("."), dictionary)

query = N1QLQuery("SELECT meta().id, * FROM `ATLAS-DEV` WHERE class_type = 'REGION' AND (codes.osm IS NOT NULL OR codes.osm != '') LIMIT 20")
for k in bucket_test.n1ql_query(query):
    # print("k : ")
    # print(k)
    document_id = k.get("id")
    # print("document_id : " + document_id)
    osm_id = deep_get(k, "ATLAS-DEV.codes.osm")
    # Look up the polygon in Redis using the OSM id stored on the Couchbase document.
    polygon = redis_db.hget(name="osm:polygons", key=osm_id)
    if polygon is not None:
        k['ATLAS-DEV'].update({'boundaries': polygon})
        bucket_test.upsert(document_id, k['ATLAS-DEV'])
This is my synchronous job. It works well and upserts into Couchbase.
But here is my Celery job code:
from app.jobs.task import each_k

query = N1QLQuery("SELECT meta().id, * FROM `ATLAS-DEV` WHERE class_type = 'REGION' AND (codes.osm IS NOT NULL OR codes.osm != '') LIMIT 5")
for k in bucket_test.n1ql_query(query):
    # print("k : ")
    # print(k)
    each_k.delay(k)
# /app/jobs/task.py
def deep_get(dictionary, keys, default=None):
    return reduce(lambda d, key: d.get(key, default) if isinstance(d, dict) else default, keys.split("."), dictionary)

@app.task
@provide_redis
def each_k(redis_db, k):
    print("redis_db : ")
    print(redis_db)
    document_id = k.get("id")
    print("document_id : " + document_id)
    osm_id = deep_get(k, "ATLAS-DEV.codes.osm")
    polygon = redis_db.hget(name="osm:polygons", key=osm_id)
    print("polygon : ")
    print(polygon)
    if polygon is not None:
        k['ATLAS-DEV'].update({'boundaries': polygon})
        bucket_test.upsert(document_id, k['ATLAS-DEV'])
It returns:
[warn] kevent: Bad file descriptor
python-couchbase: self->nremaining == 0 at src/oputil.c:67. Abort[2019-06-26 15:13:03,675: ERROR/MainProcess] Task handler raised error: WorkerLostError('Worker exited prematurely: signal 6 (SIGABRT).')
Traceback (most recent call last):
File "/Users/n18016/Library/Python/3.7/lib/python/site-packages/billiard/pool.py", line 1226, in mark_as_worker_lost
human_status(exitcode)),
billiard.exceptions.WorkerLostError: Worker exited prematurely: signal 6 (SIGABRT).
[2019-06-26 15:13:03,701: ERROR/MainProcess] Process 'ForkPoolWorker-11' pid:7697 exited with 'signal 6 (SIGABRT)'
[2019-06-26 15:13:03,702: ERROR/MainProcess] Process 'ForkPoolWorker-10' pid:7696 exited with 'signal 6 (SIGABRT)'
[2019-06-26 15:13:03,703: ERROR/MainProcess] Task handler raised error: WorkerLostError('Worker exited prematurely: signal 6 (SIGABRT).')
Traceback (most recent call last):
File "/Users/n18016/Library/Python/3.7/lib/python/site-packages/billiard/pool.py", line 1226, in mark_as_worker_lost
human_status(exitcode)),
billiard.exceptions.WorkerLostError: Worker exited prematurely: signal 6 (SIGABRT).
[2019-06-26 15:13:03,704: ERROR/MainProcess] Task handler raised error: WorkerLostError('Worker exited prematurely: signal 6 (SIGABRT).')
Traceback (most recent call last):
File "/Users/n18016/Library/Python/3.7/lib/python/site-packages/billiard/pool.py", line 1226, in mark_as_worker_lost
human_status(exitcode)),
billiard.exceptions.WorkerLostError: Worker exited prematurely: signal 6 (SIGABRT).
Answer (score: 1):
https://github.com/celery/celery/issues/4113
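One possible reading of the error, as an assumption rather than anything confirmed in this thread: the abort inside python-couchbase (oputil.c) happens in the forked prefork workers, which suggests the Couchbase connection opened at import time in the parent process is being reused after the fork. Besides the solo pool fix described below, a minimal sketch of opening the connections per worker process via Celery's worker_process_init signal could look like this (the '--' endpoints are the redacted placeholders from the question):

from celery.signals import worker_process_init
from couchbase.cluster import Cluster, PasswordAuthenticator
import redis

bucket_test = None
redis_db = None

@worker_process_init.connect
def init_connections(**kwargs):
    # Runs once in every forked worker, so each process gets its own handles
    # instead of inheriting the ones created in the parent process.
    global bucket_test, redis_db
    cluster = Cluster('couchbase://--')
    cluster.authenticate(PasswordAuthenticator('ATLAS-DEV', 'ATLAS-DEV'))
    bucket_test = cluster.open_bucket('ATLAS-DEV')
    redis_db = redis.StrictRedis(host='--', port=6379, db=10, decode_responses=True)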
This CLI option saved me, and it works well: it runs the asynchronous job in a single thread. It may not be the answer for every case, but if you are OK with running celery worker --pool solo ..., this is the answer.
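For reference, a sketch of the full worker invocation with the solo pool; the application name `app` is only a guess based on the `app.jobs.task` import in the question:

# Run every task in the worker's main process (no forked pool processes),
# so the Couchbase and Redis connections are never shared across forks.
celery -A app worker --pool solo --loglevel=info

The solo pool trades concurrency for stability: tasks are still dispatched asynchronously through the broker, but each worker executes them one at a time.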