I posted a new question related to an old one in order to solve a queue-get problem. This is the code (thanks to Martijn Pieters):
import asyncio
import sys
import json
import os
import websockets


async def socket_consumer(socket, outgoing):
    # take messages from the web socket and push them into the queue
    async for message in socket:
        await outgoing.put(message)
        file = open(r"/home/host/Desktop/FromSocket.txt", "a")
        file.write("From socket: " + ascii(message) + "\n")
        file.close()


async def socket_producer(socket, incoming):
    # take messages from the queue and send them to the socket
    while True:
        message = await incoming.get()
        file = open(r"/home/host/Desktop/ToSocket.txt", "a")
        file.write("To socket: " + ascii(message) + "\n")
        file.close()
        await socket.send(message)


async def connect_socket(incoming, outgoing, loop=None):
    header = {"Authorization": r"Basic XXX="}
    uri = 'XXXXXX'
    async with websockets.connect(uri, extra_headers=header) as web_socket:
        # create tasks for the consumer and producer. The asyncio loop will
        # manage these independently
        consumer_task = asyncio.ensure_future(
            socket_consumer(web_socket, outgoing), loop=loop)
        producer_task = asyncio.ensure_future(
            socket_producer(web_socket, incoming), loop=loop)

        # start both tasks, but have the loop return to us when one of them
        # has ended. We can then cancel the remainder
        done, pending = await asyncio.wait(
            [consumer_task, producer_task], return_when=asyncio.FIRST_COMPLETED)
        for task in pending:
            task.cancel()


# pipe support
async def stdio(loop=None):
    if loop is None:
        loop = asyncio.get_event_loop()

    reader = asyncio.StreamReader()
    await loop.connect_read_pipe(
        lambda: asyncio.StreamReaderProtocol(reader), sys.stdin)

    writer_transport, writer_protocol = await loop.connect_write_pipe(
        asyncio.streams.FlowControlMixin, os.fdopen(sys.stdout.fileno(), 'wb'))
    writer = asyncio.streams.StreamWriter(
        writer_transport, writer_protocol, None, loop)

    return reader, writer


async def pipe_consumer(pipe_reader, outgoing):
    # take messages from the pipe and push them into the queue
    while True:
        message = await pipe_reader.readline()
        if not message:
            break
        file = open(r"/home/host/Desktop/FromPipe.txt", "a")
        file.write("From pipe: " + ascii(message.decode('utf8')) + "\n")
        file.close()
        await outgoing.put(message.decode('utf8'))


async def pipe_producer(pipe_writer, incoming):
    # take messages from the queue and send them to the pipe
    while True:
        json_message = await incoming.get()
        file = open(r"/home/host/Desktop/ToPipe.txt", "a")
        file.write("Send to pipe message: " + ascii(json_message) + "\n")
        file.close()
        try:
            message = json.loads(json_message)
            message_type = int(message.get('header', {}).get('messageID', -1))
        except (ValueError, TypeError, AttributeError):
            # failed to decode the message, or the message was not
            # a dictionary, or the messageID was not convertible to an integer
            message_type = None
            file = open(r"/home/host/Desktop/Error.txt", "a")
            file.write(" Error \n")
            file.close()
        # 1 is DENM message, 2 is CAM message
        file = open(r"/home/host/Desktop/ToPipe.txt", "a")
        file.write("Send to pipe type: " + str(message_type) + "\n")
        if message_type in {1, 2}:
            file.write("Send to pipe: " + json_message + "\n")
            pipe_writer.write(json_message.encode('utf8') + b'\n')
            await pipe_writer.drain()
        file.close()
async def connect_pipe(incoming, outgoing, loop=None):
    reader, writer = await stdio()
    # create tasks for the consumer and producer. The asyncio loop will
    # manage these independently
    consumer_task = asyncio.ensure_future(
        pipe_consumer(reader, outgoing), loop=loop)
    producer_task = asyncio.ensure_future(
        pipe_producer(writer, incoming), loop=loop)

    # start both tasks, but have the loop return to us when one of them
    # has ended. We can then cancel the remainder
    done, pending = await asyncio.wait(
        [consumer_task, producer_task], return_when=asyncio.FIRST_COMPLETED)
    for task in pending:
        task.cancel()
    # force a result check; if there was an exception it'll be re-raised
    for task in done:
        task.result()


def main():
    loop = asyncio.get_event_loop()
    pipe_to_socket = asyncio.Queue(loop=loop)
    socket_to_pipe = asyncio.Queue(loop=loop)
    socket_coro = connect_socket(pipe_to_socket, socket_to_pipe, loop=loop)
    pipe_coro = connect_pipe(socket_to_pipe, pipe_to_socket, loop=loop)
    loop.run_until_complete(asyncio.gather(socket_coro, pipe_coro))


main()
This code runs as a child process that the parent starts with subprocess.Popen(["python3", test], stdin=subprocess.PIPE, stdout=subprocess.PIPE, bufsize=2048).

The problem is that socket_consumer puts the object it receives from the socket into the queue, but pipe_producer never gets it at incoming.get().

Writing the files is only for testing purposes. The parent, at the moment, is just this (for testing only):
test = r"/home/host/PycharmProjects/Tim/Tim.py"
process = subprocess.Popen(["python3", test],
stdin=subprocess.PIPE, stdout=subprocess.PIPE, bufsize=2048)
for i in range(5):
message = '{"header":{"protocolVersion":1,"messageID":2,"stationID":400}, the rest of json...}}';
jsonValueBytes = message.encode("utf-8")
process.stdin.write(jsonValueBytes + b"\n")
process.stdin.close()
process.wait()
To send to the web socket I use this code instead:
#!/usr/bin/env python
import asyncio
import websockets


async def hello(uri):
    header = {"Authorization": r"Basic XXXX="}
    message = '{"header":{"protocolVersion":1,"messageID":2,"stationID":400},"cam":{"generationDeltaTime":1,"camParameters":{"basicContainer":{"stationType":5,"referencePosition":{"latitude":451114425,"longitude":76720957,"positionConfidenceEllipse":{"semiMajorConfidence":4095,"semiMinorConfidence":4095,"semiMajorOrientation":3601},...other fields}}'
    async with websockets.connect(uri, extra_headers=header) as websocket:
        await websocket.send(message)


asyncio.get_event_loop().run_until_complete(
    hello('XXX'))
Sending through the pipe works: I receive on the pipe and forward to the socket (the files FromPipe.txt and ToSocket.txt are correct).

Then I send a message to the server that has the web socket open, and that server sends a message to the child. When the child receives it from the socket, the file FromSocket.txt is created, but ToPipe.txt is not; the producer never gets past await incoming.get().

FromSocket.txt has this content:

From socket: '{"header":{"protocolVersion":1,"messageID":2,"stationID":400},"cam":{"generationDeltaTime":1, ... other field}}'

If there were a problem with the type retrieval, the file would still be created, because writing it is the first instruction after json_message = await incoming.get(). So I think there is a problem with the queue.

For testing, I put the outgoing.put(message) of socket_consumer right after the await incoming.get().

UPDATE: If I run only the child (so without the pipe to the parent), ToPipe.txt is correct and the transfer of messages from the socket to the pipe works fine. For my test I run the parent, which sends a single message to the pipe; the child sends that message to the socket, then the child catches the reply from the socket, but it does not send it to the pipe and ToPipe.txt is not created. Maybe there is something wrong in the main method.
Answer 0: (score: 1)
You are writing double-encoded JSON to the subprocess:

message = '{"header":{"protocolVersion":1,"messageID":2,"stationID":400}, the rest of json...}}'
jsonValue = json.dumps(message)

message is already a JSON string, so jsonValue is a double-encoded JSON string.

The pipe consumer pushes this double-encoded string into the queue for the socket. Next, the websocket producer in socket_producer() encodes the message again:
while True:
    message = await incoming.get()
    # ...
    json_message = json.dumps(message)
    await socket.send(json_message)
So now json_message is a triple-encoded JSON value: a JSON document containing a JSON document, which itself contains a JSON document:
>>> import json
>>> message = '{"header":{"protocolVersion":1,"messageID":2,"stationID":400}}'  # valid JSON
>>> json_message = json.dumps(message)
>>> print(json_message)  # double-encoded
"{\"header\":{\"protocolVersion\":1,\"messageID\":2,\"stationID\":400}}"
>>> json_message = json.dumps(json_message)  # encode *again*
>>> print(json_message)  # triple-encoded
"\"{\\\"header\\\":{\\\"protocolVersion\\\":1,\\\"messageID\\\":2,\\\"stationID\\\":400}}\""
I don't know what exactly your web socket does, but let's assume it uses json.loads() once and then echoes the decoded message back. That means socket_consumer() receives a JSON document that is encoded only twice. Your FromSocket.txt log certainly suggests this is what happened, because it contains a double-encoded JSON message:
From socket: "{\"header\":{\"protocolVersion\":1,\"messageID\":2,\"stationID\":400},\"cam\":{\"generationDeltaTime\":1,...other fields}}"
Note the \" entries, and that the whole document is wrapped in quotes, but that there are no \\\ triple backslashes inside the values.
This extra layer of JSON encoding, however, breaks the pipe_producer() coroutine, which expects the message to decode to a dictionary, not to another string (even if that string contains another JSON document):
message = json.loads(json_message)
type = int(message.get('header', {}).get('messageID', -1))
message decodes to a string instead, so message.get fails with an AttributeError, causing the coroutine to exit:
>>> json_message = "{\"header\":{\"protocolVersion\":1,\"messageID\":2,\"stationID\":400}}}" # double encoded
>>> message = json.loads(json_message)
>>> message # Back one stop, single-encoded JSON
'{"header":{"protocolVersion":1,"messageID":2,"stationID":400}}}'
>>> type(message) # it's a string with JSON, not a dictionary
<class 'str'>
>>> message.get('header')
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: 'str' object has no attribute 'get'
You need to make sure you do not encode your data too many times! If your pipe receives JSON data, don't encode it again when sending it on to the socket. And when sending data into the pipe from the parent process, don't double-encode it: if you already have a JSON string, there is no value in passing it through json.dumps() once more.
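To make that concrete, here is a minimal sketch of socket_producer() with the redundant json.dumps() removed; the queue already holds JSON text, so it is forwarded as-is (this matches the producer in the question's code, minus the debug file writes):

async def socket_producer(socket, incoming):
    # take JSON strings from the queue and send them to the socket unchanged;
    # no json.dumps() here, the text in the queue is already encoded JSON
    while True:
        message = await incoming.get()
        await socket.send(message)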
It would also be prudent to add more fail-safes to the coroutines. I didn't make the JSON decoding robust enough, so let's remedy that part:
async def pipe_producer(pipe_writer, incoming):
    # take messages from the queue and send them to the pipe
    while True:
        json_message = await incoming.get()
        try:
            message = json.loads(json_message)
            type = int(message.get('header', {}).get('messageID', -1))
        except (ValueError, TypeError, AttributeError):
            # failed to decode the message, or the message was not
            # a dictionary, or the messageID was not convertible to an integer
            type = None
        # 1 is DENM message, 2 is CAM message
        if type in {1, 2}:
            pipe_writer.write(json_message.encode('utf8') + b'\n')
            await pipe_writer.drain()
You probably want to record somewhere that decoding failed (push the message onto a log queue that a separate task writes out to a log).
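A rough sketch of that logging idea is shown below; the errors queue, the log file path and the log_writer() task are hypothetical names chosen for illustration, not part of the answer's code:

# hypothetical sketch: an `errors` asyncio.Queue created in main() and shared
# with pipe_producer(); a separate task drains it and appends to a log file
async def log_writer(errors):
    while True:
        entry = await errors.get()
        with open(r"/home/host/Desktop/decode_errors.log", "a") as log_file:
            log_file.write(entry + "\n")

# inside the except block of pipe_producer() you would then do something like:
#     await errors.put("failed to decode: " + ascii(json_message))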
Next, we can update the connect_* functions so they no longer ignore exceptions in the completed tasks:
done, pending = await asyncio.wait(
    [consumer_task, producer_task], return_when=asyncio.FIRST_COMPLETED)
for task in pending:
    task.cancel()
# force a result check; if there was an exception it'll be re-raised
for task in done:
    task.result()
The for task in done: task.result() check re-raises any exception that was raised in the consumer or the producer. Since the connect_* coroutines are run via asyncio.gather(), which in turn is run by loop.run_until_complete() in main(), that exception is propagated all the way up, so Python exits and you get to see the traceback printed. I have updated my other answer to include the task.result() loop, as that is good practice anyway.
With just that task.result() loop added to my original answer's code, a websocket server that simply echoes messages back, and a valid (not double-encoded) JSON document typed in, the error shows up immediately. The parent process here is just my terminal, so I copy the JSON message into the terminal window to send data into the pipe:

$ python3.7 so52291218.py
{"header":{"protocolVersion":1,"messageID":2,"stationID":400}}
Traceback (most recent call last):
  File "so52291218.py", line 140, in <module>
    main()
  File "so52291218.py", line 137, in main
    loop.run_until_complete(asyncio.gather(socket_coro, pipe_coro))
  File "/.../lib/python3.7/asyncio/base_events.py", line 568, in run_until_complete
    return future.result()
  File "so52291218.py", line 126, in connect_pipe
    task.result()
  File "so52291218.py", line 104, in pipe_producer
    type = int(message.get("header", {}).get("messageID", -1))
AttributeError: 'str' object has no attribute 'get'
When I either remove the json.dumps() call from socket_producer(), or change the websocket server to apply json.loads() to the message it receives and echo back the result of that, the code works and I get the same message echoed back to my terminal.
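For reference, the "very simple echoing websocket server" used for a test like this is not shown in the answer; a minimal sketch, assuming the same websockets library and an arbitrary localhost port, could look like the following (decoding and re-encoding exactly once keeps the echoed payload single-encoded on the wire):

import asyncio
import json
import websockets

async def echo(websocket, path):
    # decode the incoming JSON once and re-encode it once before sending it
    # straight back, so the echoed payload stays single-encoded JSON
    async for message in websocket:
        decoded = json.loads(message)
        await websocket.send(json.dumps(decoded))

start_server = websockets.serve(echo, 'localhost', 8765)
asyncio.get_event_loop().run_until_complete(start_server)
asyncio.get_event_loop().run_forever()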
Note that when both stdin and stdout of the child are pipes, you can't just write to the stdin pipe in a loop from the parent; you can trivially make the child process hang on I/O that way. You also have to make sure you read from the stdout pipe, and because the child reads and writes those handles in an effectively random order, the parent would have to handle the I/O for the Popen() pipes asynchronously as well.
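To illustrate what handling that pipe I/O involves, here is a hedged sketch that drains the child's stdout on a background thread while the parent writes in a loop; the drain_stdout() helper and the plain-thread approach are illustrative assumptions, not code from the answer (pexpect, discussed next, does essentially this work for you):

import subprocess
import sys
import threading

def drain_stdout(pipe):
    # keep reading the child's stdout so its writes never block on a full pipe
    for line in iter(pipe.readline, b""):
        print(">>>", line.decode("utf8", errors="replace"), end="", flush=True)

test = r"/home/host/PycharmProjects/Tim/Tim.py"
process = subprocess.Popen(["python3", test],
                           stdin=subprocess.PIPE, stdout=subprocess.PIPE)
reader = threading.Thread(target=drain_stdout, args=(process.stdout,), daemon=True)
reader.start()

for i in range(5):
    message = '{"header":{"protocolVersion":1,"messageID":2,"stationID":400}}'
    process.stdin.write(message.encode("utf-8") + b"\n")
    process.stdin.flush()

process.stdin.close()
process.wait()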
Rather than writing out how to do that here (it is already covered elsewhere on Stack Overflow), I'll instead point you to the pexpect project, which has already done all that work for you (by spawning a separate thread that continuously reads from the stdout pipe). Using pexpect.popen_spawn.PopenSpawn() to keep things close to your original setup, it would look something like this:

import sys
import pexpect
import pexpect.popen_spawn

test = '...'
process = pexpect.popen_spawn.PopenSpawn([sys.executable, test])

for i in range(5):
    message = '{"header":{"protocolVersion":1,"messageID":2,"stationID":400}}'
    jsonValueBytes = message.encode("utf-8")
    process.send(jsonValueBytes + b"\n")

    # echo anything coming back
    while True:
        index = process.expect([process.crlf, pexpect.EOF, pexpect.TIMEOUT], timeout=0.1)
        if not process.before:
            break
        print('>>>', process.before.decode('utf8', errors='replace'), flush=True)

# send EOF to close the pipe, then terminate the process
process.sendeof()
process.kill(1)
process.wait()
So each time we send a full line into the pipe, we also look for lines coming the other way, with a short timeout, and echo any such line.
With all of the fixes in place (making sure to avoid multiply-encoding the JSON messages) and a very simple echoing websocket server, the pexpect code above prints:

>>> {"header":{"protocolVersion":1,"messageID":2,"stationID":400}}
>>> {"header":{"protocolVersion":1,"messageID":2,"stationID":400}}
>>> {"header":{"protocolVersion":1,"messageID":2,"stationID":400}}
>>> {"header":{"protocolVersion":1,"messageID":2,"stationID":400}}
>>> {"header":{"protocolVersion":1,"messageID":2,"stationID":400}}

showing the full round trip from the parent process, to the child process, to the websocket, and back.