我需要能够从我的socket.io事件侦听器访问req。所以我这样做了:
服务器
// Server setup: Express serves the page; socket.io listens on its own port (8080).
// NOTE(review): the original used one comma-chained `var` statement that mixed
// declarations with calls (`app.set(...)` is not a valid declarator) and left
// `client` as an implicit global; `config` was referenced but never defined.
var express = require('express');
var app = express();
var config = { server: { port: 3000 } }; // was referenced below but never declared
app.set('view engine', 'pug');
app.use(express.static(__dirname + '/public'));
var client = require('socket.io').listen(8080).sockets;
app.get('/', function (req, res) {
    // NOTE(review): registering the listener inside the route handler adds a
    // NEW 'connection' listener on every request — this is the bug the
    // question is about; it is kept here deliberately to reproduce it.
    client.on('connection', function (socket) {
        console.log("Connection");
    });
    res.render('chat');
});
app.listen(config.server.port, function () {
    console.log("Listening on port " + config.server.port);
});
客户端:
// Client: open the socket.io connection; log (rather than crash on)
// any synchronous connection error.
try {
    var socket = io.connect('http://127.0.0.1:8080');
} catch (err) {
    // Set status to warn user
    console.log(err);
}
问题是,如果在快速路由处理程序中添加socket.io事件侦听器,则会在套接字上创建多个侦听器。如果您要创建pug文件并测试此代码,您会注意到控制台记录" connection"一旦第一次刷新第二次,依此类推,因为每次处理路由时,都会添加另一个事件监听器。我可以通过将侦听器移动到路由处理程序之外来修复它,但是我需要能够访问" req"。有没有任何解决方案可以让我访问" req"并防止过多的听众被添加?
答案 0 :(得分:2)
不幸的是，在 socket.io 的 connection 事件监听器中访问到的 req
对象，是声明该事件监听器时的那个 req
，而不是事件监听器被执行时的 req
。因此，问题中提到的预期行为（如果我的理解是正确的）是不可能实现的。
这是一个简单的实验,对于有问题的代码:
// Demonstration from the answer: every HTTP request registers ANOTHER
// 'connection' listener, and each listener's closure captures the `req`
// that was live when it was registered — not the request live when the
// socket actually connects.
app.get('/', function (req, res) {
// Each pass through this handler stacks one more listener on the socket.
client.on('connection', function (socket) {
// Logs the captured (possibly stale) request URL.
console.log("req.url: " + req.url)
});
res.render('chat');
});
如果使用浏览器发送2个HTTP请求:首先是GET /?q=42
,然后是GET /?q=88
,则console.log
结果将是:
//first request
req.url: /?q=42
//second request
req.url: /?q=42
req.url: /?q=88
对于第二个请求，由于 connection
事件已被侦听了两次，事件侦听器也会被执行两次。但是两次执行的结果不同——第一个 HTTP 请求中注册的事件侦听器记住的是注册当时的 req
对象值。
如果只有一个客户端，并且没有并发请求（非常有限的情况），则有一种解决方法——把 req
保存到 currentReq
中，并让事件监听器改为读取 currentReq：
这里有一些想法为什么这是不可能的。
问题中的情景是:
// Workaround state: `currentReq` always holds the most recent request, and
// `isListened` guarantees the 'connection' listener is registered only once.
// CAVEAT: only safe with a single client and no concurrent requests — any
// overlapping request overwrites `currentReq` before the socket connects.
var currentReq;
var isListened = false;
// write logic in middleware, so it can be used in all routes.
app.use(function(req, res, next) {
currentReq = req;
if (!isListened) {
// Registered at most once for the whole app lifetime.
client.on('connection', function (socket) {
// Reads the shared variable at fire time instead of a captured `req`.
console.log("req.url: " + currentReq.url)
});
isListened = true;
}
next();
});
app.get('/', function (req, res) {
res.render('chat');
});
很明显，当 connection
事件发生时（步骤 5），HTTP GET
请求（步骤 1）早已结束。由于 WebSocket 连接和 HTTP 连接位于不同端口、彼此独立，因此无法在步骤 5 中恢复最初的 HTTP 请求信息。
答案 1 :(得分:0)
您可以通过在控制台打印 io.sockets 来查看已注册的事件侦听器（下方的 Airflow 回溯是抓取时混入的无关内容）：IsADirectoryError: [Errno 21] Is a directory: '/airflow/key/'
[2019-09-10 04:56:59 +0000] [11] [INFO] Handling signal: ttou
[2019-09-10 04:56:59 +0000] [5058] [INFO] Worker exiting (pid: 5058)
[2019-09-10 04:57:29 +0000] [11] [INFO] Handling signal: ttin
[2019-09-10 04:57:29 +0000] [5078] [INFO] Booting worker with pid: 5078
[2019-09-10 04:57:30,043] {__init__.py:51} INFO - Using executor KubernetesExecutor
[2019-09-10 04:57:30,276] {dagbag.py:90} INFO - Filling up the DagBag from /airflow/dags/git
[2019-09-10 04:57:30,810] {dagbag.py:205} ERROR - Failed to import: /airflow/dags/git/dag_test.py
Traceback (most recent call last):
File "/usr/local/lib/python3.7/site-packages/airflow/models/dagbag.py", line 202, in process_file
m = imp.load_source(mod_name, filepath)
File "/usr/local/lib/python3.7/imp.py", line 171, in load_source
module = _load(spec)
File "<frozen importlib._bootstrap>", line 696, in _load
File "<frozen importlib._bootstrap>", line 677, in _load_unlocked
File "<frozen importlib._bootstrap_external>", line 728, in exec_module
File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed
File "/airflow/dags/git/dag_test.py", line 50, in <module>
a = get_data_from_bigquery();
File "/airflow/dags/git/dag_test.py", line 38, in get_data_from_bigquery
bq = bigquery.Client()
File "/usr/local/lib/python3.7/site-packages/google/cloud/bigquery/client.py", line 173, in __init__
project=project, credentials=credentials, _http=_http
File "/usr/local/lib/python3.7/site-packages/google/cloud/client.py", line 226, in __init__
_ClientProjectMixin.__init__(self, project=project)
File "/usr/local/lib/python3.7/site-packages/google/cloud/client.py", line 178, in __init__
project = self._determine_default(project)
File "/usr/local/lib/python3.7/site-packages/google/cloud/client.py", line 193, in _determine_default
return _determine_default_project(project)
File "/usr/local/lib/python3.7/site-packages/google/cloud/_helpers.py", line 186, in _determine_default_project
_, project = google.auth.default()
File "/usr/local/lib/python3.7/site-packages/google/auth/_default.py", line 305, in default
credentials, project_id = checker()
File "/usr/local/lib/python3.7/site-packages/google/auth/_default.py", line 165, in _get_explicit_environ_credentials
os.environ[environment_vars.CREDENTIALS])
File "/usr/local/lib/python3.7/site-packages/google/auth/_default.py", line 91, in _load_credentials_from_file
with io.open(filename, 'r') as file_obj:
IsADirectoryError: [Errno 21] Is a directory: '/airflow/key/'
[2019-09-10 04:57:31 +0000] [11] [INFO] Handling signal: ttou
[2019-09-10 04:57:31 +0000] [5062] [INFO] Worker exiting (pid: 5062)
[2019-09-10 04:58:02 +0000] [11] [INFO] Handling signal: ttin
[2019-09-10 04:58:02 +0000] [5082] [INFO] Booting worker with pid: 5082
[2019-09-10 04:58:02,692] {__init__.py:51} INFO - Using executor KubernetesExecutor
[2019-09-10 04:58:02,932] {dagbag.py:90} INFO - Filling up the DagBag from /airflow/dags/git
[2019-09-10 04:58:03,438] {dagbag.py:205} ERROR - Failed to import: /airflow/dags/git/dag_test.py
Traceback (most recent call last):
File "/usr/local/lib/python3.7/site-packages/airflow/models/dagbag.py", line 202, in process_file
m = imp.load_source(mod_name, filepath)
File "/usr/local/lib/python3.7/imp.py", line 171, in load_source
module = _load(spec)
File "<frozen importlib._bootstrap>", line 696, in _load
File "<frozen importlib._bootstrap>", line 677, in _load_unlocked
File "<frozen importlib._bootstrap_external>", line 728, in exec_module
File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed
File "/airflow/dags/git/dag_test.py", line 50, in <module>
a = get_data_from_bigquery();
File "/airflow/dags/git/dag_test.py", line 38, in get_data_from_bigquery
bq = bigquery.Client()
File "/usr/local/lib/python3.7/site-packages/google/cloud/bigquery/client.py", line 173, in __init__
project=project, credentials=credentials, _http=_http
File "/usr/local/lib/python3.7/site-packages/google/cloud/client.py", line 226, in __init__
_ClientProjectMixin.__init__(self, project=project)
File "/usr/local/lib/python3.7/site-packages/google/cloud/client.py", line 178, in __init__
project = self._determine_default(project)
File "/usr/local/lib/python3.7/site-packages/google/cloud/client.py", line 193, in _determine_default
return _determine_default_project(project)
File "/usr/local/lib/python3.7/site-packages/google/cloud/_helpers.py", line 186, in _determine_default_project
_, project = google.auth.default()
File "/usr/local/lib/python3.7/site-packages/google/auth/_default.py", line 305, in default
credentials, project_id = checker()
File "/usr/local/lib/python3.7/site-packages/google/auth/_default.py", line 165, in _get_explicit_environ_credentials
os.environ[environment_vars.CREDENTIALS])
File "/usr/local/lib/python3.7/site-packages/google/auth/_default.py", line 91, in _load_credentials_from_file
with io.open(filename, 'r') as file_obj:
IsADirectoryError: [Errno 21] Is a directory: '/airflow/key/'
io.sockets
Namespace {
  _events:
   { connection: [ [Function], [Function], [Function], [Function] ] },
  _eventsCount: 1 }
要解决您的问题，只需在初始化套接字连接之前加入如下条件：
if (io.sockets._events == undefined) {
    io.on('connection', socket => {
        ...
    });
}
如果除 connection 之外还有其他事件侦听器，则需要相应调整此检查。
答案 2 :(得分:0)
聚会有点晚了,但似乎 OP 想要访问 req 对象以验证登录。将我的 2 便士添加到组合中,这是我在类似情况下所做的:
我的应用程序使用基于 cookie 的登录令牌（仅通过 https！），我能够使用以下代码访问登录 cookie：
/**
 * Parse a raw `Cookie` request header into a name → value map.
 *
 * Fixes over the original: removes the unused `_cookies` variable, declares
 * the loop variable (the original `for (cookiekv of ...)` created an implicit
 * global), and splits each pair on the FIRST "=" only so cookie values that
 * themselves contain "=" are no longer truncated.
 *
 * @param {string} cookief - raw cookie header, e.g. "a=1; b=2"
 * @returns {Object<string,string>} cookie name → value
 */
function cookieParser(cookief) {
    const cookies = {};
    for (const pair of cookief.split("; ")) {
        const eq = pair.indexOf("=");
        if (eq === -1) {
            // No "=" at all: keep the original's key-only behavior.
            cookies[pair] = undefined;
        } else {
            cookies[pair.slice(0, eq)] = pair.slice(eq + 1);
        }
    }
    return cookies;
}
// Placeholder login check: the presence of a `loginToken` cookie counts as a
// valid login. Replace this example with real token verification.
function verifyLogin(cookies) {
    const { loginToken } = cookies;
    return loginToken;
}
// Socket.io handshake middleware: only sockets presenting a valid login
// token cookie are allowed to connect; everyone else gets an error. Because
// this is middleware, exactly one 'connection' listener ever exists on `io`.
io.use(function (socket, next) {
    const parsed = cookieParser(socket.handshake.headers.cookie);
    if (!verifyLogin(parsed)) {
        next(new Error("invalid login token"));
        return;
    }
    next();
});
这允许我只接受来自已登录并收到登录令牌的用户的套接字连接。这也应该解决您的多个侦听器方案,因为在 io
上仅注册了一个侦听器。
希望这会有所帮助!