Spark jobserver Job 的 Node.js HTTP POST 参数

时间:2016-09-20 19:27:12

标签: node.js http curl spark-jobserver

以下 curl 命令可以完美地调用、传递参数并执行我的 "jobified" Spark 程序

curl 'http://someserver:8090/jobs?appName=secondtest&classPath=Works.epJob&context=hiveContext' -d "inputparms=/somepath1 /somepath2"

这是 Spark 程序

// Spark jobserver entry point: reads the space-separated "inputparms"
// value from the POSTed Typesafe config and splits it into two paths.
// (Fragment quoted from the question -- closing braces not shown.)
override def runJob(hive: HiveContext, config: Config):Any = {  
var inputParms = config.getString("inputparms").split(" "); //comes from node
var path1 = inputParms.apply(0)
var path2 = inputParms.apply(1)

我需要在 Node.js 中发送一个 HTTP POST 请求来代替上面的 curl 命令。这是我目前的代码

// NOTE(review): the working curl call posts a plain "inputparms=..." body,
// but this sends a JSON document. spark-jobserver parses the POST body as
// Typesafe config, so config.getString("inputparms") will not see this
// JSON-encoded form -- confirm against the jobserver REST API docs.
var postData = JSON.stringify({
  "inputparms": paths
})

var options = {
hostname: 'someserver',
port: 8090,
// FIXME: missing '&' between classPath=Works.epJob and context=hiveContext;
// the literal space makes this an invalid request target (the curl version
// joins the query parameters with '&').
path: '/jobs?appName=secondtest&classPath=Works.epJob context=hiveContext',
method: 'POST',
headers: {
    'Content-Type': 'application/json',
    'Content-Length': Buffer.byteLength(postData , 'utf8')
}
};

 http.request(options, function(response) {...

以上脚本无法正常工作。我错过了什么吗? 谢谢!

编辑1:

 // Write the body and finish the request. NOTE(review): the
 // HPE_INVALID_CONSTANT parse error reported below most likely stems from
 // the malformed `path` (a space where '&' belongs) -- verify by fixing
 // the query string first.
 var myreq = http.request(options, function(response) { ...})
 myreq.write(postData);
 myreq.end();

我得到一个解析错误

Error: Parse Error
at Error (native)
at Socket.socketOnData (_http_client.js:361:20)
at emitOne (events.js:96:13)
at Socket.emit (events.js:188:7)
at readableAddChunk (_stream_readable.js:177:18)
at Socket.Readable.push (_stream_readable.js:135:10)
at TCP.onread (net.js:542:20) bytesParsed: 2, code: 'HPE_INVALID_CONSTANT' }

1 个答案:

答案 0 :(得分:0)

以下代码对我有效

// POST a job-config line to the local Spark jobserver and log the reply.
const http = require("http");

// Request target: the jobs endpoint with app/class selected via the query
// string; the job parameters travel in the body, Typesafe-config style.
const requestOptions = {
  hostname: 'localhost',
  port: 8090,
  path: '/jobs?appName=test&classPath=spark.jobserver.LongPiJob',
  method: 'POST',
  headers: {
      'Content-Type': 'application/json',
  }
};

const jobRequest = http.request(requestOptions, (res) => {
  console.log('Status: ' + res.statusCode);
  console.log('Headers: ' + JSON.stringify(res.headers));
  res.setEncoding('utf8');
  // Log each body chunk as it arrives.
  res.on('data', (body) => {
    console.log('Body: ' + body);
  });
});

jobRequest.on('error', (e) => {
  console.log('problem with request: ' + e.message);
});

// write data to request body
jobRequest.write('stress.test.longpijob.duration=120');
jobRequest.end();