Spark 作业服务器的节点 Http Post 参数
Node Http Post argument to Spark Job Server
以下 curl 命令可以完美地调用、传递参数和执行我的 "jobified" spark 程序
curl 'http://someserver:8090/jobs?appName=secondtest&classPath=Works.epJob&context=hiveContext' -d "inputparms=/somepath1 /somepath2"
这是 Spark 程序：
override def runJob(hive: HiveContext, config: Config):Any = {
var inputParms = config.getString("inputparms").split(" "); //comes from node
var path1 = inputParms.apply(0)
var path2 = inputParms.apply(1)
我需要在 node.js 中执行 http post 而不是 curl 命令。这是我目前的代码：
var postData = JSON.stringify({
"inputparms": paths
})
var options = {
hostname: 'someserver',
port: 8090,
path: '/jobs?appName=secondtest&classPath=Works.epJob&context=hiveContext',
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Content-Length': Buffer.byteLength(postData , 'utf8')
}
};
http.request(options, function(response) {...
但是上面的脚本不起作用。我错过了什么吗?
谢谢!
编辑 1:
var myreq = http.request(options, function(response) { ...})
myreq.write(postData);
myreq.end();
我收到一个解析错误
Error: Parse Error
at Error (native)
at Socket.socketOnData (_http_client.js:361:20)
at emitOne (events.js:96:13)
at Socket.emit (events.js:188:7)
at readableAddChunk (_stream_readable.js:177:18)
at Socket.Readable.push (_stream_readable.js:135:10)
at TCP.onread (net.js:542:20) bytesParsed: 2, code: 'HPE_INVALID_CONSTANT' }
以下适合我（注意：请求体写入的是纯文本的 key=value 配置行，而不是 JSON 字符串）：
// Minimal Spark Job Server POST example.
// The job input travels in the request body as a single plain-text
// key=value config line (Typesafe Config style), not as JSON.
const http = require("http");

// Target: the LongPiJob test job on a local Spark Job Server instance.
const options = {
  hostname: 'localhost',
  port: 8090,
  path: '/jobs?appName=test&classPath=spark.jobserver.LongPiJob',
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
  },
};

const req = http.request(options, (res) => {
  // Dump status line, headers, and the streamed body to the console.
  console.log('Status: ' + res.statusCode);
  console.log('Headers: ' + JSON.stringify(res.headers));
  res.setEncoding('utf8');
  res.on('data', (body) => {
    console.log('Body: ' + body);
  });
});

// Surface connection-level failures instead of crashing on an
// unhandled 'error' event.
req.on('error', (e) => {
  console.log('problem with request: ' + e.message);
});

// write data to request body
req.write('stress.test.longpijob.duration=120');
req.end();
以下 curl 命令可以完美地调用、传递参数和执行我的 "jobified" spark 程序
curl 'http://someserver:8090/jobs?appName=secondtest&classPath=Works.epJob&context=hiveContext' -d "inputparms=/somepath1 /somepath2"
这是 Spark 程序：
override def runJob(hive: HiveContext, config: Config):Any = {
var inputParms = config.getString("inputparms").split(" "); //comes from node
var path1 = inputParms.apply(0)
var path2 = inputParms.apply(1)
我需要在 node.js 中执行 http post 而不是 curl 命令。这是我目前的代码：
var postData = JSON.stringify({
"inputparms": paths
})
var options = {
hostname: 'someserver',
port: 8090,
path: '/jobs?appName=secondtest&classPath=Works.epJob&context=hiveContext',
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Content-Length': Buffer.byteLength(postData , 'utf8')
}
};
http.request(options, function(response) {...
但是上面的脚本不起作用。我错过了什么吗? 谢谢!
编辑 1:
var myreq = http.request(options, function(response) { ...})
myreq.write(postData);
myreq.end();
我收到一个解析错误
Error: Parse Error
at Error (native)
at Socket.socketOnData (_http_client.js:361:20)
at emitOne (events.js:96:13)
at Socket.emit (events.js:188:7)
at readableAddChunk (_stream_readable.js:177:18)
at Socket.Readable.push (_stream_readable.js:135:10)
at TCP.onread (net.js:542:20) bytesParsed: 2, code: 'HPE_INVALID_CONSTANT' }
以下适合我（注意：请求体写入的是纯文本的 key=value 配置行，而不是 JSON 字符串）：
// POST a job to Spark Job Server. The body is one key=value config
// line in plain text (Typesafe Config syntax), not a JSON document.
var http = require("http");

var requestOptions = {
  hostname: 'localhost',
  port: 8090,
  path: '/jobs?appName=test&classPath=spark.jobserver.LongPiJob',
  method: 'POST',
  headers: { 'Content-Type': 'application/json' }
};

// Log the status, headers, and body chunks of the server's reply.
function handleResponse(res) {
  console.log('Status: ' + res.statusCode);
  console.log('Headers: ' + JSON.stringify(res.headers));
  res.setEncoding('utf8');
  res.on('data', function (body) {
    console.log('Body: ' + body);
  });
}

var req = http.request(requestOptions, handleResponse);

// Report transport errors rather than letting the process die on an
// unhandled 'error' event.
req.on('error', function (e) {
  console.log('problem with request: ' + e.message);
});

// write data to request body
req.write('stress.test.longpijob.duration=120');
req.end();