Face API with Node.js using Axios
I'm using Azure's Cognitive Services Face API to analyze images. I got it working earlier with an image URL, but now I'm trying to get it to work with a local image. I found a question that does exactly what I'm looking for using request.post, but I'm trying to get it to work with axios.
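For context, when the image lives at a public URL the detect endpoint accepts a JSON body instead of binary data; a minimal sketch of that working variant (the image URL is just a placeholder, and urlApi/subscriptionKey are the same values used in the code below) looks like:
axios.post(urlApi,
  { url: "https://example.com/photo.jpg" }, // hypothetical image URL
  {
    headers: {
      "Content-Type": "application/json",
      "Ocp-Apim-Subscription-Key": subscriptionKey,
    },
    params: { returnFaceId: true },
  }
);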
My code:
"use strict";
const axios = require("axios").default;
const fs = require("fs");
let file = "localFile.jpg";
const imageBuffer = fs.readFileSync(file, { encoding: "base64" });
// Add a valid subscription key and endpoint to your environment variables.
var subscriptionKey = "<Subscription key>";
var urlApi = "https://.../face/v1.0/detect";
// Send a POST request
axios({
method: "post",
url: urlApi,
params: {
returnFaceId: true,
returnFaceLandmarks: false,
returnFaceAttributes:
"age,gender,headPose,smile,facialHair,glasses,emotion,hair,makeup,occlusion,accessories,blur,exposure,noise",
},
headers: {
"Content-Type": "application/octet-stream",
"Ocp-Apim-Subscription-Key": subscriptionKey,
"Content-Length": imageBuffer.length,
},
data: imageBuffer,
})
.then((response) => {
console.log("Status text: " + response.status);
console.log("Status text: " + response.statusText);
console.log();
//console.log(response.data)
response.data.forEach((face) => {
console.log("Face ID: " + face.faceId);
console.log("Smile: " + face.faceAttributes.smile);
console.log(
"Head pose: " + JSON.stringify(face.faceAttributes.headPose)
);
console.log("Gender: " + face.faceAttributes.gender);
console.log("Age: " + face.faceAttributes.age);
console.log(
"Facial hair: " + JSON.stringify(face.faceAttributes.facialHair)
);
console.log("Glasses: " + face.faceAttributes.glasses);
console.log("Smile: " + face.faceAttributes.smile);
console.log(
"Emotion: " + JSON.stringify(face.faceAttributes.emotion)
);
console.log("Blur: " + JSON.stringify(face.faceAttributes.blur));
console.log(
"Exposure: " + JSON.stringify(face.faceAttributes.exposure)
);
console.log("Noise: " + JSON.stringify(face.faceAttributes.noise));
console.log(
"Makeup: " + JSON.stringify(face.faceAttributes.makeup)
);
console.log(
"Accessories: " +
JSON.stringify(face.faceAttributes.accessories)
);
console.log("Hair: " + JSON.stringify(face.faceAttributes.hair));
console.log();
});
})
.catch(function (error) {
console.log(error);
});
I was expecting output in a JSON-like format, but instead I got status: 400, statusText: 'Bad Request', and I'm not sure why. Any help would be appreciated.
I also tried
const imageBuffer = fs.readFileSync(file);
and
const options = [
"returnFaceId=true",
"returnFaceLandmarks=true",
"returnFaceAttributes=age,gender,headPose,smile,facialHair,glasses,emotion,hair,makeup,accessories",
];
urlApi = urlApi.concat("?", options.join("&"));
axios.post(urlApi,
  { body: imageBuffer },
  {
    headers: {
      "Content-Type": "application/octet-stream",
      "Ocp-Apim-Subscription-Key": subscriptionKey,
      "Content-Length": imageBuffer.length,
    },
  }
);
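For reference, axios.post(url, data, config) sends its second argument as the request body itself, so wrapping the buffer in { body: imageBuffer } makes axios serialize a JSON object rather than the raw image bytes. A minimal corrected sketch of that call (assuming imageBuffer is read without the base64 encoding, and urlApi/subscriptionKey are as above) would be:
axios.post(urlApi, imageBuffer, {
  headers: {
    "Content-Type": "application/octet-stream",
    "Ocp-Apim-Subscription-Key": subscriptionKey,
  },
});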
Detailed error response (added at Jim Xu's request):
response: {
status: 400,
statusText: 'Bad Request',
headers: {
'transfer-encoding': 'chunked',
'content-type': 'application/json; charset=utf-8',
'x-envoy-upstream-service-time': '8',
'apim-request-id': 'cb7f0380-a785-49f5-94fe-ff5ed0e36742',
'strict-transport-security': 'max-age=31536000; includeSubDomains; preload',
'x-content-type-options': 'nosniff',
'csp-billing-usage': 'CognitiveServices.Face.Transaction=1',
date: 'Sun, 20 Sep 2020 13:55:45 GMT',
connection: 'close'
}
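The body of such a 400 response normally contains the Face API's specific error code and message; it can be printed from the axios error object, for example with a catch handler like this (a small sketch, not part of the original code):
.catch((error) => {
  if (error.response) {
    // The server replied with a non-2xx status; its JSON body explains why
    console.log(error.response.status, error.response.statusText);
    console.log(JSON.stringify(error.response.data));
  } else {
    console.log(error.message);
  }
});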
If you run axios in a Node environment, you should use a Stream or a Buffer as the request body data. For more details, please refer here.
For example:
axios
.post(
"<your face api endpoint>",
fs.readFileSync("D:\faces.jpg"),
{
headers: {
"Ocp-Apim-Subscription-Key": subscriptionKey,
"Content-Type": "application/octet-stream",
},
params: {
returnFaceId: true,
returnFaceLandmarks: false,
returnFaceAttributes:
"age,gender,headPose,smile,facialHair,glasses,emotion,hair,makeup,occlusion,accessories,blur,exposure,noise",
},
}
)
.then((response) => {
console.log("Status text: " + response.status);
console.log("Status text: " + response.statusText);
console.log();
//console.log(response.data)
response.data.forEach((face) => {
console.log("Face ID: " + face.faceId);
console.log(
"Face rectangle: " +
face.faceRectangle.top +
", " +
face.faceRectangle.left +
", " +
face.faceRectangle.width +
", " +
face.faceRectangle.height
);
console.log("Smile: " + face.faceAttributes.smile);
console.log("Head pose: " + JSON.stringify(face.faceAttributes.headPose));
console.log("Gender: " + face.faceAttributes.gender);
console.log("Age: " + face.faceAttributes.age);
console.log(
"Facial hair: " + JSON.stringify(face.faceAttributes.facialHair)
);
console.log("Glasses: " + face.faceAttributes.glasses);
console.log("Smile: " + face.faceAttributes.smile);
console.log("Emotion: " + JSON.stringify(face.faceAttributes.emotion));
console.log("Blur: " + JSON.stringify(face.faceAttributes.blur));
console.log("Exposure: " + JSON.stringify(face.faceAttributes.exposure));
console.log("Noise: " + JSON.stringify(face.faceAttributes.noise));
console.log("Makeup: " + JSON.stringify(face.faceAttributes.makeup));
console.log(
"Accessories: " + JSON.stringify(face.faceAttributes.accessories)
);
console.log("Hair: " + JSON.stringify(face.faceAttributes.hair));
console.log();
});
})
.catch((err) => {
throw err;
});
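Since the answer mentions that a Stream also works, here is a minimal untested sketch of the same request using fs.createReadStream instead of a Buffer (the endpoint placeholder, key, and parameters are assumed to match the example above):
const fs = require("fs");

axios
  .post("<your face api endpoint>", fs.createReadStream("D:\\faces.jpg"), {
    headers: {
      "Ocp-Apim-Subscription-Key": subscriptionKey,
      "Content-Type": "application/octet-stream",
    },
    params: { returnFaceId: true },
  })
  .then((response) => console.log(response.data))
  .catch((err) => console.log(err.response ? err.response.data : err.message));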