MS Text Analytics Cognitive Service: how to work with a local database?
Microsoft offers a text-analysis service called the Text Analytics Cognitive Service.
Is it possible to use this service with a local database, i.e. not in Azure?
I work with some large databases, and it would be useful for me to apply it to:
Language detection
Key phrase extraction
Named entity recognition
Sentiment analysis
Once you have pulled the data whose language you want to detect from your local database,
just pass it to the method below. The response will contain the analysis of your value
(a usage sketch follows the class definitions further down).
API Access Keys:
private static readonly string endpointKey = "YourEndpointKey";
private static readonly string endpoint = "https://YourServiceURL.cognitiveservices.azure.com/text/analytics/v2.1/languages";
Code Snippet:
// Required namespaces: System, System.Collections.Generic, System.Net.Http,
// System.Text, System.Threading.Tasks, Newtonsoft.Json
public async Task<object> DetectLanguageAsync(string InputFromDbOrUser)
{
    try
    {
        DetectedLanguageResponseModel objDetectedLanguageResponse = new DetectedLanguageResponseModel();

        // Create the language detection request parameters
        RequestModel objRequestModel = new RequestModel();
        objRequestModel.id = "1";
        objRequestModel.text = InputFromDbOrUser;

        // Build the document list
        List<RequestModel> objDocuments = new List<RequestModel>();
        objDocuments.Add(objRequestModel);

        // Bind the request model
        LanguageDetection objRequestList = new LanguageDetection();
        objRequestList.documents = objDocuments;

        // Serialize the request object
        var serializedObject = JsonConvert.SerializeObject(objRequestList);

        // Call the Language Detection API
        using (var client = new HttpClient())
        using (var request = new HttpRequestMessage())
        {
            request.Method = HttpMethod.Post;
            request.RequestUri = new Uri(endpoint);
            request.Content = new StringContent(serializedObject, Encoding.UTF8, "application/json");
            request.Headers.Add("Ocp-Apim-Subscription-Key", endpointKey);

            var response = await client.SendAsync(request);

            // Check the status code and retrieve the response
            if (response.IsSuccessStatusCode)
            {
                ResponseModel objResponse = JsonConvert.DeserializeObject<ResponseModel>(await response.Content.ReadAsStringAsync());

                // Check for an empty response before returning to the caller
                if (objResponse.documents != null && objResponse.documents.Count > 0)
                {
                    objDetectedLanguageResponse.Language = objResponse.documents[0].detectedLanguages[0].name;
                    return objDetectedLanguageResponse;
                }
                else
                {
                    return "Sorry, I am not able to find a related topic! Would you like me to Bing Search?";
                }
            }
            else
            {
                // Return the raw error body so the caller can inspect the failure
                var resultString = await response.Content.ReadAsStringAsync();
                return resultString;
            }
        }
    }
    catch (Exception ex)
    {
        // Rethrow and preserve the stack trace instead of masking the error
        throw;
    }
}
Classes Used:
public class DetectedLanguage
{
    public string name { get; set; }
    public string iso6391Name { get; set; }
}

public class DetectedLanguageResponseModel
{
    public dynamic Language { get; set; }
}

public class LanguageDetection
{
    public List<RequestModel> documents { get; set; }
}

public class RequestModel
{
    public string id { get; set; }
    public string text { get; set; }
}

public class ResponseDocument
{
    public string id { get; set; }
    public List<DetectedLanguage> detectedLanguages { get; set; }
}

public class ResponseModel
{
    public List<ResponseDocument> documents { get; set; }
    public List<object> errors { get; set; }
}
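To tie this back to the question about a local database: here is a minimal sketch of how you might fetch a text value from an on-premises SQL Server table and pass it to the DetectLanguageAsync method above. The connection string, the Articles table, and the Body column are hypothetical placeholders; it assumes the Microsoft.Data.SqlClient (or System.Data.SqlClient) package is referenced.

// Minimal sketch, not part of the original answer: read one text value
// from a local database and detect its language with the method above.
// "Articles", "Body", and the connection string are placeholders.
using Microsoft.Data.SqlClient;

public async Task<object> DetectLanguageForFirstRowAsync()
{
    const string connectionString = "Server=localhost;Database=MyLocalDb;Integrated Security=true;";

    using (var connection = new SqlConnection(connectionString))
    using (var command = new SqlCommand("SELECT TOP 1 Body FROM Articles", connection))
    {
        await connection.OpenAsync();

        // Read the text to analyze from the local table
        var text = (string)await command.ExecuteScalarAsync();

        // Pass the local value to the Text Analytics call shown above
        return await DetectLanguageAsync(text);
    }
}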
Note: The current limit is 5,120 characters for each document; if you need to analyze larger documents, you can break them up into smaller chunks (a splitting sketch follows below). For more details, refer to the official documentation.
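As a rough illustration of the chunking idea, the helper below splits a long string into pieces of at most 5,120 characters so each piece can be sent to the API separately. The SplitIntoChunks name and the simple fixed-size split are my own assumptions; in practice you may prefer to split on sentence or paragraph boundaries.

// Hypothetical helper: split a long document into chunks of at most 5,120 characters.
private static IEnumerable<string> SplitIntoChunks(string text, int chunkSize = 5120)
{
    for (int i = 0; i < text.Length; i += chunkSize)
    {
        yield return text.Substring(i, Math.Min(chunkSize, text.Length - i));
    }
}

// Usage: analyze each chunk separately.
// foreach (var chunk in SplitIntoChunks(longDocumentText))
// {
//     var result = await DetectLanguageAsync(chunk);
// }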
Hope this helps. If you need more implementation help, please have a look here.