-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path: processVideoExample.cs
88 lines (70 loc) · 3.47 KB
/
processVideoExample.cs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
/*
Example usage of the Azure Cognitive AI (Language / Text Analytics) service.
This method sends the raw YouTube video description to a custom Named Entity Recognition
model. The model is trained on a corpus of data looking for specific keywords.
The API returns categorized entities with confidence scores. Every entity is recorded on
the video; entities whose confidence meets the threshold are additionally concatenated
into video.ProcessedText for further aggregation.
See:
https://github.com/Azure/azure-sdk-for-net/blob/Azure.AI.TextAnalytics_5.3.0/sdk/textanalytics/Azure.AI.TextAnalytics/README.md
https://github.com/Azure/azure-sdk-for-net/blob/main/sdk/textanalytics/Azure.AI.TextAnalytics/samples/Sample8_RecognizeCustomEntities.md

NOTE(review): method name contains a typo ("Descritpion") — kept to avoid breaking callers.
Failures are intentionally swallowed after logging (best-effort enrichment); the method
never throws to its caller.
*/
private static async Task processRawDescritpionNERAI(Models.Video video, ILogger log){
    // Entities at or above this confidence are included in the aggregated ProcessedText.
    const double MinimumConfidenceScore = 0.8;
    try {
        log.LogInformation($"Start processing AI for videoID: {video.VideoId}");

        // Nothing to analyze — bail out before reading config or building a client.
        if (string.IsNullOrEmpty(video.DescriptionRaw)) {
            return;
        }

        string endpoint = Environment.GetEnvironmentVariable("language_ai_endpoint");
        string apiKey = Environment.GetEnvironmentVariable("language_ai_api_key");
        string projectName = Environment.GetEnvironmentVariable("language_ai_project_name");
        string deploymentName = Environment.GetEnvironmentVariable("language_ai_deployment_name");

        // Fail fast with a clear message when configuration is missing, instead of an
        // opaque ArgumentNullException/UriFormatException from deep inside the SDK.
        if (string.IsNullOrEmpty(endpoint) || string.IsNullOrEmpty(apiKey) ||
            string.IsNullOrEmpty(projectName) || string.IsNullOrEmpty(deploymentName)) {
            log.LogError($"Missing language AI configuration (endpoint/key/project/deployment); skipping videoID: {video.VideoId}");
            return;
        }

        var client = new TextAnalyticsClient(new Uri(endpoint), new AzureKeyCredential(apiKey));

        // Single-document batch; "1" is the document id, description is analyzed as English.
        var batchDocuments = new List<TextDocumentInput>
        {
            new TextDocumentInput("1", video.DescriptionRaw)
            {
                Language = "en",
            }
        };

        var actions = new TextAnalyticsActions()
        {
            RecognizeCustomEntitiesActions = new List<RecognizeCustomEntitiesAction>()
            {
                new RecognizeCustomEntitiesAction(projectName, deploymentName)
            }
        };

        // Long-running operation: start it, then wait for the service to finish.
        AnalyzeActionsOperation operation = await client.StartAnalyzeActionsAsync(batchDocuments, actions);
        await operation.WaitForCompletionAsync();

        await foreach (AnalyzeActionsResult documentsInPage in operation.Value)
        {
            IReadOnlyCollection<RecognizeCustomEntitiesActionResult> customEntitiesActionResults = documentsInPage.RecognizeCustomEntitiesResults;
            foreach (RecognizeCustomEntitiesActionResult customEntitiesActionResult in customEntitiesActionResults)
            {
                foreach (RecognizeEntitiesResult documentResults in customEntitiesActionResult.DocumentsResults)
                {
                    // Per-document failures (e.g. invalid document) arrive as error results,
                    // not exceptions — skip them explicitly instead of silently yielding nothing.
                    if (documentResults.HasError) {
                        log.LogError($"Document error for videoID: {video.VideoId}: {documentResults.Error.Message}");
                        continue;
                    }
                    var processedText = new StringBuilder();
                    foreach (CategorizedEntity entity in documentResults.Entities)
                    {
                        // Record every recognized entity on the video, regardless of confidence.
                        var ner = new NER(){
                            Text = entity.Text,
                            Category = (string)entity.Category,
                            ConfidenceScore = entity.ConfidenceScore,
                            Length = entity.Length,
                            Offset = entity.Offset,
                            SubCategory = entity.SubCategory
                        };
                        video.CategorizedEntities.Add(ner);
                        // Only confident entities contribute to the aggregated text.
                        if(entity.ConfidenceScore >= MinimumConfidenceScore) {
                            processedText.AppendLine(entity.Text);
                        }
                    }
                    video.ProcessedText = processedText.ToString();
                }
            }
        }
        log.LogInformation($"Completed processing AI for videoID: {video.VideoId}");
    } catch(Exception ex){
        // Pass the exception object so the full stack trace is captured,
        // not just the message text.
        log.LogError(ex, $"Error Processing AI for videoID: {video.VideoId}");
    }
}