Вы можете использовать этот код как есть или заменить его собственной логикой маршрутизации:
protected override async Task RouteAsync(DialogContext dc, CancellationToken cancellationToken = default(CancellationToken))
{
    // Resolve the set of cognitive models registered for the current UI locale.
    var locale = CultureInfo.CurrentUICulture.TwoLetterISOLanguageName;
    var cognitiveModels = _services.CognitiveModelSets[locale];

    // Ask the dispatch model which component (skill, LUIS model, or QnA source)
    // should handle this turn. Flow the caller's token instead of
    // CancellationToken.None so the whole turn can be cancelled cooperatively.
    var dispatchResult = await cognitiveModels.DispatchService.RecognizeAsync<DispatchLuis>(dc.Context, cancellationToken);
    var intent = dispatchResult.TopIntent().intent;

    // Replies with the top answer of the named QnA Maker service, or the
    // "confused" response when no answer is found. Shared by the FAQ and
    // chit-chat branches, which were previously duplicated.
    async Task AnswerWithQnAServiceAsync(string serviceName)
    {
        cognitiveModels.QnAServices.TryGetValue(serviceName, out var qnaService);
        if (qnaService == null)
        {
            // More specific than bare Exception; callers catching Exception still catch this.
            throw new InvalidOperationException("The specified QnA Maker Service could not be found in your Bot Services configuration.");
        }

        var answers = await qnaService.GetAnswersAsync(dc.Context, null, null);
        if (answers != null && answers.Any())
        {
            await dc.Context.SendActivityAsync(answers[0].Answer, speak: answers[0].Answer);
        }
        else
        {
            await _responder.ReplyWith(dc.Context, MainResponses.ResponseIds.Confused);
        }
    }

    // Identify whether the dispatch intent matches an Action within a Skill;
    // if so, hand off to the appropriate SkillDialog.
    var identifiedSkill = SkillRouter.IsSkill(_settings.Skills, intent.ToString());
    if (identifiedSkill != null)
    {
        // We have identified a skill, so initialize the connection with the target skill.
        await dc.BeginDialogAsync(identifiedSkill.Id, cancellationToken: cancellationToken);

        // Pass the current activity through to the skill dialog.
        var result = await dc.ContinueDialogAsync(cancellationToken);
        if (result.Status == DialogTurnStatus.Complete)
        {
            await CompleteAsync(dc);
        }
    }
    else if (intent == DispatchLuis.Intent.l_general)
    {
        // Dispatch selected the general LUIS model; run it for a finer-grained intent.
        cognitiveModels.LuisServices.TryGetValue("general", out var luisService);
        if (luisService == null)
        {
            throw new InvalidOperationException("The general LUIS Model could not be found in your Bot Services configuration.");
        }

        var result = await luisService.RecognizeAsync<GeneralLuis>(dc.Context, cancellationToken);
        var generalIntent = result?.TopIntent().intent;

        switch (generalIntent)
        {
            case GeneralLuis.Intent.Escalate:
                // Hand the user over to the escalation flow.
                await dc.BeginDialogAsync(nameof(EscalateDialog), cancellationToken: cancellationToken);
                break;

            case GeneralLuis.Intent.None:
            default:
                // No usable intent was identified; send the "confused" response.
                await _responder.ReplyWith(dc.Context, MainResponses.ResponseIds.Confused);
                break;
        }
    }
    else if (intent == DispatchLuis.Intent.q_faq)
    {
        await AnswerWithQnAServiceAsync("faq");
    }
    else if (intent == DispatchLuis.Intent.q_chitchat)
    {
        await AnswerWithQnAServiceAsync("chitchat");
    }
    else
    {
        // The dispatch intent does not map to any configured model.
        await _responder.ReplyWith(dc.Context, MainResponses.ResponseIds.Confused);
    }
}
Я взял это из официальных образцов Microsoft. Дайте мне знать, если есть какие-либо сомнения.
Пожалуйста, посмотрите на это: https://github.com/microsoft/botframework-solutions/blob/master/templates/Virtual-Assistant-Template/csharp/Template/VA/Dialogs/MainDialog.cs