From 8ae2816c5cd9c901bdf5ad5410b69d3c40b8acea Mon Sep 17 00:00:00 2001
From: Alexandre Zollinger Chohfi <alzollin@microsoft.com>
Date: Thu, 27 Feb 2025 10:17:49 -0800
Subject: [PATCH 1/8] Moved to ORTGenAI's IChatClient.

---
 AIDevGallery/AIDevGallery.csproj              |   3 +-
 AIDevGallery/Helpers/SamplesHelper.cs         |   5 +-
 .../Models/BaseSampleNavigationParameters.cs  |   2 +-
 AIDevGallery/ProjectGenerator/Generator.cs    |  11 +-
 .../RetrievalAugmentedGeneration.xaml.cs      |   2 +-
 .../Embeddings/SemanticSearch.xaml.cs         |   2 +-
 .../Embeddings/SemanticSuggest.xaml.cs        |   2 +-
 .../Language Models/Chat.xaml.cs              |   2 +-
 .../Language Models/ContentModeration.xaml.cs |   2 +-
 .../CustomSystemPrompt.xaml.cs                |   2 +-
 .../Language Models/ExplainCode.xaml.cs       |   2 +-
 .../Language Models/Generate.xaml.cs          |   2 +-
 .../Language Models/GenerateCode.xaml.cs      |   2 +-
 .../Language Models/GrammarCheck.xaml.cs      |   2 +-
 .../Language Models/Paraphrase.xaml.cs        |   2 +-
 .../SemanticKernelChat.xaml.cs                |   1 +
 .../Language Models/SentimentAnalysis.xaml.cs |   2 +-
 .../Language Models/SmartPaste.xaml.cs        |   2 +-
 .../Language Models/SmartText.xaml.cs         |   2 +-
 .../Language Models/Summarize.xaml.cs         |   2 +-
 .../Language Models/Translate.xaml.cs         |   2 +-
 .../SharedCode/IChatClient/GenAIModel.cs      | 288 ------------------
 .../OnnxRuntimeGenAIChatClientFactory.cs      | 139 +++++++++
 .../Samples/WCRAPIs/PhiSilicaBasic.xaml.cs    |   2 +-
 Directory.Packages.props                      |   6 +-
 25 files changed, 171 insertions(+), 318 deletions(-)
 delete mode 100644 AIDevGallery/Samples/SharedCode/IChatClient/GenAIModel.cs
 create mode 100644 AIDevGallery/Samples/SharedCode/IChatClient/OnnxRuntimeGenAIChatClientFactory.cs

diff --git a/AIDevGallery/AIDevGallery.csproj b/AIDevGallery/AIDevGallery.csproj
index dfa6ea46..23263457 100644
--- a/AIDevGallery/AIDevGallery.csproj
+++ b/AIDevGallery/AIDevGallery.csproj
@@ -59,7 +59,7 @@
     <PackageReference Include="CommunityToolkit.WinUI.UI.Controls.Markdown" />
     <PackageReference Include="MathNet.Numerics" />
     <PackageReference Include="Microsoft.Build" />
-    <PackageReference Include="Microsoft.Extensions.AI.Abstractions" />
+    <PackageReference Include="Microsoft.Extensions.AI" />
     <PackageReference Include="Microsoft.ML.OnnxRuntime.Extensions" />
     <PackageReference Include="Microsoft.ML.Tokenizers" />
     <PackageReference Include="Microsoft.SemanticKernel.Connectors.InMemory" />
@@ -98,6 +98,7 @@
     <PackageReference Include="Microsoft.AI.DirectML" />
     <PackageReference Include="Microsoft.ML.OnnxRuntime.DirectML" />
     <PackageReference Include="Microsoft.ML.OnnxRuntimeGenAI.DirectML" />
+    <PackageReference Include="Microsoft.ML.OnnxRuntimeGenAI.Managed" />
   </ItemGroup>
 
   <ItemGroup>
diff --git a/AIDevGallery/Helpers/SamplesHelper.cs b/AIDevGallery/Helpers/SamplesHelper.cs
index c045a9f6..1b184e0c 100644
--- a/AIDevGallery/Helpers/SamplesHelper.cs
+++ b/AIDevGallery/Helpers/SamplesHelper.cs
@@ -19,10 +19,10 @@ public static List<SharedCodeEnum> GetAllSharedCode(this Sample sample, Dictiona
 
         if (isLanguageModel)
         {
-            AddUnique(SharedCodeEnum.GenAIModel);
+            AddUnique(SharedCodeEnum.OnnxRuntimeGenAIChatClientFactory);
         }
 
-        if (sharedCode.Contains(SharedCodeEnum.GenAIModel))
+        if (sharedCode.Contains(SharedCodeEnum.OnnxRuntimeGenAIChatClientFactory))
         {
             AddUnique(SharedCodeEnum.LlmPromptTemplate);
         }
@@ -59,6 +59,7 @@ public static List<string> GetAllNugetPackageReferences(this Sample sample, Dict
 
         if (isLanguageModel)
         {
+            AddUnique("Microsoft.ML.OnnxRuntimeGenAI.Managed");
             AddUnique("Microsoft.ML.OnnxRuntimeGenAI.DirectML");
         }
 
diff --git a/AIDevGallery/Models/BaseSampleNavigationParameters.cs b/AIDevGallery/Models/BaseSampleNavigationParameters.cs
index 5a4d8a38..52ca2e02 100644
--- a/AIDevGallery/Models/BaseSampleNavigationParameters.cs
+++ b/AIDevGallery/Models/BaseSampleNavigationParameters.cs
@@ -27,7 +27,7 @@ public void NotifyCompletion()
             return await PhiSilicaClient.CreateAsync(CancellationToken).ConfigureAwait(false);
         }
 
-        return await GenAIModel.CreateAsync(ChatClientModelPath, ChatClientPromptTemplate, CancellationToken).ConfigureAwait(false);
+        return await OnnxRuntimeGenAIChatClientFactory.CreateAsync(ChatClientModelPath, ChatClientPromptTemplate, CancellationToken).ConfigureAwait(false);
     }
 
     internal abstract void SendSampleInteractionEvent(string? customInfo = null);
diff --git a/AIDevGallery/ProjectGenerator/Generator.cs b/AIDevGallery/ProjectGenerator/Generator.cs
index ad6397c0..db9d8e80 100644
--- a/AIDevGallery/ProjectGenerator/Generator.cs
+++ b/AIDevGallery/ProjectGenerator/Generator.cs
@@ -296,8 +296,7 @@ static void AddPackageReference(ProjectItemGroupElement itemGroup, string packag
                     packageReferenceItem.Condition = "$(Platform) == 'x64'";
                 }
                 else if (packageName == "Microsoft.ML.OnnxRuntime.Qnn" ||
-                            packageName == "Microsoft.ML.OnnxRuntimeGenAI" ||
-                            packageName == "Microsoft.ML.OnnxRuntimeGenAI.Managed")
+                            packageName == "Microsoft.ML.OnnxRuntimeGenAI")
                 {
                     packageReferenceItem.Condition = "$(Platform) == 'ARM64'";
                 }
@@ -403,7 +402,7 @@ static void AddPackageReference(ProjectItemGroupElement itemGroup, string packag
     private string? GetChatClientLoaderString(List<Samples.SharedCodeEnum> sharedCode, string modelPath, string promptTemplate, bool isPhiSilica, ModelType modelType)
     {
         bool isLanguageModel = ModelDetailsHelper.EqualOrParent(modelType, ModelType.LanguageModels);
-        if (!sharedCode.Contains(SharedCodeEnum.GenAIModel) && !isPhiSilica && !isLanguageModel)
+        if (!sharedCode.Contains(SharedCodeEnum.OnnxRuntimeGenAIChatClientFactory) && !isPhiSilica && !isLanguageModel)
         {
             return null;
         }
@@ -413,7 +412,7 @@ static void AddPackageReference(ProjectItemGroupElement itemGroup, string packag
             return "PhiSilicaClient.CreateAsync()";
         }
 
-        return $"GenAIModel.CreateAsync({modelPath}, {promptTemplate})";
+        return $"OnnxRuntimeGenAIChatClientFactory.CreateAsync({modelPath}, {promptTemplate})";
     }
 
     private static async Task CopyFileAsync(string sourceFile, string destinationFile, CancellationToken cancellationToken)
@@ -601,9 +600,9 @@ private async Task<string> AddFilesFromSampleAsync(
                 }
             }
 
-            if (sharedCode.Contains(SharedCodeEnum.GenAIModel))
+            if (sharedCode.Contains(SharedCodeEnum.OnnxRuntimeGenAIChatClientFactory))
             {
-                cleanCsSource = RegexInitializeComponent().Replace(cleanCsSource, $"$1this.InitializeComponent();$1GenAIModel.InitializeGenAI();");
+                cleanCsSource = RegexInitializeComponent().Replace(cleanCsSource, $"$1this.InitializeComponent();$1OnnxRuntimeGenAIChatClientFactory.InitializeGenAI();");
             }
 
             await File.WriteAllTextAsync(Path.Join(outputPath, $"Sample.xaml.cs"), cleanCsSource, cancellationToken);
diff --git a/AIDevGallery/Samples/Open Source Models/Embeddings/RetrievalAugmentedGeneration.xaml.cs b/AIDevGallery/Samples/Open Source Models/Embeddings/RetrievalAugmentedGeneration.xaml.cs
index cbec3f44..7239a100 100644
--- a/AIDevGallery/Samples/Open Source Models/Embeddings/RetrievalAugmentedGeneration.xaml.cs	
+++ b/AIDevGallery/Samples/Open Source Models/Embeddings/RetrievalAugmentedGeneration.xaml.cs	
@@ -42,7 +42,7 @@ namespace AIDevGallery.Samples.OpenSourceModels.SentenceEmbeddings.Embeddings;
         "Microsoft.ML.Tokenizers",
         "System.Numerics.Tensors",
         "Microsoft.ML.OnnxRuntime.DirectML",
-        "Microsoft.Extensions.AI.Abstractions",
+        "Microsoft.Extensions.AI",
         "Microsoft.SemanticKernel.Connectors.InMemory"
     ],
     Id = "9C1FB14D-4841-449C-9563-4551106BB693",
diff --git a/AIDevGallery/Samples/Open Source Models/Embeddings/SemanticSearch.xaml.cs b/AIDevGallery/Samples/Open Source Models/Embeddings/SemanticSearch.xaml.cs
index a5a29d84..c5d86321 100644
--- a/AIDevGallery/Samples/Open Source Models/Embeddings/SemanticSearch.xaml.cs	
+++ b/AIDevGallery/Samples/Open Source Models/Embeddings/SemanticSearch.xaml.cs	
@@ -35,7 +35,7 @@ namespace AIDevGallery.Samples.OpenSourceModels.SentenceEmbeddings.Embeddings;
         "System.Numerics.Tensors",
         "Microsoft.ML.Tokenizers",
         "Microsoft.ML.OnnxRuntime.DirectML",
-        "Microsoft.Extensions.AI.Abstractions",
+        "Microsoft.Extensions.AI",
         "Microsoft.SemanticKernel.Connectors.InMemory"
     ],
     Id = "41391b3f-f143-4719-a171-b0ce9c4cdcd6",
diff --git a/AIDevGallery/Samples/Open Source Models/Embeddings/SemanticSuggest.xaml.cs b/AIDevGallery/Samples/Open Source Models/Embeddings/SemanticSuggest.xaml.cs
index 79d76fd9..0249376b 100644
--- a/AIDevGallery/Samples/Open Source Models/Embeddings/SemanticSuggest.xaml.cs	
+++ b/AIDevGallery/Samples/Open Source Models/Embeddings/SemanticSuggest.xaml.cs	
@@ -26,7 +26,7 @@ namespace AIDevGallery.Samples.OpenSourceModels.SentenceEmbeddings.Embeddings;
         "System.Numerics.Tensors",
         "Microsoft.ML.Tokenizers",
         "Microsoft.ML.OnnxRuntime.DirectML",
-        "Microsoft.Extensions.AI.Abstractions",
+        "Microsoft.Extensions.AI",
         "Microsoft.SemanticKernel.Connectors.InMemory"
     ],
     Id = "c0d6c4f1-8daa-409f-a686-3de388edbf91",
diff --git a/AIDevGallery/Samples/Open Source Models/Language Models/Chat.xaml.cs b/AIDevGallery/Samples/Open Source Models/Language Models/Chat.xaml.cs
index 8b170b94..2f193d0a 100644
--- a/AIDevGallery/Samples/Open Source Models/Language Models/Chat.xaml.cs	
+++ b/AIDevGallery/Samples/Open Source Models/Language Models/Chat.xaml.cs	
@@ -25,7 +25,7 @@ namespace AIDevGallery.Samples.OpenSourceModels.LanguageModels;
     Scenario = ScenarioType.TextChat,
     NugetPackageReferences = [
         "CommunityToolkit.Mvvm",
-        "Microsoft.Extensions.AI.Abstractions"
+        "Microsoft.Extensions.AI"
     ],
     SharedCode = [
         SharedCodeEnum.Message,
diff --git a/AIDevGallery/Samples/Open Source Models/Language Models/ContentModeration.xaml.cs b/AIDevGallery/Samples/Open Source Models/Language Models/ContentModeration.xaml.cs
index 53ffc2d1..c08a9e39 100644
--- a/AIDevGallery/Samples/Open Source Models/Language Models/ContentModeration.xaml.cs	
+++ b/AIDevGallery/Samples/Open Source Models/Language Models/ContentModeration.xaml.cs	
@@ -18,7 +18,7 @@ namespace AIDevGallery.Samples.OpenSourceModels.LanguageModels;
     Model1Types = [ModelType.LanguageModels, ModelType.PhiSilica],
     Scenario = ScenarioType.TextContentModeration,
     NugetPackageReferences = [
-        "Microsoft.Extensions.AI.Abstractions"
+        "Microsoft.Extensions.AI"
     ],
     SharedCode = [],
     Id = "language-content-moderation",
diff --git a/AIDevGallery/Samples/Open Source Models/Language Models/CustomSystemPrompt.xaml.cs b/AIDevGallery/Samples/Open Source Models/Language Models/CustomSystemPrompt.xaml.cs
index 7f17da5c..23a616b8 100644
--- a/AIDevGallery/Samples/Open Source Models/Language Models/CustomSystemPrompt.xaml.cs	
+++ b/AIDevGallery/Samples/Open Source Models/Language Models/CustomSystemPrompt.xaml.cs	
@@ -24,7 +24,7 @@ namespace AIDevGallery.Samples.OpenSourceModels.LanguageModels;
     Icon = "\uE8D4",
     Scenario = ScenarioType.TextCustomParameters,
     NugetPackageReferences = [
-        "Microsoft.Extensions.AI.Abstractions"
+        "Microsoft.Extensions.AI"
     ])]
 internal sealed partial class CustomSystemPrompt : BaseSamplePage, INotifyPropertyChanged
 {
diff --git a/AIDevGallery/Samples/Open Source Models/Language Models/ExplainCode.xaml.cs b/AIDevGallery/Samples/Open Source Models/Language Models/ExplainCode.xaml.cs
index b9b42c31..5a399899 100644
--- a/AIDevGallery/Samples/Open Source Models/Language Models/ExplainCode.xaml.cs	
+++ b/AIDevGallery/Samples/Open Source Models/Language Models/ExplainCode.xaml.cs	
@@ -18,7 +18,7 @@ namespace AIDevGallery.Samples.OpenSourceModels.LanguageModels;
     Scenario = ScenarioType.CodeExplainCode,
     SharedCode = [],
     NugetPackageReferences = [
-        "Microsoft.Extensions.AI.Abstractions"
+        "Microsoft.Extensions.AI"
     ],
     Name = "Explain Code",
     Id = "ad763407-6a97-4916-ab05-30fd22f54252",
diff --git a/AIDevGallery/Samples/Open Source Models/Language Models/Generate.xaml.cs b/AIDevGallery/Samples/Open Source Models/Language Models/Generate.xaml.cs
index 082a1ce3..52247a9b 100644
--- a/AIDevGallery/Samples/Open Source Models/Language Models/Generate.xaml.cs	
+++ b/AIDevGallery/Samples/Open Source Models/Language Models/Generate.xaml.cs	
@@ -18,7 +18,7 @@ namespace AIDevGallery.Samples.OpenSourceModels.LanguageModels;
     Model1Types = [ModelType.LanguageModels, ModelType.PhiSilica],
     Scenario = ScenarioType.TextGenerateText,
     NugetPackageReferences = [
-        "Microsoft.Extensions.AI.Abstractions"
+        "Microsoft.Extensions.AI"
     ],
     Id = "25bb4e58-d909-4377-b59c-975cd6baff19",
     Icon = "\uE8D4")]
diff --git a/AIDevGallery/Samples/Open Source Models/Language Models/GenerateCode.xaml.cs b/AIDevGallery/Samples/Open Source Models/Language Models/GenerateCode.xaml.cs
index 2f041936..f7a2887c 100644
--- a/AIDevGallery/Samples/Open Source Models/Language Models/GenerateCode.xaml.cs	
+++ b/AIDevGallery/Samples/Open Source Models/Language Models/GenerateCode.xaml.cs	
@@ -24,7 +24,7 @@ namespace AIDevGallery.Samples.OpenSourceModels.LanguageModels;
     Scenario = ScenarioType.CodeGenerateCode,
     NugetPackageReferences = [
         "ColorCode.WinUI",
-        "Microsoft.Extensions.AI.Abstractions"
+        "Microsoft.Extensions.AI"
     ],
     Name = "Generate Code",
     Id = "2270c051-a91c-4af9-8975-a99fda6b024b",
diff --git a/AIDevGallery/Samples/Open Source Models/Language Models/GrammarCheck.xaml.cs b/AIDevGallery/Samples/Open Source Models/Language Models/GrammarCheck.xaml.cs
index a046daba..ebd6da54 100644
--- a/AIDevGallery/Samples/Open Source Models/Language Models/GrammarCheck.xaml.cs	
+++ b/AIDevGallery/Samples/Open Source Models/Language Models/GrammarCheck.xaml.cs	
@@ -16,7 +16,7 @@ namespace AIDevGallery.Samples.OpenSourceModels.LanguageModels;
     Model1Types = [ModelType.LanguageModels, ModelType.PhiSilica],
     Scenario = ScenarioType.TextGrammarCheckText,
     NugetPackageReferences = [
-        "Microsoft.Extensions.AI.Abstractions"
+        "Microsoft.Extensions.AI"
     ],
     Name = "Grammar Check",
     Id = "9e1b5ac5-3521-4e88-a2ce-60152a6cb44f",
diff --git a/AIDevGallery/Samples/Open Source Models/Language Models/Paraphrase.xaml.cs b/AIDevGallery/Samples/Open Source Models/Language Models/Paraphrase.xaml.cs
index 32915234..e8a7d103 100644
--- a/AIDevGallery/Samples/Open Source Models/Language Models/Paraphrase.xaml.cs	
+++ b/AIDevGallery/Samples/Open Source Models/Language Models/Paraphrase.xaml.cs	
@@ -17,7 +17,7 @@ namespace AIDevGallery.Samples.OpenSourceModels.LanguageModels;
     Model1Types = [ModelType.LanguageModels, ModelType.PhiSilica],
     Scenario = ScenarioType.TextParaphraseText,
     NugetPackageReferences = [
-        "Microsoft.Extensions.AI.Abstractions"
+        "Microsoft.Extensions.AI"
     ],
     Id = "9e006e82-8e3f-4401-8a83-d4c4c59cc20c",
     Icon = "\uE8D4")]
diff --git a/AIDevGallery/Samples/Open Source Models/Language Models/SemanticKernelChat.xaml.cs b/AIDevGallery/Samples/Open Source Models/Language Models/SemanticKernelChat.xaml.cs
index 60f46950..3bbc451d 100644
--- a/AIDevGallery/Samples/Open Source Models/Language Models/SemanticKernelChat.xaml.cs	
+++ b/AIDevGallery/Samples/Open Source Models/Language Models/SemanticKernelChat.xaml.cs	
@@ -26,6 +26,7 @@ namespace AIDevGallery.Samples.OpenSourceModels.LanguageModels;
     Scenario = ScenarioType.TextSemanticKernelChat,
     NugetPackageReferences = [
         "CommunityToolkit.Mvvm",
+        "Microsoft.Extensions.AI",
         "Microsoft.SemanticKernel.Core"
     ],
     SharedCode = [
diff --git a/AIDevGallery/Samples/Open Source Models/Language Models/SentimentAnalysis.xaml.cs b/AIDevGallery/Samples/Open Source Models/Language Models/SentimentAnalysis.xaml.cs
index 5221b356..ea12676b 100644
--- a/AIDevGallery/Samples/Open Source Models/Language Models/SentimentAnalysis.xaml.cs	
+++ b/AIDevGallery/Samples/Open Source Models/Language Models/SentimentAnalysis.xaml.cs	
@@ -17,7 +17,7 @@ namespace AIDevGallery.Samples.OpenSourceModels.LanguageModels;
     Model1Types = [ModelType.LanguageModels, ModelType.PhiSilica],
     Scenario = ScenarioType.TextAnalyzeSentimentText,
     NugetPackageReferences = [
-        "Microsoft.Extensions.AI.Abstractions"
+        "Microsoft.Extensions.AI"
     ],
     Name = "Sentiment Analysis",
     Id = "9cc84d1e-6b02-4bd2-a350-6e38c3a92ced",
diff --git a/AIDevGallery/Samples/Open Source Models/Language Models/SmartPaste.xaml.cs b/AIDevGallery/Samples/Open Source Models/Language Models/SmartPaste.xaml.cs
index d4c611e9..3f850ea0 100644
--- a/AIDevGallery/Samples/Open Source Models/Language Models/SmartPaste.xaml.cs	
+++ b/AIDevGallery/Samples/Open Source Models/Language Models/SmartPaste.xaml.cs	
@@ -18,7 +18,7 @@ namespace AIDevGallery.Samples.OpenSourceModels.LanguageModels;
     Scenario = ScenarioType.SmartControlsSmartPaste,
     NugetPackageReferences = [
         "CommunityToolkit.Mvvm",
-        "Microsoft.Extensions.AI.Abstractions"
+        "Microsoft.Extensions.AI"
     ],
     SharedCode = [
         SharedCodeEnum.SmartPasteFormCs,
diff --git a/AIDevGallery/Samples/Open Source Models/Language Models/SmartText.xaml.cs b/AIDevGallery/Samples/Open Source Models/Language Models/SmartText.xaml.cs
index b427901d..4d5931c2 100644
--- a/AIDevGallery/Samples/Open Source Models/Language Models/SmartText.xaml.cs	
+++ b/AIDevGallery/Samples/Open Source Models/Language Models/SmartText.xaml.cs	
@@ -16,7 +16,7 @@ namespace AIDevGallery.Samples.OpenSourceModels.LanguageModels;
     Icon = "\uE8D4",
     Scenario = ScenarioType.SmartControlsSmartTextBox,
     NugetPackageReferences = [
-        "Microsoft.Extensions.AI.Abstractions"
+        "Microsoft.Extensions.AI"
     ],
     SharedCode = [
         SharedCodeEnum.SmartTextBoxCs,
diff --git a/AIDevGallery/Samples/Open Source Models/Language Models/Summarize.xaml.cs b/AIDevGallery/Samples/Open Source Models/Language Models/Summarize.xaml.cs
index 023034ef..3521969f 100644
--- a/AIDevGallery/Samples/Open Source Models/Language Models/Summarize.xaml.cs	
+++ b/AIDevGallery/Samples/Open Source Models/Language Models/Summarize.xaml.cs	
@@ -17,7 +17,7 @@ namespace AIDevGallery.Samples.OpenSourceModels.LanguageModels;
     Model1Types = [ModelType.LanguageModels, ModelType.PhiSilica],
     Scenario = ScenarioType.TextSummarizeText,
     NugetPackageReferences = [
-        "Microsoft.Extensions.AI.Abstractions"
+        "Microsoft.Extensions.AI"
     ],
     Id = "21bf3574-aaa5-42fd-9f6c-3bfbbca00876",
     Icon = "\uE8D4")]
diff --git a/AIDevGallery/Samples/Open Source Models/Language Models/Translate.xaml.cs b/AIDevGallery/Samples/Open Source Models/Language Models/Translate.xaml.cs
index a41a35e6..3b794e09 100644
--- a/AIDevGallery/Samples/Open Source Models/Language Models/Translate.xaml.cs	
+++ b/AIDevGallery/Samples/Open Source Models/Language Models/Translate.xaml.cs	
@@ -18,7 +18,7 @@ namespace AIDevGallery.Samples.OpenSourceModels.LanguageModels;
     Model1Types = [ModelType.LanguageModels, ModelType.PhiSilica],
     Scenario = ScenarioType.TextTranslateText,
     NugetPackageReferences = [
-        "Microsoft.Extensions.AI.Abstractions"
+        "Microsoft.Extensions.AI"
     ],
     Id = "f045fca2-c657-4894-99f2-d0a1115176bc",
     Icon = "\uE8D4")]
diff --git a/AIDevGallery/Samples/SharedCode/IChatClient/GenAIModel.cs b/AIDevGallery/Samples/SharedCode/IChatClient/GenAIModel.cs
deleted file mode 100644
index bec3d749..00000000
--- a/AIDevGallery/Samples/SharedCode/IChatClient/GenAIModel.cs
+++ /dev/null
@@ -1,288 +0,0 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-
-using Microsoft.Extensions.AI;
-using Microsoft.ML.OnnxRuntimeGenAI;
-using System;
-using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
-using System.Linq;
-using System.Runtime.CompilerServices;
-using System.Text;
-using System.Threading;
-using System.Threading.Tasks;
-
-namespace AIDevGallery.Samples.SharedCode;
-
-internal class GenAIModel : IChatClient
-{
-    private const string TEMPLATE_PLACEHOLDER = "{{CONTENT}}";
-
-    private const int DefaultTopK = 50;
-    private const float DefaultTopP = 0.9f;
-    private const float DefaultTemperature = 1;
-    private const int DefaultMinLength = 0;
-    private const int DefaultMaxLength = 1024;
-    private const bool DefaultDoSample = false;
-
-    private readonly ChatClientMetadata _metadata;
-    private Model? _model;
-    private Tokenizer? _tokenizer;
-    private LlmPromptTemplate? _template;
-    private static readonly SemaphoreSlim _createSemaphore = new(1, 1);
-    private static OgaHandle? _ogaHandle;
-
-    private static ChatOptions GetDefaultChatOptions()
-    {
-        return new ChatOptions
-        {
-            AdditionalProperties = new AdditionalPropertiesDictionary
-            {
-                { "min_length", DefaultMinLength },
-                { "do_sample", DefaultDoSample },
-            },
-            MaxOutputTokens = DefaultMaxLength,
-            Temperature = DefaultTemperature,
-            TopP = DefaultTopP,
-            TopK = DefaultTopK,
-        };
-    }
-
-    private GenAIModel(string modelDir)
-    {
-        _metadata = new ChatClientMetadata("GenAIChatClient", new Uri($"file:///{modelDir}"));
-    }
-
-    public static async Task<GenAIModel?> CreateAsync(string modelDir, LlmPromptTemplate? template = null, CancellationToken cancellationToken = default)
-    {
-#pragma warning disable CA2000 // Dispose objects before losing scope
-        var model = new GenAIModel(modelDir);
-#pragma warning restore CA2000 // Dispose objects before losing scope
-
-        var lockAcquired = false;
-        try
-        {
-            // ensure we call CreateAsync one at a time to avoid fun issues
-            await _createSemaphore.WaitAsync(cancellationToken);
-            lockAcquired = true;
-            cancellationToken.ThrowIfCancellationRequested();
-            await model.InitializeAsync(modelDir, cancellationToken);
-        }
-        catch
-        {
-            model?.Dispose();
-            return null;
-        }
-        finally
-        {
-            if (lockAcquired)
-            {
-                _createSemaphore.Release();
-            }
-        }
-
-        model._template = template;
-        return model;
-    }
-
-    public static void InitializeGenAI()
-    {
-        _ogaHandle = new OgaHandle();
-    }
-
-    [MemberNotNullWhen(true, nameof(_model), nameof(_tokenizer))]
-    public bool IsReady => _model != null && _tokenizer != null;
-
-    public void Dispose()
-    {
-        _model?.Dispose();
-        _tokenizer?.Dispose();
-        _ogaHandle?.Dispose();
-    }
-
-    private string GetPrompt(IList<ChatMessage> history)
-    {
-        if (!history.Any())
-        {
-            return string.Empty;
-        }
-
-        if (_template == null)
-        {
-            return string.Join(". ", history);
-        }
-
-        StringBuilder prompt = new();
-
-        string systemMsgWithoutSystemTemplate = string.Empty;
-
-        for (var i = 0; i < history.Count; i++)
-        {
-            var message = history[i];
-            if (message.Role == ChatRole.System)
-            {
-                // ignore system prompts that aren't at the beginning
-                if (i == 0)
-                {
-                    if (string.IsNullOrWhiteSpace(_template.System))
-                    {
-                        systemMsgWithoutSystemTemplate = message.Text ?? string.Empty;
-                    }
-                    else
-                    {
-                        prompt.Append(_template.System.Replace(TEMPLATE_PLACEHOLDER, message.Text));
-                    }
-                }
-            }
-            else if (message.Role == ChatRole.User)
-            {
-                string msgText = message.Text ?? string.Empty;
-                if (i == 1 && !string.IsNullOrWhiteSpace(systemMsgWithoutSystemTemplate))
-                {
-                    msgText = $"{systemMsgWithoutSystemTemplate} {msgText}";
-                }
-
-                prompt.Append(string.IsNullOrWhiteSpace(_template.User) ?
-                    msgText :
-                    _template.User.Replace(TEMPLATE_PLACEHOLDER, msgText));
-            }
-            else if (message.Role == ChatRole.Assistant)
-            {
-                prompt.Append(string.IsNullOrWhiteSpace(_template.Assistant) ?
-                    message.Text :
-                    _template.Assistant.Replace(TEMPLATE_PLACEHOLDER, message.Text));
-            }
-        }
-
-        if (!string.IsNullOrWhiteSpace(_template.Assistant))
-        {
-            var substringIndex = _template.Assistant.IndexOf(TEMPLATE_PLACEHOLDER, StringComparison.InvariantCulture);
-            prompt.Append(_template.Assistant[..substringIndex]);
-        }
-
-        return prompt.ToString();
-    }
-
-    public Task<ChatResponse> GetResponseAsync(IList<ChatMessage> chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) =>
-        GetStreamingResponseAsync(chatMessages, options, cancellationToken).ToChatResponseAsync(cancellationToken: cancellationToken);
-
-    public async IAsyncEnumerable<ChatResponseUpdate> GetStreamingResponseAsync(
-        IList<ChatMessage> chatMessages, ChatOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
-    {
-        var prompt = GetPrompt(chatMessages);
-
-        if (!IsReady)
-        {
-            throw new InvalidOperationException("Model is not ready");
-        }
-
-        await Task.CompletedTask.ConfigureAwait(ConfigureAwaitOptions.ForceYielding);
-
-        using var generatorParams = new GeneratorParams(_model);
-
-        using var sequences = _tokenizer.Encode(prompt);
-
-        void TransferMetadataValue(string propertyName, object defaultValue)
-        {
-            object? val = null;
-            options?.AdditionalProperties?.TryGetValue(propertyName, out val);
-
-            val ??= defaultValue;
-
-            if (val is int intVal)
-            {
-                generatorParams.SetSearchOption(propertyName, intVal);
-            }
-            else if (val is float floatVal)
-            {
-                generatorParams.SetSearchOption(propertyName, floatVal);
-            }
-            else if (val is bool boolVal)
-            {
-                generatorParams.SetSearchOption(propertyName, boolVal);
-            }
-        }
-
-        if (options != null)
-        {
-            TransferMetadataValue("min_length", DefaultMinLength);
-            TransferMetadataValue("do_sample", DefaultDoSample);
-            generatorParams.SetSearchOption("temperature", (double)(options?.Temperature ?? DefaultTemperature));
-            generatorParams.SetSearchOption("top_p", (double)(options?.TopP ?? DefaultTopP));
-            generatorParams.SetSearchOption("top_k", options?.TopK ?? DefaultTopK);
-        }
-
-        generatorParams.SetSearchOption("max_length", (options?.MaxOutputTokens ?? DefaultMaxLength) + sequences[0].Length);
-        generatorParams.TryGraphCaptureWithMaxBatchSize(1);
-
-        using var tokenizerStream = _tokenizer.CreateStream();
-        using var generator = new Generator(_model, generatorParams);
-        generator.AppendTokenSequences(sequences);
-        StringBuilder stringBuilder = new();
-        bool stopTokensAvailable = _template != null && _template.Stop != null && _template.Stop.Length > 0;
-        while (!generator.IsDone())
-        {
-            string part;
-            try
-            {
-                if (cancellationToken.IsCancellationRequested)
-                {
-                    break;
-                }
-
-                generator.GenerateNextToken();
-                part = tokenizerStream.Decode(generator.GetSequence(0)[^1]);
-
-                if (cancellationToken.IsCancellationRequested && stopTokensAvailable)
-                {
-                    part = _template!.Stop!.Last();
-                }
-
-                stringBuilder.Append(part);
-
-                if (stopTokensAvailable)
-                {
-                    var str = stringBuilder.ToString();
-                    if (_template!.Stop!.Any(str.Contains))
-                    {
-                        break;
-                    }
-                }
-            }
-            catch (Exception)
-            {
-                break;
-            }
-
-            yield return new()
-            {
-                Role = ChatRole.Assistant,
-                Text = part,
-            };
-        }
-    }
-
-    private Task InitializeAsync(string modelDir, CancellationToken cancellationToken = default)
-    {
-        return Task.Run(
-            () =>
-            {
-                _model = new Model(modelDir);
-                cancellationToken.ThrowIfCancellationRequested();
-                _tokenizer = new Tokenizer(_model);
-            },
-            cancellationToken);
-    }
-
-    public object? GetService(Type serviceType, object? serviceKey = null)
-    {
-        return
-            serviceKey is not null ? null :
-            serviceType == typeof(ChatClientMetadata) ? _metadata :
-            _model is not null && serviceType?.IsInstanceOfType(_model) is true ? _model :
-            _tokenizer is not null && serviceType?.IsInstanceOfType(_tokenizer) is true ? _tokenizer :
-            serviceType?.IsInstanceOfType(this) is true ? this :
-            serviceType?.IsInstanceOfType(typeof(ChatOptions)) is true ? GetDefaultChatOptions() :
-            null;
-    }
-}
\ No newline at end of file
diff --git a/AIDevGallery/Samples/SharedCode/IChatClient/OnnxRuntimeGenAIChatClientFactory.cs b/AIDevGallery/Samples/SharedCode/IChatClient/OnnxRuntimeGenAIChatClientFactory.cs
new file mode 100644
index 00000000..e709223d
--- /dev/null
+++ b/AIDevGallery/Samples/SharedCode/IChatClient/OnnxRuntimeGenAIChatClientFactory.cs
@@ -0,0 +1,139 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using Microsoft.Extensions.AI;
+using Microsoft.ML.OnnxRuntimeGenAI;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace AIDevGallery.Samples.SharedCode;
+
+internal static class OnnxRuntimeGenAIChatClientFactory
+{
+    private const string TEMPLATE_PLACEHOLDER = "{{CONTENT}}";
+
+    private const int DefaultMaxLength = 1024;
+
+    private static readonly SemaphoreSlim _createSemaphore = new(1, 1);
+    private static OgaHandle? _ogaHandle;
+
+    public static async Task<IChatClient?> CreateAsync(string modelDir, LlmPromptTemplate? template = null, CancellationToken cancellationToken = default)
+    {
+        var options = new OnnxRuntimeGenAIChatClientOptions
+        {
+            StopSequences = template?.Stop ?? Array.Empty<string>(),
+            PromptFormatter = (chatMessages, _) => GetPrompt(template, chatMessages)
+        };
+
+        var lockAcquired = false;
+        OnnxRuntimeGenAIChatClient? model = null;
+        try
+        {
+            // ensure we call CreateAsync one at a time; constructing models concurrently can fail
+            await _createSemaphore.WaitAsync(cancellationToken);
+            lockAcquired = true;
+            cancellationToken.ThrowIfCancellationRequested();
+            await Task.Run(
+                () =>
+                {
+                    cancellationToken.ThrowIfCancellationRequested();
+                    model = new OnnxRuntimeGenAIChatClient(options, modelDir);
+                    cancellationToken.ThrowIfCancellationRequested();
+                },
+                cancellationToken);
+        }
+        catch
+        {
+            model?.Dispose();
+            return null;
+        }
+        finally
+        {
+            if (lockAcquired)
+            {
+                _createSemaphore.Release();
+            }
+        }
+
+        return (model
+            ?.AsBuilder())
+            ?.ConfigureOptions(o =>
+            {
+                o.AdditionalProperties ??= [];
+                o.AdditionalProperties["max_length"] = DefaultMaxLength;
+            })
+            ?.Build();
+    }
+
+    public static void InitializeGenAI()
+    {
+        _ogaHandle = new OgaHandle();
+    }
+
+    private static string GetPrompt(LlmPromptTemplate? template, IEnumerable<ChatMessage> history)
+    {
+        if (!history.Any())
+        {
+            return string.Empty;
+        }
+
+        if (template == null)
+        {
+            return string.Join(". ", history);
+        }
+
+        StringBuilder prompt = new();
+
+        string systemMsgWithoutSystemTemplate = string.Empty;
+
+        for (var i = 0; i < history.Count(); i++)
+        {
+            var message = history.ElementAt(i);
+            if (message.Role == ChatRole.System)
+            {
+                // ignore system prompts that aren't at the beginning
+                if (i == 0)
+                {
+                    if (string.IsNullOrWhiteSpace(template.System))
+                    {
+                        systemMsgWithoutSystemTemplate = message.Text ?? string.Empty;
+                    }
+                    else
+                    {
+                        prompt.Append(template.System.Replace(TEMPLATE_PLACEHOLDER, message.Text));
+                    }
+                }
+            }
+            else if (message.Role == ChatRole.User)
+            {
+                string msgText = message.Text ?? string.Empty;
+                if (i == 1 && !string.IsNullOrWhiteSpace(systemMsgWithoutSystemTemplate))
+                {
+                    msgText = $"{systemMsgWithoutSystemTemplate} {msgText}";
+                }
+
+                prompt.Append(string.IsNullOrWhiteSpace(template.User) ?
+                    msgText :
+                    template.User.Replace(TEMPLATE_PLACEHOLDER, msgText));
+            }
+            else if (message.Role == ChatRole.Assistant)
+            {
+                prompt.Append(string.IsNullOrWhiteSpace(template.Assistant) ?
+                    message.Text :
+                    template.Assistant.Replace(TEMPLATE_PLACEHOLDER, message.Text));
+            }
+        }
+
+        if (!string.IsNullOrWhiteSpace(template.Assistant))
+        {
+            var substringIndex = template.Assistant.IndexOf(TEMPLATE_PLACEHOLDER, StringComparison.InvariantCulture);
+            prompt.Append(template.Assistant[..substringIndex]);
+        }
+
+        return prompt.ToString();
+    }
+}
\ No newline at end of file
diff --git a/AIDevGallery/Samples/WCRAPIs/PhiSilicaBasic.xaml.cs b/AIDevGallery/Samples/WCRAPIs/PhiSilicaBasic.xaml.cs
index 95cb3b61..1029425d 100644
--- a/AIDevGallery/Samples/WCRAPIs/PhiSilicaBasic.xaml.cs
+++ b/AIDevGallery/Samples/WCRAPIs/PhiSilicaBasic.xaml.cs
@@ -20,7 +20,7 @@ namespace AIDevGallery.Samples.WCRAPIs;
     Id = "21f2c4a5-3d8e-4b7a-9c0f-6d2e5f3b1c8d",
     Scenario = ScenarioType.TextGenerateText,
     NugetPackageReferences = [
-        "Microsoft.Extensions.AI.Abstractions"
+        "Microsoft.Extensions.AI"
     ],
     Icon = "\uEE6F")]
 internal sealed partial class PhiSilicaBasic : BaseSamplePage
diff --git a/Directory.Packages.props b/Directory.Packages.props
index 419b41a1..eb884782 100644
--- a/Directory.Packages.props
+++ b/Directory.Packages.props
@@ -3,7 +3,7 @@
     <PackageVersion Include="CommunityToolkit.WinUI.Behaviors" Version="8.1.240916" />
     <PackageVersion Include="CommunityToolkit.WinUI.Media" Version="8.1.240916" />
     <PackageVersion Include="Microsoft.AI.DirectML" Version="1.15.4" />
-    <PackageVersion Include="Microsoft.Extensions.AI.Abstractions" Version="9.3.0-preview.1.25114.11" />
+    <PackageVersion Include="Microsoft.Extensions.AI" Version="9.3.0-preview.1.25114.11" />
     <PackageVersion Include="Microsoft.SemanticKernel.Connectors.InMemory" Version="1.39.0-preview" />
     <PackageVersion Include="Microsoft.SemanticKernel.Core" Version="1.39.0" />
     <PackageVersion Include="Microsoft.Xaml.Behaviors.WinUI.Managed" Version="2.0.9" />
@@ -23,8 +23,8 @@
     <PackageVersion Include="Microsoft.ML.OnnxRuntime.Qnn" Version="1.20.2" />
     <PackageVersion Include="Microsoft.ML.OnnxRuntime.Extensions" Version="0.13.0" />
     <PackageVersion Include="Microsoft.WindowsAppSDK" Version="1.7.250127003-experimental3" />
-    <PackageVersion Include="Microsoft.ML.OnnxRuntimeGenAI" Version="0.6.0" />
-    <PackageVersion Include="Microsoft.ML.OnnxRuntimeGenAI.Managed" Version="0.6.0" />
+    <PackageVersion Include="Microsoft.ML.OnnxRuntimeGenAI" Version="0.7.0-rc1" />
+    <PackageVersion Include="Microsoft.ML.OnnxRuntimeGenAI.Managed" Version="0.7.0-rc1" />
     <PackageVersion Include="Microsoft.ML.OnnxRuntimeGenAI.DirectML" Version="0.6.0" />
     <PackageVersion Include="CommunityToolkit.WinUI.Animations" Version="8.1.240916" />
     <PackageVersion Include="CommunityToolkit.WinUI.Extensions" Version="8.1.240916" />

From efa77f921ebf49fb1b7470a57b63772bc7215c97 Mon Sep 17 00:00:00 2001
From: Alexandre Zollinger Chohfi <alzollin@microsoft.com>
Date: Thu, 27 Feb 2025 13:47:49 -0800
Subject: [PATCH 2/8] Small update to send chatOptions to the GetPrompt method.

---
 .../IChatClient/OnnxRuntimeGenAIChatClientFactory.cs          | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/AIDevGallery/Samples/SharedCode/IChatClient/OnnxRuntimeGenAIChatClientFactory.cs b/AIDevGallery/Samples/SharedCode/IChatClient/OnnxRuntimeGenAIChatClientFactory.cs
index e709223d..2130e2b5 100644
--- a/AIDevGallery/Samples/SharedCode/IChatClient/OnnxRuntimeGenAIChatClientFactory.cs
+++ b/AIDevGallery/Samples/SharedCode/IChatClient/OnnxRuntimeGenAIChatClientFactory.cs
@@ -26,7 +26,7 @@ internal static class OnnxRuntimeGenAIChatClientFactory
         var options = new OnnxRuntimeGenAIChatClientOptions
         {
             StopSequences = template?.Stop ?? Array.Empty<string>(),
-            PromptFormatter = (chatMessages, _) => GetPrompt(template, chatMessages)
+            PromptFormatter = (chatMessages, chatOptions) => GetPrompt(template, chatMessages, chatOptions)
         };
 
         var lockAcquired = false;
@@ -74,7 +74,7 @@ public static void InitializeGenAI()
         _ogaHandle = new OgaHandle();
     }
 
-    private static string GetPrompt(LlmPromptTemplate? template, IEnumerable<ChatMessage> history)
+    private static string GetPrompt(LlmPromptTemplate? template, IEnumerable<ChatMessage> history, ChatOptions? chatOptions)
     {
         if (!history.Any())
         {

From 2d97db20188f69bf9bad85d053a1688e76801144 Mon Sep 17 00:00:00 2001
From: Alexandre Zollinger Chohfi <alzollin@microsoft.com>
Date: Thu, 27 Feb 2025 14:42:59 -0800
Subject: [PATCH 3/8] Simplified project for arm64.

---
 AIDevGallery/AIDevGallery.csproj | 7 +------
 Directory.Packages.props         | 1 +
 2 files changed, 2 insertions(+), 6 deletions(-)

diff --git a/AIDevGallery/AIDevGallery.csproj b/AIDevGallery/AIDevGallery.csproj
index 23263457..c20cec45 100644
--- a/AIDevGallery/AIDevGallery.csproj
+++ b/AIDevGallery/AIDevGallery.csproj
@@ -85,12 +85,7 @@
 
   <ItemGroup Condition="$(Platform) == 'ARM64'">
     <PackageReference Include="Microsoft.ML.OnnxRuntime.Qnn" />
-    <PackageReference Include="Microsoft.ML.OnnxRuntimeGenAI" GeneratePathProperty="true" ExcludeAssets="all" />
-    <None Include="$(PKGMicrosoft_ML_OnnxRuntimeGenAI)\runtimes\win-arm64\native\onnxruntime-genai.dll">
-	    <Link>onnxruntime-genai.dll</Link>
-	    <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
-	    <Visible>false</Visible>
-    </None>
+    <PackageReference Include="Microsoft.ML.OnnxRuntimeGenAI.QNN" />
     <PackageReference Include="Microsoft.ML.OnnxRuntimeGenAI.Managed" />
   </ItemGroup>
 	
diff --git a/Directory.Packages.props b/Directory.Packages.props
index eb884782..8ecd93e0 100644
--- a/Directory.Packages.props
+++ b/Directory.Packages.props
@@ -26,6 +26,7 @@
     <PackageVersion Include="Microsoft.ML.OnnxRuntimeGenAI" Version="0.7.0-rc1" />
     <PackageVersion Include="Microsoft.ML.OnnxRuntimeGenAI.Managed" Version="0.7.0-rc1" />
     <PackageVersion Include="Microsoft.ML.OnnxRuntimeGenAI.DirectML" Version="0.6.0" />
+    <PackageVersion Include="Microsoft.ML.OnnxRuntimeGenAI.QNN" Version="0.6.0" />
     <PackageVersion Include="CommunityToolkit.WinUI.Animations" Version="8.1.240916" />
     <PackageVersion Include="CommunityToolkit.WinUI.Extensions" Version="8.1.240916" />
     <PackageVersion Include="CommunityToolkit.WinUI.Controls.Sizers" Version="8.1.240916" />

From 7ef866190d76af48f907f0aea667720449cbb365 Mon Sep 17 00:00:00 2001
From: Alexandre Zollinger Chohfi <alzollin@microsoft.com>
Date: Thu, 27 Feb 2025 14:51:30 -0800
Subject: [PATCH 4/8] Simplified exported projects.

---
 AIDevGallery/ProjectGenerator/Generator.cs | 14 ++------------
 1 file changed, 2 insertions(+), 12 deletions(-)

diff --git a/AIDevGallery/ProjectGenerator/Generator.cs b/AIDevGallery/ProjectGenerator/Generator.cs
index db9d8e80..11214054 100644
--- a/AIDevGallery/ProjectGenerator/Generator.cs
+++ b/AIDevGallery/ProjectGenerator/Generator.cs
@@ -296,6 +296,7 @@ static void AddPackageReference(ProjectItemGroupElement itemGroup, string packag
                     packageReferenceItem.Condition = "$(Platform) == 'x64'";
                 }
                 else if (packageName == "Microsoft.ML.OnnxRuntime.Qnn" ||
+                            packageName == "Microsoft.ML.OnnxRuntimeGenAI.QNN" ||
                             packageName == "Microsoft.ML.OnnxRuntimeGenAI")
                 {
                     packageReferenceItem.Condition = "$(Platform) == 'ARM64'";
@@ -303,24 +304,13 @@ static void AddPackageReference(ProjectItemGroupElement itemGroup, string packag
 
                 var versionStr = PackageVersionHelpers.PackageVersions[packageName];
                 packageReferenceItem.AddMetadata("Version", versionStr, true);
-
-                if (packageName == "Microsoft.ML.OnnxRuntimeGenAI")
-                {
-                    var noneItem = itemGroup.AddItem("None", "$(PKGMicrosoft_ML_OnnxRuntimeGenAI)\\runtimes\\win-arm64\\native\\onnxruntime-genai.dll");
-                    noneItem.Condition = "$(Platform) == 'ARM64'";
-                    noneItem.AddMetadata("Link", "onnxruntime-genai.dll", false);
-                    noneItem.AddMetadata("CopyToOutputDirectory", "PreserveNewest", false);
-                    noneItem.AddMetadata("Visible", "false", false);
-
-                    packageReferenceItem.AddMetadata("GeneratePathProperty", "true", true);
-                    packageReferenceItem.AddMetadata("ExcludeAssets", "all", true);
-                }
             }
 
             foreach (var packageName in packageReferences)
             {
                 if (packageName == "Microsoft.ML.OnnxRuntime.DirectML")
                 {
+                    AddPackageReference(itemGroup, "Microsoft.AI.DirectML");
                     AddPackageReference(itemGroup, "Microsoft.ML.OnnxRuntime.Qnn");
                 }
                 else if (packageName == "Microsoft.ML.OnnxRuntimeGenAI.DirectML")

From 4588722d5f818c02acbd5da5920684e6ac35b62b Mon Sep 17 00:00:00 2001
From: Alexandre Zollinger Chohfi <alzollin@microsoft.com>
Date: Thu, 27 Feb 2025 15:27:54 -0800
Subject: [PATCH 5/8] Added Microsoft.ML.OnnxRuntimeGenAI package reference for ARM64.

---
 AIDevGallery/AIDevGallery.csproj | 1 +
 1 file changed, 1 insertion(+)

diff --git a/AIDevGallery/AIDevGallery.csproj b/AIDevGallery/AIDevGallery.csproj
index c20cec45..41195dbb 100644
--- a/AIDevGallery/AIDevGallery.csproj
+++ b/AIDevGallery/AIDevGallery.csproj
@@ -85,6 +85,7 @@
 
   <ItemGroup Condition="$(Platform) == 'ARM64'">
     <PackageReference Include="Microsoft.ML.OnnxRuntime.Qnn" />
+    <PackageReference Include="Microsoft.ML.OnnxRuntimeGenAI" />
     <PackageReference Include="Microsoft.ML.OnnxRuntimeGenAI.QNN" />
     <PackageReference Include="Microsoft.ML.OnnxRuntimeGenAI.Managed" />
   </ItemGroup>

From 762aaf5fd11ca60c04988660e75c9113cc7b462e Mon Sep 17 00:00:00 2001
From: Alexandre Zollinger Chohfi <alzollin@microsoft.com>
Date: Thu, 27 Feb 2025 16:00:28 -0800
Subject: [PATCH 6/8] More fixes on generated projects.

---
 AIDevGallery/AIDevGallery.csproj           | 4 +---
 AIDevGallery/ProjectGenerator/Generator.cs | 2 +-
 Directory.Packages.props                   | 1 -
 3 files changed, 2 insertions(+), 5 deletions(-)

diff --git a/AIDevGallery/AIDevGallery.csproj b/AIDevGallery/AIDevGallery.csproj
index 41195dbb..6a26e998 100644
--- a/AIDevGallery/AIDevGallery.csproj
+++ b/AIDevGallery/AIDevGallery.csproj
@@ -80,21 +80,19 @@
     <PackageReference Include="NAudio.WinMM" />
     <PackageReference Include="System.Numerics.Tensors" />
     <PackageReference Include="WinUIEx" />
+    <PackageReference Include="Microsoft.ML.OnnxRuntimeGenAI.Managed" />
     <Manifest Include="$(ApplicationManifest)" />
   </ItemGroup>
 
   <ItemGroup Condition="$(Platform) == 'ARM64'">
     <PackageReference Include="Microsoft.ML.OnnxRuntime.Qnn" />
-    <PackageReference Include="Microsoft.ML.OnnxRuntimeGenAI" />
     <PackageReference Include="Microsoft.ML.OnnxRuntimeGenAI.QNN" />
-    <PackageReference Include="Microsoft.ML.OnnxRuntimeGenAI.Managed" />
   </ItemGroup>
 	
   <ItemGroup Condition="$(Platform) == 'x64'">
     <PackageReference Include="Microsoft.AI.DirectML" />
     <PackageReference Include="Microsoft.ML.OnnxRuntime.DirectML" />
     <PackageReference Include="Microsoft.ML.OnnxRuntimeGenAI.DirectML" />
-    <PackageReference Include="Microsoft.ML.OnnxRuntimeGenAI.Managed" />
   </ItemGroup>
 
   <ItemGroup>
diff --git a/AIDevGallery/ProjectGenerator/Generator.cs b/AIDevGallery/ProjectGenerator/Generator.cs
index 11214054..c5d5b359 100644
--- a/AIDevGallery/ProjectGenerator/Generator.cs
+++ b/AIDevGallery/ProjectGenerator/Generator.cs
@@ -316,7 +316,7 @@ static void AddPackageReference(ProjectItemGroupElement itemGroup, string packag
                 else if (packageName == "Microsoft.ML.OnnxRuntimeGenAI.DirectML")
                 {
                     AddPackageReference(itemGroup, "Microsoft.ML.OnnxRuntime.Qnn");
-                    AddPackageReference(itemGroup, "Microsoft.ML.OnnxRuntimeGenAI");
+                    AddPackageReference(itemGroup, "Microsoft.ML.OnnxRuntimeGenAI.QNN");
                     AddPackageReference(itemGroup, "Microsoft.ML.OnnxRuntimeGenAI.Managed");
                 }
 
diff --git a/Directory.Packages.props b/Directory.Packages.props
index 8ecd93e0..cbc9fe6d 100644
--- a/Directory.Packages.props
+++ b/Directory.Packages.props
@@ -23,7 +23,6 @@
     <PackageVersion Include="Microsoft.ML.OnnxRuntime.Qnn" Version="1.20.2" />
     <PackageVersion Include="Microsoft.ML.OnnxRuntime.Extensions" Version="0.13.0" />
     <PackageVersion Include="Microsoft.WindowsAppSDK" Version="1.7.250127003-experimental3" />
-    <PackageVersion Include="Microsoft.ML.OnnxRuntimeGenAI" Version="0.7.0-rc1" />
     <PackageVersion Include="Microsoft.ML.OnnxRuntimeGenAI.Managed" Version="0.7.0-rc1" />
     <PackageVersion Include="Microsoft.ML.OnnxRuntimeGenAI.DirectML" Version="0.6.0" />
     <PackageVersion Include="Microsoft.ML.OnnxRuntimeGenAI.QNN" Version="0.6.0" />

From 170f72eab07ec8f91b2038bd4fc662f2c3a75314 Mon Sep 17 00:00:00 2001
From: Alexandre Zollinger Chohfi <alzollin@microsoft.com>
Date: Fri, 14 Mar 2025 16:36:35 -0700
Subject: [PATCH 7/8] Updated to 0.7.0-rc2.

---
 .../Samples/SharedCode/Controls/SmartTextBox.cs        |  2 +-
 .../IChatClient/OnnxRuntimeGenAIChatClientFactory.cs   |  2 +-
 .../Samples/SharedCode/IChatClient/PhiSilicaClient.cs  | 10 +++++-----
 Directory.Packages.props                               |  8 ++++----
 4 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/AIDevGallery/Samples/SharedCode/Controls/SmartTextBox.cs b/AIDevGallery/Samples/SharedCode/Controls/SmartTextBox.cs
index c30e04bb..566f98b2 100644
--- a/AIDevGallery/Samples/SharedCode/Controls/SmartTextBox.cs
+++ b/AIDevGallery/Samples/SharedCode/Controls/SmartTextBox.cs
@@ -93,7 +93,7 @@ private async Task<string> Infer(string systemPrompt, string query, ChatOptions?
                     new ChatMessage(ChatRole.User, query)
             ],
             options,
-            _cts.Token)).Message.Text ?? string.Empty;
+            _cts.Token)).Text ?? string.Empty;
     }
 
     private async Task<string> ChangeToneProfessional(string textToChange)
diff --git a/AIDevGallery/Samples/SharedCode/IChatClient/OnnxRuntimeGenAIChatClientFactory.cs b/AIDevGallery/Samples/SharedCode/IChatClient/OnnxRuntimeGenAIChatClientFactory.cs
index 2130e2b5..77e00f45 100644
--- a/AIDevGallery/Samples/SharedCode/IChatClient/OnnxRuntimeGenAIChatClientFactory.cs
+++ b/AIDevGallery/Samples/SharedCode/IChatClient/OnnxRuntimeGenAIChatClientFactory.cs
@@ -41,7 +41,7 @@ await Task.Run(
                 () =>
                 {
                     cancellationToken.ThrowIfCancellationRequested();
-                    model = new OnnxRuntimeGenAIChatClient(options, modelDir);
+                    model = new OnnxRuntimeGenAIChatClient(modelDir, options);
                     cancellationToken.ThrowIfCancellationRequested();
                 },
                 cancellationToken);
diff --git a/AIDevGallery/Samples/SharedCode/IChatClient/PhiSilicaClient.cs b/AIDevGallery/Samples/SharedCode/IChatClient/PhiSilicaClient.cs
index 5cc55e53..04c16ea6 100644
--- a/AIDevGallery/Samples/SharedCode/IChatClient/PhiSilicaClient.cs
+++ b/AIDevGallery/Samples/SharedCode/IChatClient/PhiSilicaClient.cs
@@ -60,10 +60,10 @@ private static ChatOptions GetDefaultChatOptions()
         return phiSilicaClient;
     }
 
-    public Task<ChatResponse> GetResponseAsync(IList<ChatMessage> chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) =>
+    public Task<ChatResponse> GetResponseAsync(IEnumerable<ChatMessage> chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) =>
         GetStreamingResponseAsync(chatMessages, options, cancellationToken).ToChatResponseAsync(cancellationToken: cancellationToken);
 
-    public async IAsyncEnumerable<ChatResponseUpdate> GetStreamingResponseAsync(IList<ChatMessage> chatMessages, ChatOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
+    public async IAsyncEnumerable<ChatResponseUpdate> GetStreamingResponseAsync(IEnumerable<ChatMessage> chatMessages, ChatOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
     {
         if (_languageModel == null)
         {
@@ -72,12 +72,12 @@ public async IAsyncEnumerable<ChatResponseUpdate> GetStreamingResponseAsync(ILis
 
         var prompt = GetPrompt(chatMessages);
 
+        string responseId = Guid.NewGuid().ToString("N");
         await foreach (var part in GenerateStreamResponseAsync(prompt, options, cancellationToken))
         {
-            yield return new ChatResponseUpdate
+            yield return new ChatResponseUpdate(ChatRole.Assistant, part)
             {
-                Role = ChatRole.Assistant,
-                Text = part,
+                ResponseId = responseId
             };
         }
     }
diff --git a/Directory.Packages.props b/Directory.Packages.props
index ab97d417..96fe42ff 100644
--- a/Directory.Packages.props
+++ b/Directory.Packages.props
@@ -20,12 +20,12 @@
     <PackageVersion Include="CommunityToolkit.WinUI.UI.Controls.Markdown" Version="7.1.2" />
     <PackageVersion Include="Microsoft.Build" Version="17.13.9" />
     <PackageVersion Include="Microsoft.ML.OnnxRuntime.DirectML" Version="1.21.0" />
-    <PackageVersion Include="Microsoft.ML.OnnxRuntime.Qnn" Version="1.20.2" />
+    <PackageVersion Include="Microsoft.ML.OnnxRuntime.Qnn" Version="1.21.0" />
     <PackageVersion Include="Microsoft.ML.OnnxRuntime.Extensions" Version="0.14.0" />
     <PackageVersion Include="Microsoft.WindowsAppSDK" Version="1.7.250127003-experimental3" />
-    <PackageVersion Include="Microsoft.ML.OnnxRuntimeGenAI.Managed" Version="0.7.0-rc1" />
-    <PackageVersion Include="Microsoft.ML.OnnxRuntimeGenAI.DirectML" Version="0.6.0" />
-    <PackageVersion Include="Microsoft.ML.OnnxRuntimeGenAI.QNN" Version="0.6.0" />
+    <PackageVersion Include="Microsoft.ML.OnnxRuntimeGenAI.Managed" Version="0.7.0-rc2" />
+    <PackageVersion Include="Microsoft.ML.OnnxRuntimeGenAI.DirectML" Version="0.7.0-rc2" />
+    <PackageVersion Include="Microsoft.ML.OnnxRuntimeGenAI.QNN" Version="0.7.0-rc2" />
     <PackageVersion Include="CommunityToolkit.WinUI.Animations" Version="8.1.240916" />
     <PackageVersion Include="CommunityToolkit.WinUI.Extensions" Version="8.1.240916" />
     <PackageVersion Include="CommunityToolkit.WinUI.Controls.Sizers" Version="8.1.240916" />

From 5b61ceec6dd7c02ed4ea64ab9f696eacab75b136 Mon Sep 17 00:00:00 2001
From: Alexandre Zollinger Chohfi <alzollin@microsoft.com>
Date: Tue, 18 Mar 2025 16:10:14 -0700
Subject: [PATCH 8/8] Added Microsoft.AI.DirectML reference when generating DirectML projects.

---
 AIDevGallery/ProjectGenerator/Generator.cs | 1 +
 1 file changed, 1 insertion(+)

diff --git a/AIDevGallery/ProjectGenerator/Generator.cs b/AIDevGallery/ProjectGenerator/Generator.cs
index e8c8bfeb..d5d5590e 100644
--- a/AIDevGallery/ProjectGenerator/Generator.cs
+++ b/AIDevGallery/ProjectGenerator/Generator.cs
@@ -298,6 +298,7 @@ static void AddPackageReference(ProjectItemGroupElement itemGroup, string packag
                 }
                 else if (packageName == "Microsoft.ML.OnnxRuntimeGenAI.DirectML")
                 {
+                    AddPackageReference(itemGroup, "Microsoft.AI.DirectML");
                     AddPackageReference(itemGroup, "Microsoft.ML.OnnxRuntime.Qnn");
                     AddPackageReference(itemGroup, "Microsoft.ML.OnnxRuntimeGenAI.QNN");
                     AddPackageReference(itemGroup, "Microsoft.ML.OnnxRuntimeGenAI.Managed");