diff --git a/AIDevGallery/AIDevGallery.csproj b/AIDevGallery/AIDevGallery.csproj
index 864cb9e8..79c7716e 100644
--- a/AIDevGallery/AIDevGallery.csproj
+++ b/AIDevGallery/AIDevGallery.csproj
@@ -61,7 +61,7 @@
     <PackageReference Include="CommunityToolkit.WinUI.Media" />
     <PackageReference Include="MathNet.Numerics" />
     <PackageReference Include="Microsoft.Build" />
-    <PackageReference Include="Microsoft.Extensions.AI.Abstractions" />
+    <PackageReference Include="Microsoft.Extensions.AI" />
     <PackageReference Include="Microsoft.Extensions.AI.Ollama" />
     <PackageReference Include="Microsoft.ML.OnnxRuntime.Extensions" />
     <PackageReference Include="Microsoft.ML.Tokenizers" />
@@ -85,18 +85,13 @@
     <PackageReference Include="HtmlAgilityPack"/>
     <PackageReference Include="Markdig"/>
     <PackageReference Include="Roman-Numerals"/>
+    <PackageReference Include="Microsoft.ML.OnnxRuntimeGenAI.Managed" />
     <Manifest Include="$(ApplicationManifest)" />
   </ItemGroup>
 
   <ItemGroup Condition="$(Platform) == 'ARM64'">
     <PackageReference Include="Microsoft.ML.OnnxRuntime.Qnn" />
-    <PackageReference Include="Microsoft.ML.OnnxRuntimeGenAI" GeneratePathProperty="true" ExcludeAssets="all" />
-    <None Include="$(PKGMicrosoft_ML_OnnxRuntimeGenAI)\runtimes\win-arm64\native\onnxruntime-genai.dll">
-	    <Link>onnxruntime-genai.dll</Link>
-	    <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
-	    <Visible>false</Visible>
-    </None>
-    <PackageReference Include="Microsoft.ML.OnnxRuntimeGenAI.Managed" />
+    <PackageReference Include="Microsoft.ML.OnnxRuntimeGenAI.QNN" />
   </ItemGroup>
 	
   <ItemGroup Condition="$(Platform) == 'x64'">
diff --git a/AIDevGallery/Helpers/SamplesHelper.cs b/AIDevGallery/Helpers/SamplesHelper.cs
index 167ee6bc..16a93fa0 100644
--- a/AIDevGallery/Helpers/SamplesHelper.cs
+++ b/AIDevGallery/Helpers/SamplesHelper.cs
@@ -25,11 +25,11 @@ public static List<SharedCodeEnum> GetAllSharedCode(this Sample sample, Dictiona
         {
             if (!models.Values.Any(m => m.IsApi()))
             {
-                AddUnique(SharedCodeEnum.GenAIModel);
+                AddUnique(SharedCodeEnum.OnnxRuntimeGenAIChatClientFactory);
             }
         }
 
-        if (sharedCode.Contains(SharedCodeEnum.GenAIModel))
+        if (sharedCode.Contains(SharedCodeEnum.OnnxRuntimeGenAIChatClientFactory))
         {
             AddUnique(SharedCodeEnum.LlmPromptTemplate);
         }
@@ -72,6 +72,7 @@ public static List<string> GetAllNugetPackageReferences(this Sample sample, Dict
             }
             else
             {
+                AddUnique("Microsoft.ML.OnnxRuntimeGenAI.Managed");
                 AddUnique("Microsoft.ML.OnnxRuntimeGenAI.DirectML");
             }
         }
@@ -175,7 +176,7 @@ static string EscapeNewLines(string str)
     private static string? GetChatClientLoaderString(List<SharedCodeEnum> sharedCode, string modelPath, string promptTemplate, bool isPhiSilica, ModelType modelType)
     {
         bool isLanguageModel = ModelDetailsHelper.EqualOrParent(modelType, ModelType.LanguageModels);
-        if (!sharedCode.Contains(SharedCodeEnum.GenAIModel) && !isPhiSilica && !isLanguageModel)
+        if (!sharedCode.Contains(SharedCodeEnum.OnnxRuntimeGenAIChatClientFactory) && !isPhiSilica && !isLanguageModel)
         {
             return null;
         }
@@ -191,7 +192,7 @@ static string EscapeNewLines(string str)
             return $"new OllamaChatClient(\"{OllamaHelper.GetOllamaUrl()}\", \"{modelId}\")";
         }
 
-        return $"await GenAIModel.CreateAsync({modelPath}, {promptTemplate})";
+        return $"await OnnxRuntimeGenAIChatClientFactory.CreateAsync({modelPath}, {promptTemplate})";
     }
 
     public static string GetCleanCSCode(this Sample sample, Dictionary<ModelType, (ExpandedModelDetails ExpandedModelDetails, string ModelPathStr)> modelInfos)
@@ -259,9 +260,9 @@ public static string GetCleanCSCode(this Sample sample, Dictionary<ModelType, (E
             }
         }
 
-        if (sharedCode.Contains(SharedCodeEnum.GenAIModel))
+        if (sharedCode.Contains(SharedCodeEnum.OnnxRuntimeGenAIChatClientFactory))
         {
-            cleanCsSource = RegexInitializeComponent().Replace(cleanCsSource, $"$1this.InitializeComponent();$1GenAIModel.InitializeGenAI();");
+            cleanCsSource = RegexInitializeComponent().Replace(cleanCsSource, $"$1this.InitializeComponent();$1OnnxRuntimeGenAIChatClientFactory.InitializeGenAI();");
         }
 
         return cleanCsSource;
diff --git a/AIDevGallery/Models/BaseSampleNavigationParameters.cs b/AIDevGallery/Models/BaseSampleNavigationParameters.cs
index 5be22b10..8cbc1184 100644
--- a/AIDevGallery/Models/BaseSampleNavigationParameters.cs
+++ b/AIDevGallery/Models/BaseSampleNavigationParameters.cs
@@ -35,7 +35,7 @@ public void NotifyCompletion()
             return new OllamaChatClient(OllamaHelper.GetOllamaUrl(), modelId);
         }
 
-        return await GenAIModel.CreateAsync(ChatClientModelPath, ChatClientPromptTemplate, CancellationToken).ConfigureAwait(false);
+        return await OnnxRuntimeGenAIChatClientFactory.CreateAsync(ChatClientModelPath, ChatClientPromptTemplate, CancellationToken).ConfigureAwait(false);
     }
 
     internal abstract void SendSampleInteractionEvent(string? customInfo = null);
diff --git a/AIDevGallery/ProjectGenerator/Generator.cs b/AIDevGallery/ProjectGenerator/Generator.cs
index fd6e4029..4dccecae 100644
--- a/AIDevGallery/ProjectGenerator/Generator.cs
+++ b/AIDevGallery/ProjectGenerator/Generator.cs
@@ -283,38 +283,28 @@ static void AddPackageReference(ProjectItemGroupElement itemGroup, string packag
                     packageReferenceItem.Condition = "$(Platform) == 'x64'";
                 }
                 else if (packageName == "Microsoft.ML.OnnxRuntime.Qnn" ||
-                            packageName == "Microsoft.ML.OnnxRuntimeGenAI" ||
-                            packageName == "Microsoft.ML.OnnxRuntimeGenAI.Managed")
+                            packageName == "Microsoft.ML.OnnxRuntimeGenAI.QNN" ||
+                            packageName == "Microsoft.ML.OnnxRuntimeGenAI")
                 {
                     packageReferenceItem.Condition = "$(Platform) == 'ARM64'";
                 }
 
                 var versionStr = PackageVersionHelpers.PackageVersions[packageName];
                 packageReferenceItem.AddMetadata("Version", versionStr, true);
-
-                if (packageName == "Microsoft.ML.OnnxRuntimeGenAI")
-                {
-                    var noneItem = itemGroup.AddItem("None", "$(PKGMicrosoft_ML_OnnxRuntimeGenAI)\\runtimes\\win-arm64\\native\\onnxruntime-genai.dll");
-                    noneItem.Condition = "$(Platform) == 'ARM64'";
-                    noneItem.AddMetadata("Link", "onnxruntime-genai.dll", false);
-                    noneItem.AddMetadata("CopyToOutputDirectory", "PreserveNewest", false);
-                    noneItem.AddMetadata("Visible", "false", false);
-
-                    packageReferenceItem.AddMetadata("GeneratePathProperty", "true", true);
-                    packageReferenceItem.AddMetadata("ExcludeAssets", "all", true);
-                }
             }
 
             foreach (var packageName in packageReferences)
             {
                 if (packageName == "Microsoft.ML.OnnxRuntime.DirectML")
                 {
+                    AddPackageReference(itemGroup, "Microsoft.AI.DirectML");
                     AddPackageReference(itemGroup, "Microsoft.ML.OnnxRuntime.Qnn");
                 }
                 else if (packageName == "Microsoft.ML.OnnxRuntimeGenAI.DirectML")
                 {
+                    AddPackageReference(itemGroup, "Microsoft.AI.DirectML");
                     AddPackageReference(itemGroup, "Microsoft.ML.OnnxRuntime.Qnn");
-                    AddPackageReference(itemGroup, "Microsoft.ML.OnnxRuntimeGenAI");
+                    AddPackageReference(itemGroup, "Microsoft.ML.OnnxRuntimeGenAI.QNN");
                     AddPackageReference(itemGroup, "Microsoft.ML.OnnxRuntimeGenAI.Managed");
                 }
 
diff --git a/AIDevGallery/Samples/Open Source Models/Embeddings/RetrievalAugmentedGeneration.xaml.cs b/AIDevGallery/Samples/Open Source Models/Embeddings/RetrievalAugmentedGeneration.xaml.cs
index 598525dc..1b84a31b 100644
--- a/AIDevGallery/Samples/Open Source Models/Embeddings/RetrievalAugmentedGeneration.xaml.cs	
+++ b/AIDevGallery/Samples/Open Source Models/Embeddings/RetrievalAugmentedGeneration.xaml.cs	
@@ -42,7 +42,7 @@ namespace AIDevGallery.Samples.OpenSourceModels.SentenceEmbeddings.Embeddings;
         "Microsoft.ML.Tokenizers",
         "System.Numerics.Tensors",
         "Microsoft.ML.OnnxRuntime.DirectML",
-        "Microsoft.Extensions.AI.Abstractions",
+        "Microsoft.Extensions.AI",
         "Microsoft.SemanticKernel.Connectors.InMemory"
     ],
     Id = "9C1FB14D-4841-449C-9563-4551106BB693",
diff --git a/AIDevGallery/Samples/Open Source Models/Embeddings/SemanticSearch.xaml.cs b/AIDevGallery/Samples/Open Source Models/Embeddings/SemanticSearch.xaml.cs
index 9eeaf7bc..cd54c525 100644
--- a/AIDevGallery/Samples/Open Source Models/Embeddings/SemanticSearch.xaml.cs	
+++ b/AIDevGallery/Samples/Open Source Models/Embeddings/SemanticSearch.xaml.cs	
@@ -35,7 +35,7 @@ namespace AIDevGallery.Samples.OpenSourceModels.SentenceEmbeddings.Embeddings;
         "System.Numerics.Tensors",
         "Microsoft.ML.Tokenizers",
         "Microsoft.ML.OnnxRuntime.DirectML",
-        "Microsoft.Extensions.AI.Abstractions",
+        "Microsoft.Extensions.AI",
         "Microsoft.SemanticKernel.Connectors.InMemory"
     ],
     Id = "41391b3f-f143-4719-a171-b0ce9c4cdcd6",
diff --git a/AIDevGallery/Samples/Open Source Models/Embeddings/SemanticSuggest.xaml.cs b/AIDevGallery/Samples/Open Source Models/Embeddings/SemanticSuggest.xaml.cs
index 79d76fd9..0249376b 100644
--- a/AIDevGallery/Samples/Open Source Models/Embeddings/SemanticSuggest.xaml.cs	
+++ b/AIDevGallery/Samples/Open Source Models/Embeddings/SemanticSuggest.xaml.cs	
@@ -26,7 +26,7 @@ namespace AIDevGallery.Samples.OpenSourceModels.SentenceEmbeddings.Embeddings;
         "System.Numerics.Tensors",
         "Microsoft.ML.Tokenizers",
         "Microsoft.ML.OnnxRuntime.DirectML",
-        "Microsoft.Extensions.AI.Abstractions",
+        "Microsoft.Extensions.AI",
         "Microsoft.SemanticKernel.Connectors.InMemory"
     ],
     Id = "c0d6c4f1-8daa-409f-a686-3de388edbf91",
diff --git a/AIDevGallery/Samples/Open Source Models/Language Models/Chat.xaml.cs b/AIDevGallery/Samples/Open Source Models/Language Models/Chat.xaml.cs
index 5f4ae849..ed01d474 100644
--- a/AIDevGallery/Samples/Open Source Models/Language Models/Chat.xaml.cs	
+++ b/AIDevGallery/Samples/Open Source Models/Language Models/Chat.xaml.cs	
@@ -25,7 +25,7 @@ namespace AIDevGallery.Samples.OpenSourceModels.LanguageModels;
     Scenario = ScenarioType.TextChat,
     NugetPackageReferences = [
         "CommunityToolkit.Mvvm",
-        "Microsoft.Extensions.AI.Abstractions"
+        "Microsoft.Extensions.AI"
     ],
     SharedCode = [
         SharedCodeEnum.Message,
diff --git a/AIDevGallery/Samples/Open Source Models/Language Models/ContentModeration.xaml.cs b/AIDevGallery/Samples/Open Source Models/Language Models/ContentModeration.xaml.cs
index 47a28d8b..bbb7b30b 100644
--- a/AIDevGallery/Samples/Open Source Models/Language Models/ContentModeration.xaml.cs	
+++ b/AIDevGallery/Samples/Open Source Models/Language Models/ContentModeration.xaml.cs	
@@ -19,7 +19,7 @@ namespace AIDevGallery.Samples.OpenSourceModels.LanguageModels;
     Model1Types = [ModelType.LanguageModels, ModelType.PhiSilica],
     Scenario = ScenarioType.TextContentModeration,
     NugetPackageReferences = [
-        "Microsoft.Extensions.AI.Abstractions"
+        "Microsoft.Extensions.AI"
     ],
     SharedCode = [],
     Id = "language-content-moderation",
diff --git a/AIDevGallery/Samples/Open Source Models/Language Models/CustomSystemPrompt.xaml.cs b/AIDevGallery/Samples/Open Source Models/Language Models/CustomSystemPrompt.xaml.cs
index 46cdd3f1..442d93ad 100644
--- a/AIDevGallery/Samples/Open Source Models/Language Models/CustomSystemPrompt.xaml.cs	
+++ b/AIDevGallery/Samples/Open Source Models/Language Models/CustomSystemPrompt.xaml.cs	
@@ -25,7 +25,7 @@ namespace AIDevGallery.Samples.OpenSourceModels.LanguageModels;
     Icon = "\uE8D4",
     Scenario = ScenarioType.TextCustomParameters,
     NugetPackageReferences = [
-        "Microsoft.Extensions.AI.Abstractions"
+        "Microsoft.Extensions.AI"
     ])]
 internal sealed partial class CustomSystemPrompt : BaseSamplePage, INotifyPropertyChanged
 {
diff --git a/AIDevGallery/Samples/Open Source Models/Language Models/ExplainCode.xaml.cs b/AIDevGallery/Samples/Open Source Models/Language Models/ExplainCode.xaml.cs
index 455eb11d..e88415ae 100644
--- a/AIDevGallery/Samples/Open Source Models/Language Models/ExplainCode.xaml.cs	
+++ b/AIDevGallery/Samples/Open Source Models/Language Models/ExplainCode.xaml.cs	
@@ -18,7 +18,7 @@ namespace AIDevGallery.Samples.OpenSourceModels.LanguageModels;
     Scenario = ScenarioType.CodeExplainCode,
     SharedCode = [],
     NugetPackageReferences = [
-        "Microsoft.Extensions.AI.Abstractions"
+        "Microsoft.Extensions.AI"
     ],
     Name = "Explain Code",
     Id = "ad763407-6a97-4916-ab05-30fd22f54252",
diff --git a/AIDevGallery/Samples/Open Source Models/Language Models/Generate.xaml.cs b/AIDevGallery/Samples/Open Source Models/Language Models/Generate.xaml.cs
index 2e08ebb3..1d90833c 100644
--- a/AIDevGallery/Samples/Open Source Models/Language Models/Generate.xaml.cs	
+++ b/AIDevGallery/Samples/Open Source Models/Language Models/Generate.xaml.cs	
@@ -18,7 +18,7 @@ namespace AIDevGallery.Samples.OpenSourceModels.LanguageModels;
     Model1Types = [ModelType.LanguageModels, ModelType.PhiSilica],
     Scenario = ScenarioType.TextGenerateText,
     NugetPackageReferences = [
-        "Microsoft.Extensions.AI.Abstractions"
+        "Microsoft.Extensions.AI"
     ],
     Id = "25bb4e58-d909-4377-b59c-975cd6baff19",
     Icon = "\uE8D4")]
diff --git a/AIDevGallery/Samples/Open Source Models/Language Models/GenerateCode.xaml.cs b/AIDevGallery/Samples/Open Source Models/Language Models/GenerateCode.xaml.cs
index f7323587..ce6f8436 100644
--- a/AIDevGallery/Samples/Open Source Models/Language Models/GenerateCode.xaml.cs	
+++ b/AIDevGallery/Samples/Open Source Models/Language Models/GenerateCode.xaml.cs	
@@ -24,7 +24,7 @@ namespace AIDevGallery.Samples.OpenSourceModels.LanguageModels;
     Scenario = ScenarioType.CodeGenerateCode,
     NugetPackageReferences = [
         "ColorCode.WinUI",
-        "Microsoft.Extensions.AI.Abstractions"
+        "Microsoft.Extensions.AI"
     ],
     Name = "Generate Code",
     Id = "2270c051-a91c-4af9-8975-a99fda6b024b",
diff --git a/AIDevGallery/Samples/Open Source Models/Language Models/GrammarCheck.xaml.cs b/AIDevGallery/Samples/Open Source Models/Language Models/GrammarCheck.xaml.cs
index 6816882c..66060cfb 100644
--- a/AIDevGallery/Samples/Open Source Models/Language Models/GrammarCheck.xaml.cs	
+++ b/AIDevGallery/Samples/Open Source Models/Language Models/GrammarCheck.xaml.cs	
@@ -17,7 +17,7 @@ namespace AIDevGallery.Samples.OpenSourceModels.LanguageModels;
     Model1Types = [ModelType.LanguageModels, ModelType.PhiSilica],
     Scenario = ScenarioType.TextGrammarCheckText,
     NugetPackageReferences = [
-        "Microsoft.Extensions.AI.Abstractions"
+        "Microsoft.Extensions.AI"
     ],
     Name = "Grammar Check",
     Id = "9e1b5ac5-3521-4e88-a2ce-60152a6cb44f",
diff --git a/AIDevGallery/Samples/Open Source Models/Language Models/Paraphrase.xaml.cs b/AIDevGallery/Samples/Open Source Models/Language Models/Paraphrase.xaml.cs
index 650e4c5a..998c915e 100644
--- a/AIDevGallery/Samples/Open Source Models/Language Models/Paraphrase.xaml.cs	
+++ b/AIDevGallery/Samples/Open Source Models/Language Models/Paraphrase.xaml.cs	
@@ -18,7 +18,7 @@ namespace AIDevGallery.Samples.OpenSourceModels.LanguageModels;
     Model1Types = [ModelType.LanguageModels, ModelType.PhiSilica],
     Scenario = ScenarioType.TextParaphraseText,
     NugetPackageReferences = [
-        "Microsoft.Extensions.AI.Abstractions"
+        "Microsoft.Extensions.AI"
     ],
     Id = "9e006e82-8e3f-4401-8a83-d4c4c59cc20c",
     Icon = "\uE8D4")]
diff --git a/AIDevGallery/Samples/Open Source Models/Language Models/SemanticKernelChat.xaml.cs b/AIDevGallery/Samples/Open Source Models/Language Models/SemanticKernelChat.xaml.cs
index 96f54adc..8b982101 100644
--- a/AIDevGallery/Samples/Open Source Models/Language Models/SemanticKernelChat.xaml.cs	
+++ b/AIDevGallery/Samples/Open Source Models/Language Models/SemanticKernelChat.xaml.cs	
@@ -26,6 +26,7 @@ namespace AIDevGallery.Samples.OpenSourceModels.LanguageModels;
     Scenario = ScenarioType.TextSemanticKernelChat,
     NugetPackageReferences = [
         "CommunityToolkit.Mvvm",
+        "Microsoft.Extensions.AI",
         "Microsoft.SemanticKernel.Core"
     ],
     SharedCode = [
diff --git a/AIDevGallery/Samples/Open Source Models/Language Models/SentimentAnalysis.xaml.cs b/AIDevGallery/Samples/Open Source Models/Language Models/SentimentAnalysis.xaml.cs
index 4eeebf0a..2d13490b 100644
--- a/AIDevGallery/Samples/Open Source Models/Language Models/SentimentAnalysis.xaml.cs	
+++ b/AIDevGallery/Samples/Open Source Models/Language Models/SentimentAnalysis.xaml.cs	
@@ -18,7 +18,7 @@ namespace AIDevGallery.Samples.OpenSourceModels.LanguageModels;
     Model1Types = [ModelType.LanguageModels, ModelType.PhiSilica],
     Scenario = ScenarioType.TextAnalyzeSentimentText,
     NugetPackageReferences = [
-        "Microsoft.Extensions.AI.Abstractions"
+        "Microsoft.Extensions.AI"
     ],
     Name = "Sentiment Analysis",
     Id = "9cc84d1e-6b02-4bd2-a350-6e38c3a92ced",
diff --git a/AIDevGallery/Samples/Open Source Models/Language Models/SmartPaste.xaml.cs b/AIDevGallery/Samples/Open Source Models/Language Models/SmartPaste.xaml.cs
index da6a9fa5..29d25a9b 100644
--- a/AIDevGallery/Samples/Open Source Models/Language Models/SmartPaste.xaml.cs	
+++ b/AIDevGallery/Samples/Open Source Models/Language Models/SmartPaste.xaml.cs	
@@ -18,7 +18,7 @@ namespace AIDevGallery.Samples.OpenSourceModels.LanguageModels;
     Scenario = ScenarioType.SmartControlsSmartPaste,
     NugetPackageReferences = [
         "CommunityToolkit.Mvvm",
-        "Microsoft.Extensions.AI.Abstractions"
+        "Microsoft.Extensions.AI"
     ],
     SharedCode = [
         SharedCodeEnum.SmartPasteFormCs,
diff --git a/AIDevGallery/Samples/Open Source Models/Language Models/SmartText.xaml.cs b/AIDevGallery/Samples/Open Source Models/Language Models/SmartText.xaml.cs
index fe94cc6e..293c8679 100644
--- a/AIDevGallery/Samples/Open Source Models/Language Models/SmartText.xaml.cs	
+++ b/AIDevGallery/Samples/Open Source Models/Language Models/SmartText.xaml.cs	
@@ -16,7 +16,7 @@ namespace AIDevGallery.Samples.OpenSourceModels.LanguageModels;
     Icon = "\uE8D4",
     Scenario = ScenarioType.SmartControlsSmartTextBox,
     NugetPackageReferences = [
-        "Microsoft.Extensions.AI.Abstractions"
+        "Microsoft.Extensions.AI"
     ],
     SharedCode = [
         SharedCodeEnum.SmartTextBoxCs,
diff --git a/AIDevGallery/Samples/Open Source Models/Language Models/Summarize.xaml.cs b/AIDevGallery/Samples/Open Source Models/Language Models/Summarize.xaml.cs
index 8d191a84..a43b1829 100644
--- a/AIDevGallery/Samples/Open Source Models/Language Models/Summarize.xaml.cs	
+++ b/AIDevGallery/Samples/Open Source Models/Language Models/Summarize.xaml.cs	
@@ -17,7 +17,7 @@ namespace AIDevGallery.Samples.OpenSourceModels.LanguageModels;
     Model1Types = [ModelType.LanguageModels, ModelType.PhiSilica],
     Scenario = ScenarioType.TextSummarizeText,
     NugetPackageReferences = [
-        "Microsoft.Extensions.AI.Abstractions"
+        "Microsoft.Extensions.AI"
     ],
     Id = "21bf3574-aaa5-42fd-9f6c-3bfbbca00876",
     Icon = "\uE8D4")]
diff --git a/AIDevGallery/Samples/Open Source Models/Language Models/Translate.xaml.cs b/AIDevGallery/Samples/Open Source Models/Language Models/Translate.xaml.cs
index 33ab3ee1..52438fac 100644
--- a/AIDevGallery/Samples/Open Source Models/Language Models/Translate.xaml.cs	
+++ b/AIDevGallery/Samples/Open Source Models/Language Models/Translate.xaml.cs	
@@ -19,7 +19,7 @@ namespace AIDevGallery.Samples.OpenSourceModels.LanguageModels;
     Model1Types = [ModelType.LanguageModels, ModelType.PhiSilica],
     Scenario = ScenarioType.TextTranslateText,
     NugetPackageReferences = [
-        "Microsoft.Extensions.AI.Abstractions"
+        "Microsoft.Extensions.AI"
     ],
     Id = "f045fca2-c657-4894-99f2-d0a1115176bc",
     Icon = "\uE8D4")]
diff --git a/AIDevGallery/Samples/SharedCode/IChatClient/GenAIModel.cs b/AIDevGallery/Samples/SharedCode/IChatClient/GenAIModel.cs
deleted file mode 100644
index 934b1d87..00000000
--- a/AIDevGallery/Samples/SharedCode/IChatClient/GenAIModel.cs
+++ /dev/null
@@ -1,289 +0,0 @@
-// Copyright (c) Microsoft Corporation. All rights reserved.
-// Licensed under the MIT License.
-
-using Microsoft.Extensions.AI;
-using Microsoft.ML.OnnxRuntimeGenAI;
-using System;
-using System.Collections.Generic;
-using System.Diagnostics.CodeAnalysis;
-using System.Linq;
-using System.Runtime.CompilerServices;
-using System.Text;
-using System.Threading;
-using System.Threading.Tasks;
-
-namespace AIDevGallery.Samples.SharedCode;
-
-internal class GenAIModel : IChatClient
-{
-    private const string TEMPLATE_PLACEHOLDER = "{{CONTENT}}";
-
-    private const int DefaultTopK = 50;
-    private const float DefaultTopP = 0.9f;
-    private const float DefaultTemperature = 1;
-    private const int DefaultMinLength = 0;
-    private const int DefaultMaxLength = 1024;
-    private const bool DefaultDoSample = false;
-
-    private readonly ChatClientMetadata _metadata;
-    private Model? _model;
-    private Tokenizer? _tokenizer;
-    private LlmPromptTemplate? _template;
-    private static readonly SemaphoreSlim _createSemaphore = new(1, 1);
-    private static OgaHandle? _ogaHandle;
-
-    private static ChatOptions GetDefaultChatOptions()
-    {
-        return new ChatOptions
-        {
-            AdditionalProperties = new AdditionalPropertiesDictionary
-            {
-                { "min_length", DefaultMinLength },
-                { "do_sample", DefaultDoSample },
-            },
-            MaxOutputTokens = DefaultMaxLength,
-            Temperature = DefaultTemperature,
-            TopP = DefaultTopP,
-            TopK = DefaultTopK,
-        };
-    }
-
-    private GenAIModel(string modelDir)
-    {
-        _metadata = new ChatClientMetadata("GenAIChatClient", new Uri($"file:///{modelDir}"));
-    }
-
-    public static async Task<GenAIModel?> CreateAsync(string modelDir, LlmPromptTemplate? template = null, CancellationToken cancellationToken = default)
-    {
-#pragma warning disable CA2000 // Dispose objects before losing scope
-        var model = new GenAIModel(modelDir);
-#pragma warning restore CA2000 // Dispose objects before losing scope
-
-        var lockAcquired = false;
-        try
-        {
-            // ensure we call CreateAsync one at a time to avoid fun issues
-            await _createSemaphore.WaitAsync(cancellationToken);
-            lockAcquired = true;
-            cancellationToken.ThrowIfCancellationRequested();
-            await model.InitializeAsync(modelDir, cancellationToken);
-        }
-        catch
-        {
-            model?.Dispose();
-            return null;
-        }
-        finally
-        {
-            if (lockAcquired)
-            {
-                _createSemaphore.Release();
-            }
-        }
-
-        model._template = template;
-        return model;
-    }
-
-    public static void InitializeGenAI()
-    {
-        _ogaHandle = new OgaHandle();
-    }
-
-    [MemberNotNullWhen(true, nameof(_model), nameof(_tokenizer))]
-    public bool IsReady => _model != null && _tokenizer != null;
-
-    public void Dispose()
-    {
-        _model?.Dispose();
-        _tokenizer?.Dispose();
-        _ogaHandle?.Dispose();
-    }
-
-    private string GetPrompt(IEnumerable<ChatMessage> history)
-    {
-        if (!history.Any())
-        {
-            return string.Empty;
-        }
-
-        if (_template == null)
-        {
-            return string.Join(". ", history);
-        }
-
-        StringBuilder prompt = new();
-
-        string systemMsgWithoutSystemTemplate = string.Empty;
-
-        int i = -1;
-        foreach (var message in history)
-        {
-            i++;
-            if (message.Role == ChatRole.System)
-            {
-                // ignore system prompts that aren't at the beginning
-                if (i == 0)
-                {
-                    if (string.IsNullOrWhiteSpace(_template.System))
-                    {
-                        systemMsgWithoutSystemTemplate = message.Text ?? string.Empty;
-                    }
-                    else
-                    {
-                        prompt.Append(_template.System.Replace(TEMPLATE_PLACEHOLDER, message.Text));
-                    }
-                }
-            }
-            else if (message.Role == ChatRole.User)
-            {
-                string msgText = message.Text ?? string.Empty;
-                if (i == 1 && !string.IsNullOrWhiteSpace(systemMsgWithoutSystemTemplate))
-                {
-                    msgText = $"{systemMsgWithoutSystemTemplate} {msgText}";
-                }
-
-                prompt.Append(string.IsNullOrWhiteSpace(_template.User) ?
-                    msgText :
-                    _template.User.Replace(TEMPLATE_PLACEHOLDER, msgText));
-            }
-            else if (message.Role == ChatRole.Assistant)
-            {
-                prompt.Append(string.IsNullOrWhiteSpace(_template.Assistant) ?
-                    message.Text :
-                    _template.Assistant.Replace(TEMPLATE_PLACEHOLDER, message.Text));
-            }
-        }
-
-        if (!string.IsNullOrWhiteSpace(_template.Assistant))
-        {
-            var substringIndex = _template.Assistant.IndexOf(TEMPLATE_PLACEHOLDER, StringComparison.InvariantCulture);
-            prompt.Append(_template.Assistant[..substringIndex]);
-        }
-
-        return prompt.ToString();
-    }
-
-    public Task<ChatResponse> GetResponseAsync(IEnumerable<ChatMessage> chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default) =>
-        GetStreamingResponseAsync(chatMessages, options, cancellationToken).ToChatResponseAsync(cancellationToken: cancellationToken);
-
-    public async IAsyncEnumerable<ChatResponseUpdate> GetStreamingResponseAsync(
-        IEnumerable<ChatMessage> chatMessages, ChatOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
-    {
-        var prompt = GetPrompt(chatMessages);
-
-        if (!IsReady)
-        {
-            throw new InvalidOperationException("Model is not ready");
-        }
-
-        await Task.CompletedTask.ConfigureAwait(ConfigureAwaitOptions.ForceYielding);
-
-        using var generatorParams = new GeneratorParams(_model);
-
-        using var sequences = _tokenizer.Encode(prompt);
-
-        void TransferMetadataValue(string propertyName, object defaultValue)
-        {
-            object? val = null;
-            options?.AdditionalProperties?.TryGetValue(propertyName, out val);
-
-            val ??= defaultValue;
-
-            if (val is int intVal)
-            {
-                generatorParams.SetSearchOption(propertyName, intVal);
-            }
-            else if (val is float floatVal)
-            {
-                generatorParams.SetSearchOption(propertyName, floatVal);
-            }
-            else if (val is bool boolVal)
-            {
-                generatorParams.SetSearchOption(propertyName, boolVal);
-            }
-        }
-
-        if (options != null)
-        {
-            TransferMetadataValue("min_length", DefaultMinLength);
-            TransferMetadataValue("do_sample", DefaultDoSample);
-            generatorParams.SetSearchOption("temperature", (double)(options?.Temperature ?? DefaultTemperature));
-            generatorParams.SetSearchOption("top_p", (double)(options?.TopP ?? DefaultTopP));
-            generatorParams.SetSearchOption("top_k", options?.TopK ?? DefaultTopK);
-        }
-
-        generatorParams.SetSearchOption("max_length", (options?.MaxOutputTokens ?? DefaultMaxLength) + sequences[0].Length);
-        generatorParams.TryGraphCaptureWithMaxBatchSize(1);
-
-        using var tokenizerStream = _tokenizer.CreateStream();
-        using var generator = new Generator(_model, generatorParams);
-        generator.AppendTokenSequences(sequences);
-        StringBuilder stringBuilder = new();
-        bool stopTokensAvailable = _template != null && _template.Stop != null && _template.Stop.Length > 0;
-        string responseId = Guid.NewGuid().ToString("N");
-        while (!generator.IsDone())
-        {
-            string part;
-            try
-            {
-                if (cancellationToken.IsCancellationRequested)
-                {
-                    break;
-                }
-
-                generator.GenerateNextToken();
-                part = tokenizerStream.Decode(generator.GetSequence(0)[^1]);
-
-                if (cancellationToken.IsCancellationRequested && stopTokensAvailable)
-                {
-                    part = _template!.Stop!.Last();
-                }
-
-                stringBuilder.Append(part);
-
-                if (stopTokensAvailable)
-                {
-                    var str = stringBuilder.ToString();
-                    if (_template!.Stop!.Any(str.Contains))
-                    {
-                        break;
-                    }
-                }
-            }
-            catch (Exception)
-            {
-                break;
-            }
-
-            yield return new(ChatRole.Assistant, part)
-            {
-                ResponseId = responseId,
-            };
-        }
-    }
-
-    private Task InitializeAsync(string modelDir, CancellationToken cancellationToken = default)
-    {
-        return Task.Run(
-            () =>
-            {
-                _model = new Model(modelDir);
-                cancellationToken.ThrowIfCancellationRequested();
-                _tokenizer = new Tokenizer(_model);
-            },
-            cancellationToken);
-    }
-
-    public object? GetService(Type serviceType, object? serviceKey = null)
-    {
-        return
-            serviceKey is not null ? null :
-            serviceType == typeof(ChatClientMetadata) ? _metadata :
-            _model is not null && serviceType?.IsInstanceOfType(_model) is true ? _model :
-            _tokenizer is not null && serviceType?.IsInstanceOfType(_tokenizer) is true ? _tokenizer :
-            serviceType?.IsInstanceOfType(this) is true ? this :
-            serviceType?.IsInstanceOfType(typeof(ChatOptions)) is true ? GetDefaultChatOptions() :
-            null;
-    }
-}
\ No newline at end of file
diff --git a/AIDevGallery/Samples/SharedCode/IChatClient/OnnxRuntimeGenAIChatClientFactory.cs b/AIDevGallery/Samples/SharedCode/IChatClient/OnnxRuntimeGenAIChatClientFactory.cs
new file mode 100644
index 00000000..77e00f45
--- /dev/null
+++ b/AIDevGallery/Samples/SharedCode/IChatClient/OnnxRuntimeGenAIChatClientFactory.cs
@@ -0,0 +1,139 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using Microsoft.Extensions.AI;
+using Microsoft.ML.OnnxRuntimeGenAI;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace AIDevGallery.Samples.SharedCode;
+
+internal static class OnnxRuntimeGenAIChatClientFactory
+{
+    private const string TEMPLATE_PLACEHOLDER = "{{CONTENT}}";
+
+    private const int DefaultMaxLength = 1024;
+
+    private static readonly SemaphoreSlim _createSemaphore = new(1, 1);
+    private static OgaHandle? _ogaHandle;
+
+    public static async Task<IChatClient?> CreateAsync(string modelDir, LlmPromptTemplate? template = null, CancellationToken cancellationToken = default)
+    {
+        var options = new OnnxRuntimeGenAIChatClientOptions
+        {
+            StopSequences = template?.Stop ?? Array.Empty<string>(),
+            PromptFormatter = (chatMessages, chatOptions) => GetPrompt(template, chatMessages, chatOptions)
+        };
+
+        var lockAcquired = false;
+        OnnxRuntimeGenAIChatClient? model = null;
+        try
+        {
+            // Serialize CreateAsync calls: constructing multiple native GenAI models concurrently is unsafe
+            await _createSemaphore.WaitAsync(cancellationToken);
+            lockAcquired = true;
+            cancellationToken.ThrowIfCancellationRequested();
+            await Task.Run(
+                () =>
+                {
+                    cancellationToken.ThrowIfCancellationRequested();
+                    model = new OnnxRuntimeGenAIChatClient(modelDir, options);
+                    cancellationToken.ThrowIfCancellationRequested();
+                },
+                cancellationToken);
+        }
+        catch
+        {
+            model?.Dispose();
+            return null;
+        }
+        finally
+        {
+            if (lockAcquired)
+            {
+                _createSemaphore.Release();
+            }
+        }
+
+        return (model
+            ?.AsBuilder())
+            ?.ConfigureOptions(o =>
+            {
+                o.AdditionalProperties ??= [];
+                o.AdditionalProperties["max_length"] = DefaultMaxLength;
+            })
+            ?.Build();
+    }
+
+    public static void InitializeGenAI()
+    {
+        _ogaHandle = new OgaHandle();
+    }
+
+    private static string GetPrompt(LlmPromptTemplate? template, IEnumerable<ChatMessage> history, ChatOptions? chatOptions)
+    {
+        if (!history.Any())
+        {
+            return string.Empty;
+        }
+
+        if (template == null)
+        {
+            return string.Join(". ", history);
+        }
+
+        StringBuilder prompt = new();
+
+        string systemMsgWithoutSystemTemplate = string.Empty;
+
+        for (var i = 0; i < history.Count(); i++)
+        {
+            var message = history.ElementAt(i);
+            if (message.Role == ChatRole.System)
+            {
+                // ignore system prompts that aren't at the beginning
+                if (i == 0)
+                {
+                    if (string.IsNullOrWhiteSpace(template.System))
+                    {
+                        systemMsgWithoutSystemTemplate = message.Text ?? string.Empty;
+                    }
+                    else
+                    {
+                        prompt.Append(template.System.Replace(TEMPLATE_PLACEHOLDER, message.Text));
+                    }
+                }
+            }
+            else if (message.Role == ChatRole.User)
+            {
+                string msgText = message.Text ?? string.Empty;
+                if (i == 1 && !string.IsNullOrWhiteSpace(systemMsgWithoutSystemTemplate))
+                {
+                    msgText = $"{systemMsgWithoutSystemTemplate} {msgText}";
+                }
+
+                prompt.Append(string.IsNullOrWhiteSpace(template.User) ?
+                    msgText :
+                    template.User.Replace(TEMPLATE_PLACEHOLDER, msgText));
+            }
+            else if (message.Role == ChatRole.Assistant)
+            {
+                prompt.Append(string.IsNullOrWhiteSpace(template.Assistant) ?
+                    message.Text :
+                    template.Assistant.Replace(TEMPLATE_PLACEHOLDER, message.Text));
+            }
+        }
+
+        if (!string.IsNullOrWhiteSpace(template.Assistant))
+        {
+            var substringIndex = template.Assistant.IndexOf(TEMPLATE_PLACEHOLDER, StringComparison.InvariantCulture);
+            prompt.Append(template.Assistant[..substringIndex]);
+        }
+
+        return prompt.ToString();
+    }
+}
\ No newline at end of file
diff --git a/AIDevGallery/Samples/SharedCode/IChatClient/PhiSilicaClient.cs b/AIDevGallery/Samples/SharedCode/IChatClient/PhiSilicaClient.cs
index 411b7fe8..2e51d284 100644
--- a/AIDevGallery/Samples/SharedCode/IChatClient/PhiSilicaClient.cs
+++ b/AIDevGallery/Samples/SharedCode/IChatClient/PhiSilicaClient.cs
@@ -77,7 +77,7 @@ public async IAsyncEnumerable<ChatResponseUpdate> GetStreamingResponseAsync(IEnu
         {
             yield return new(ChatRole.Assistant, part)
             {
-                ResponseId = responseId,
+                ResponseId = responseId
             };
         }
     }
diff --git a/AIDevGallery/Samples/WCRAPIs/PhiSilicaBasic.xaml.cs b/AIDevGallery/Samples/WCRAPIs/PhiSilicaBasic.xaml.cs
index 95cb3b61..1029425d 100644
--- a/AIDevGallery/Samples/WCRAPIs/PhiSilicaBasic.xaml.cs
+++ b/AIDevGallery/Samples/WCRAPIs/PhiSilicaBasic.xaml.cs
@@ -20,7 +20,7 @@ namespace AIDevGallery.Samples.WCRAPIs;
     Id = "21f2c4a5-3d8e-4b7a-9c0f-6d2e5f3b1c8d",
     Scenario = ScenarioType.TextGenerateText,
     NugetPackageReferences = [
-        "Microsoft.Extensions.AI.Abstractions"
+        "Microsoft.Extensions.AI"
     ],
     Icon = "\uEE6F")]
 internal sealed partial class PhiSilicaBasic : BaseSamplePage
diff --git a/Directory.Packages.props b/Directory.Packages.props
index 3bda7af6..57a4866b 100644
--- a/Directory.Packages.props
+++ b/Directory.Packages.props
@@ -5,7 +5,7 @@
     <PackageVersion Include="CommunityToolkit.WinUI.Helpers" Version="8.1.240916" />
     <PackageVersion Include="CommunityToolkit.WinUI.Media" Version="8.1.240916" />
     <PackageVersion Include="Microsoft.AI.DirectML" Version="1.15.4" />
-    <PackageVersion Include="Microsoft.Extensions.AI.Abstractions" Version="9.3.0-preview.1.25161.3" />
+    <PackageVersion Include="Microsoft.Extensions.AI" Version="9.3.0-preview.1.25161.3" />
     <PackageVersion Include="Microsoft.Extensions.AI.Ollama" Version="9.3.0-preview.1.25161.3" />
     <PackageVersion Include="Microsoft.SemanticKernel.Connectors.InMemory" Version="1.42.0-preview" />
     <PackageVersion Include="Microsoft.SemanticKernel.Core" Version="1.42.0" />
@@ -22,12 +22,12 @@
     <PackageVersion Include="CommunityToolkit.WinUI.Converters" Version="8.1.240916" />
     <PackageVersion Include="Microsoft.Build" Version="17.13.9" />
     <PackageVersion Include="Microsoft.ML.OnnxRuntime.DirectML" Version="1.21.0" />
-    <PackageVersion Include="Microsoft.ML.OnnxRuntime.Qnn" Version="1.20.2" />
+    <PackageVersion Include="Microsoft.ML.OnnxRuntime.Qnn" Version="1.21.0" />
     <PackageVersion Include="Microsoft.ML.OnnxRuntime.Extensions" Version="0.14.0" />
     <PackageVersion Include="Microsoft.WindowsAppSDK" Version="1.7.250127003-experimental3" />
-    <PackageVersion Include="Microsoft.ML.OnnxRuntimeGenAI" Version="0.6.0" />
-    <PackageVersion Include="Microsoft.ML.OnnxRuntimeGenAI.Managed" Version="0.6.0" />
-    <PackageVersion Include="Microsoft.ML.OnnxRuntimeGenAI.DirectML" Version="0.6.0" />
+    <PackageVersion Include="Microsoft.ML.OnnxRuntimeGenAI.Managed" Version="0.7.0-rc2" />
+    <PackageVersion Include="Microsoft.ML.OnnxRuntimeGenAI.DirectML" Version="0.7.0-rc2" />
+    <PackageVersion Include="Microsoft.ML.OnnxRuntimeGenAI.QNN" Version="0.7.0-rc2" />
     <PackageVersion Include="CommunityToolkit.WinUI.Animations" Version="8.1.240916" />
     <PackageVersion Include="CommunityToolkit.WinUI.Extensions" Version="8.1.240916" />
     <PackageVersion Include="CommunityToolkit.WinUI.Controls.Sizers" Version="8.1.240916" />