Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add NativeAOT testapp project for M.E.AI #5573

Merged
merged 3 commits into from
Oct 26, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -48,11 +48,11 @@ private static JsonSerializerOptions CreateDefaultToolJsonOptions()
{
// If reflection-based serialization is enabled by default, use it, as it's the most permissive in terms of what it can serialize,
// and we want to be flexible in terms of what can be put into the various collections in the object model.
// Otherwise, use the source-generated options to enable Native AOT.
// Otherwise, use the source-generated options to enable trimming and Native AOT.

if (JsonSerializer.IsReflectionEnabledByDefault)
{
// Keep in sync with the JsonSourceGenerationOptions on JsonContext below.
// Keep in sync with the JsonSourceGenerationOptions attribute on JsonContext above.
JsonSerializerOptions options = new(JsonSerializerDefaults.Web)
{
TypeInfoResolver = new DefaultJsonTypeInfoResolver(),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,13 @@

using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Text.Json.Serialization.Metadata;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Shared.Diagnostics;
Expand Down Expand Up @@ -587,10 +589,9 @@ private sealed class OpenAIChatToolJson
string? result = resultContent.Result as string;
if (result is null && resultContent.Result is not null)
{
JsonSerializerOptions options = ToolCallJsonSerializerOptions ?? JsonContext.Default.Options;
stephentoub marked this conversation as resolved.
Show resolved Hide resolved
try
{
result = JsonSerializer.Serialize(resultContent.Result, options.GetTypeInfo(typeof(object)));
result = JsonSerializer.Serialize(resultContent.Result, JsonContext.GetTypeInfo(typeof(object), ToolCallJsonSerializerOptions));
}
catch (NotSupportedException)
{
Expand All @@ -617,7 +618,9 @@ private sealed class OpenAIChatToolJson
ChatToolCall.CreateFunctionToolCall(
callRequest.CallId,
callRequest.Name,
BinaryData.FromObjectAsJson(callRequest.Arguments, ToolCallJsonSerializerOptions)));
new(JsonSerializer.SerializeToUtf8Bytes(
callRequest.Arguments,
JsonContext.GetTypeInfo(typeof(IDictionary<string, object?>), ToolCallJsonSerializerOptions)))));
}
}

Expand Down Expand Up @@ -670,8 +673,53 @@ private static FunctionCallContent ParseCallContentFromBinaryData(BinaryData ut8
argumentParser: static json => JsonSerializer.Deserialize(json, JsonContext.Default.IDictionaryStringObject)!);

/// <summary>Source-generated JSON type information.</summary>
/// <summary>Source-generated JSON type information.</summary>
[JsonSourceGenerationOptions(JsonSerializerDefaults.Web,
    UseStringEnumConverter = true,
    DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
    WriteIndented = true)]
[JsonSerializable(typeof(OpenAIChatToolJson))]
[JsonSerializable(typeof(IDictionary<string, object?>))]
[JsonSerializable(typeof(JsonElement))]
private sealed partial class JsonContext : JsonSerializerContext
{
    /// <summary>The <see cref="JsonSerializerOptions"/> singleton used as the default in JSON serialization operations.</summary>
    private static readonly JsonSerializerOptions _defaultToolJsonOptions = CreateDefaultToolJsonOptions();

    /// <summary>Gets JSON type information for the specified type.</summary>
    /// <param name="type">The type for which contract metadata is requested.</param>
    /// <param name="firstOptions">Caller-supplied options to consult first; may be <see langword="null"/>.</param>
    /// <returns>The resolved <see cref="JsonTypeInfo"/> for <paramref name="type"/>.</returns>
    /// <remarks>
    /// This first tries to get the type information from <paramref name="firstOptions"/>,
    /// falling back to <see cref="_defaultToolJsonOptions"/> if it can't.
    /// </remarks>
    public static JsonTypeInfo GetTypeInfo(Type type, JsonSerializerOptions? firstOptions) =>
        firstOptions?.TryGetTypeInfo(type, out JsonTypeInfo? info) is true ?
            info :
            _defaultToolJsonOptions.GetTypeInfo(type);

    /// <summary>Creates the default <see cref="JsonSerializerOptions"/> to use for serialization-related operations.</summary>
    [UnconditionalSuppressMessage("AotAnalysis", "IL3050", Justification = "DefaultJsonTypeInfoResolver is only used when reflection-based serialization is enabled")]
    [UnconditionalSuppressMessage("ReflectionAnalysis", "IL2026", Justification = "DefaultJsonTypeInfoResolver is only used when reflection-based serialization is enabled")]
    private static JsonSerializerOptions CreateDefaultToolJsonOptions()
    {
        // If reflection-based serialization is enabled by default, use it, as it's the most permissive in terms of what it can serialize,
        // and we want to be flexible in terms of what can be put into the various collections in the object model.
        // Otherwise, use the source-generated options to enable trimming and Native AOT.

        if (JsonSerializer.IsReflectionEnabledByDefault)
        {
            // Keep in sync with the JsonSourceGenerationOptions attribute on JsonContext above.
            JsonSerializerOptions options = new(JsonSerializerDefaults.Web)
            {
                TypeInfoResolver = new DefaultJsonTypeInfoResolver(),
                Converters = { new JsonStringEnumConverter() },
                DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
                WriteIndented = true,
            };

            // Freeze the shared singleton so callers can't mutate it after first use.
            options.MakeReadOnly();
            return options;
        }

        // Reflection-based serialization is disabled (trimmed / Native AOT): use the source-generated contract.
        return Default.Options;
    }
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -23,11 +23,11 @@ private static JsonSerializerOptions CreateDefaultOptions()
{
// If reflection-based serialization is enabled by default, use it, as it's the most permissive in terms of what it can serialize,
// and we want to be flexible in terms of what can be put into the various collections in the object model.
// Otherwise, use the source-generated options to enable Native AOT.
// Otherwise, use the source-generated options to enable trimming and Native AOT.

if (JsonSerializer.IsReflectionEnabledByDefault)
{
// Keep in sync with the JsonSourceGenerationOptions on JsonContext below.
// Keep in sync with the JsonSourceGenerationOptions attribute on JsonContext below.
JsonSerializerOptions options = new(JsonSerializerDefaults.Web)
{
TypeInfoResolver = new DefaultJsonTypeInfoResolver(),
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFrameworks>$(LatestTargetFramework)</TargetFrameworks>
    <!-- Publish with Native AOT so trimming/AOT warnings surface during build. -->
    <PublishAot>true</PublishAot>
    <!-- Report each trimmer warning individually rather than collapsing to one per assembly. -->
    <TrimmerSingleWarn>false</TrimmerSingleWarn>
    <!-- Any trim/AOT warning fails the build, making this project an automated compatibility check. -->
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
  </PropertyGroup>

  <ItemGroup>
    <!-- Root the libraries under test so the trimmer analyzes their entire public surface. -->
    <TrimmerRootAssembly Include="Microsoft.Extensions.AI" />
    <TrimmerRootAssembly Include="Microsoft.Extensions.AI.Abstractions" />
    <TrimmerRootAssembly Include="Microsoft.Extensions.AI.Ollama" />
    <!-- Azure.AI.Inference produces many warnings
    <TrimmerRootAssembly Include="Microsoft.Extensions.AI.AzureAIInference" />
    -->
    <!-- OpenAI produces a few warnings
    <TrimmerRootAssembly Include="Microsoft.Extensions.AI.OpenAI" />
    -->

    <!-- Derive each ProjectReference from the rooted assembly's identity to keep the two lists in sync. -->
    <TrimmerRootAssembly Update="@(TrimmerRootAssembly)" Path="$(RepoRoot)\src\Libraries\%(Identity)\%(Identity).csproj" />
    <ProjectReference Include="@(TrimmerRootAssembly->'%(Path)')" />
  </ItemGroup>

</Project>
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.

// Smoke-test entry point: constructs one type from each M.E.AI library so that
// those code paths are rooted when this app is published with Native AOT.
// NOTE(review): the exact set of calls below is what gets exercised by the
// trimmer/AOT analysis — do not remove or reorder them without confirming the
// compatibility coverage is preserved.

#pragma warning disable S125 // Remove this commented out code

using Microsoft.Extensions.AI;

// Use types from each library.

// Microsoft.Extensions.AI.Ollama
using var b = new OllamaChatClient("http://localhost:11434", "llama3.2");

// Microsoft.Extensions.AI.AzureAIInference
// using var a = new Azure.AI.Inference.ChatCompletionClient(new Uri("http://localhost"), new("apikey")); // uncomment once warnings in Azure.AI.Inference are addressed

// Microsoft.Extensions.AI.OpenAI
// using var c = new OpenAI.OpenAIClient("apikey").AsChatClient("gpt-4o-mini"); // uncomment once warnings in OpenAI are addressed

// Microsoft.Extensions.AI
AIFunctionFactory.Create(() => { });

// Reaching this line means construction and startup succeeded under AOT.
System.Console.WriteLine("Success!");
Loading