Add NativeAOT testapp project for M.E.AI #5573

Merged · 3 commits · Oct 26, 2024
Changes from 1 commit
@@ -3,11 +3,13 @@
 
 using System;
 using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
 using System.Reflection;
 using System.Runtime.CompilerServices;
 using System.Text;
 using System.Text.Json;
 using System.Text.Json.Serialization;
+using System.Text.Json.Serialization.Metadata;
 using System.Threading;
 using System.Threading.Tasks;
 using Microsoft.Shared.Diagnostics;
@@ -587,10 +589,9 @@ private sealed class OpenAIChatToolJson
                 string? result = resultContent.Result as string;
                 if (result is null && resultContent.Result is not null)
                 {
-                    JsonSerializerOptions options = ToolCallJsonSerializerOptions ?? JsonContext.Default.Options;
                     try
                     {
-                        result = JsonSerializer.Serialize(resultContent.Result, options.GetTypeInfo(typeof(object)));
+                        result = JsonSerializer.Serialize(resultContent.Result, JsonContext.GetTypeInfo(typeof(object), ToolCallJsonSerializerOptions));
                     }
                     catch (NotSupportedException)
                     {
@@ -617,7 +618,9 @@ private sealed class OpenAIChatToolJson
                     ChatToolCall.CreateFunctionToolCall(
                         callRequest.CallId,
                         callRequest.Name,
-                        BinaryData.FromObjectAsJson(callRequest.Arguments, ToolCallJsonSerializerOptions)));
+                        new(JsonSerializer.SerializeToUtf8Bytes(
+                            callRequest.Arguments,
+                            JsonContext.GetTypeInfo(typeof(IDictionary<string, object?>), ToolCallJsonSerializerOptions)))));
             }
         }
 
@@ -670,8 +673,53 @@ private static FunctionCallContent ParseCallContentFromBinaryData(BinaryData ut8
             argumentParser: static json => JsonSerializer.Deserialize(json, JsonContext.Default.IDictionaryStringObject)!);
 
     /// <summary>Source-generated JSON type information.</summary>
+    [JsonSourceGenerationOptions(JsonSerializerDefaults.Web,
+        UseStringEnumConverter = true,
+        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
+        WriteIndented = true)]
     [JsonSerializable(typeof(OpenAIChatToolJson))]
     [JsonSerializable(typeof(IDictionary<string, object?>))]
     [JsonSerializable(typeof(JsonElement))]
-    private sealed partial class JsonContext : JsonSerializerContext;
+    private sealed partial class JsonContext : JsonSerializerContext
+    {
+        /// <summary>Gets the <see cref="JsonSerializerOptions"/> singleton used as the default in JSON serialization operations.</summary>
+        private static readonly JsonSerializerOptions _defaultToolJsonOptions = CreateDefaultToolJsonOptions();
+
+        /// <summary>Gets JSON type information for the specified type.</summary>
+        /// <remarks>
+        /// This first tries to get the type information from <paramref name="firstOptions"/>,
+        /// falling back to <see cref="_defaultToolJsonOptions"/> if it can't.
+        /// </remarks>
+        public static JsonTypeInfo GetTypeInfo(Type type, JsonSerializerOptions? firstOptions) =>
+            firstOptions?.TryGetTypeInfo(type, out JsonTypeInfo? info) is true ?
+                info :
+                _defaultToolJsonOptions.GetTypeInfo(type);
+
+        /// <summary>Creates the default <see cref="JsonSerializerOptions"/> to use for serialization-related operations.</summary>
+        [UnconditionalSuppressMessage("AotAnalysis", "IL3050", Justification = "DefaultJsonTypeInfoResolver is only used when reflection-based serialization is enabled")]
+        [UnconditionalSuppressMessage("ReflectionAnalysis", "IL2026", Justification = "DefaultJsonTypeInfoResolver is only used when reflection-based serialization is enabled")]
+        private static JsonSerializerOptions CreateDefaultToolJsonOptions()
+        {
+            // If reflection-based serialization is enabled by default, use it, as it's the most permissive in terms of what it can serialize,
+            // and we want to be flexible in terms of what can be put into the various collections in the object model.
+            // Otherwise, use the source-generated options to enable Native AOT.
+
+            if (JsonSerializer.IsReflectionEnabledByDefault)
+            {
+                // Keep in sync with the JsonSourceGenerationOptions on JsonContext below.
+                JsonSerializerOptions options = new(JsonSerializerDefaults.Web)
+                {
+                    TypeInfoResolver = new DefaultJsonTypeInfoResolver(),
+                    Converters = { new JsonStringEnumConverter() },
+                    DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
+                    WriteIndented = true,
+                };
+
+                options.MakeReadOnly();
+                return options;
+            }
+
+            return Default.Options;
+        }
+    }
 }
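
The heart of this change is `JsonContext.GetTypeInfo`: caller-supplied serializer options win whenever they can resolve a type, and the source-generated (or, where available, reflection-based) defaults cover the rest, keeping the path Native AOT compatible. A minimal standalone sketch of the same pattern; `Widget` and `AppJsonContext` are illustrative stand-ins, not types from this PR:

```csharp
using System;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Text.Json.Serialization.Metadata;

// No caller-supplied options here, so resolution falls through to the
// source-generated metadata, which is what makes this safe under Native AOT.
Console.WriteLine(JsonSerializer.Serialize(
    new Widget("demo"), Demo.GetTypeInfo(typeof(Widget), firstOptions: null))); // {"Name":"demo"}

internal static class Demo
{
    // Prefer type info from the caller's options; fall back to the
    // source-generated context, mirroring the PR's JsonContext.GetTypeInfo.
    public static JsonTypeInfo GetTypeInfo(Type type, JsonSerializerOptions? firstOptions) =>
        firstOptions?.TryGetTypeInfo(type, out JsonTypeInfo? info) is true
            ? info
            : AppJsonContext.Default.Options.GetTypeInfo(type);
}

internal sealed record Widget(string Name);

// Illustrative source-generated context standing in for the PR's JsonContext.
[JsonSerializable(typeof(Widget))]
internal sealed partial class AppJsonContext : JsonSerializerContext;
```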
@@ -0,0 +1,36 @@
<Project Sdk="Microsoft.NET.Sdk">

<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFrameworks>net9.0</TargetFrameworks>
<PublishAot>true</PublishAot>
<TrimmerSingleWarn>false</TrimmerSingleWarn>
<SelfContained>true</SelfContained>
</PropertyGroup>

<PropertyGroup>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>

<ItemGroup>
<ProjectReference Include="..\..\..\src\Libraries\Microsoft.Extensions.AI\Microsoft.Extensions.AI.csproj" />
<TrimmerRootAssembly Include="Microsoft.Extensions.AI" />

<ProjectReference Include="..\..\..\src\Libraries\Microsoft.Extensions.AI.Abstractions\Microsoft.Extensions.AI.Abstractions.csproj" />
<TrimmerRootAssembly Include="Microsoft.Extensions.AI.Abstractions" />

<!-- Azure.AI.Inference produces many warnings
<ProjectReference Include="..\..\..\src\Libraries\Microsoft.Extensions.AI.AzureAIInference\Microsoft.Extensions.AI.AzureAIInference.csproj" />
<TrimmerRootAssembly Include="Microsoft.Extensions.AI.AzureAIInference" />
-->

<ProjectReference Include="..\..\..\src\Libraries\Microsoft.Extensions.AI.Ollama\Microsoft.Extensions.AI.Ollama.csproj" />
<TrimmerRootAssembly Include="Microsoft.Extensions.AI.Ollama" />

<!-- OpenAI produces a few warnings
<ProjectReference Include="..\..\..\src\Libraries\Microsoft.Extensions.AI.OpenAI\Microsoft.Extensions.AI.OpenAI.csproj" />
<TrimmerRootAssembly Include="Microsoft.Extensions.AI.OpenAI" />
-->
</ItemGroup>

</Project>
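
A note on the project file above: `<TrimmerRootAssembly>` roots each referenced library so the trimmer and the Native AOT compiler analyze its entire surface, not just the code paths this test app happens to call. Combined with `<PublishAot>` and `<TreatWarningsAsErrors>`, the apparent intent is that any trim or AOT warning anywhere in the rooted assemblies fails the `dotnet publish` of this test app.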
@@ -0,0 +1,24 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.

#pragma warning disable CA1031 // Do not catch general exception types
#pragma warning disable S108 // Nested blocks of code should not be left empty
#pragma warning disable S125 // Remove this commented out code

using Microsoft.Extensions.AI;

// Use types from each library

// Microsoft.Extensions.AI.AzureAIInference
// using var a = new Azure.AI.Inference.ChatCompletionClient(new Uri("http://localhost"), new("apikey")); // uncomment once warnings in Azure.AI.Inference are addressed

// Microsoft.Extensions.AI.OpenAI
// using var c = new OpenAI.OpenAIClient("apikey").AsChatClient("gpt-4o-mini"); // uncomment once warnings in OpenAI are addressed

// Microsoft.Extensions.AI.Ollama
using var b = new OllamaChatClient("http://localhost:11434", "llama3.2");

// Microsoft.Extensions.AI
AIFunctionFactory.Create(() => { });

System.Console.WriteLine("Success!");
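
The `AIFunctionFactory.Create(() => { })` call above is just enough to drag the reflection-heavy factory path through the AOT compiler. For a sense of what that path does in real use, here is a sketch against the preview-era API this PR targets; the `add` function and its arguments are illustrative, and the `InvokeAsync` overload taking name/value argument pairs is an assumption about that preview surface:

```csharp
using System;
using System.Collections.Generic;
using Microsoft.Extensions.AI;

// Create an AIFunction from a lambda; the factory inspects the delegate's
// signature to build the schema and argument marshalling that tool calling uses.
AIFunction add = AIFunctionFactory.Create((int x, int y) => x + y);

// Invoke it the way a tool-call loop would, binding arguments by name.
object? sum = await add.InvokeAsync(new Dictionary<string, object?> { ["x"] = 2, ["y"] = 3 });
Console.WriteLine(sum); // 5
```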