
Commit a03ee5b

Update Azure.AI.Inference to 1.0.0-beta.3 (#5904)
1 parent f225a93 commit a03ee5b

4 files changed: +125 additions, -41 deletions

eng/packages/General.props

Lines changed: 1 addition & 1 deletion

@@ -2,7 +2,7 @@
 <Project xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
   <ItemGroup>
     <PackageVersion Include="Azure.Storage.Files.DataLake" Version="12.21.0" />
-    <PackageVersion Include="Azure.AI.Inference" Version="1.0.0-beta.2" />
+    <PackageVersion Include="Azure.AI.Inference" Version="1.0.0-beta.3" />
     <PackageVersion Include="ICSharpCode.Decompiler" Version="8.2.0.7535" />
     <PackageVersion Include="Microsoft.Bcl.HashCode" Version="1.1.1" />
     <PackageVersion Include="Microsoft.CodeAnalysis.Analyzers" Version="$(MicrosoftCodeAnalysisAnalyzersVersion)" />

src/Libraries/Microsoft.Extensions.AI.AzureAIInference/AzureAIInferenceChatClient.cs

Lines changed: 60 additions & 3 deletions

@@ -33,6 +33,9 @@ public sealed class AzureAIInferenceChatClient : IChatClient
     /// <summary>The <see cref="JsonSerializerOptions"/> use for any serialization activities related to tool call arguments and results.</summary>
     private JsonSerializerOptions _toolCallJsonSerializerOptions = AIJsonUtilities.DefaultOptions;
 
+    /// <summary>Gets a ChatRole.Developer value.</summary>
+    private static ChatRole ChatRoleDeveloper { get; } = new("developer");
+
     /// <summary>Initializes a new instance of the <see cref="AzureAIInferenceChatClient"/> class for the specified <see cref="ChatCompletionsClient"/>.</summary>
     /// <param name="chatCompletionsClient">The underlying client.</param>
     /// <param name="modelId">The ID of the model to use. If null, it can be provided per request via <see cref="ChatOptions.ModelId"/>.</param>

@@ -273,6 +276,7 @@ private static ChatRole ToChatRole(global::Azure.AI.Inference.ChatRole role) =>
         role.Equals(global::Azure.AI.Inference.ChatRole.User) ? ChatRole.User :
         role.Equals(global::Azure.AI.Inference.ChatRole.Assistant) ? ChatRole.Assistant :
         role.Equals(global::Azure.AI.Inference.ChatRole.Tool) ? ChatRole.Tool :
+        role.Equals(global::Azure.AI.Inference.ChatRole.Developer) ? ChatRoleDeveloper :
         new ChatRole(role.ToString());
 
     /// <summary>Converts an AzureAI finish reason to an Extensions finish reason.</summary>

@@ -365,17 +369,40 @@ private ChatCompletionsOptions ToAzureAIOptions(IList<ChatMessage> chatContents,
 
             if (options.ResponseFormat is ChatResponseFormatText)
             {
-                result.ResponseFormat = new ChatCompletionsResponseFormatText();
+                result.ResponseFormat = ChatCompletionsResponseFormat.CreateTextFormat();
             }
-            else if (options.ResponseFormat is ChatResponseFormatJson)
+            else if (options.ResponseFormat is ChatResponseFormatJson json)
             {
-                result.ResponseFormat = new ChatCompletionsResponseFormatJSON();
+                if (json.Schema is { } schema)
+                {
+                    var tool = JsonSerializer.Deserialize(schema, JsonContext.Default.AzureAIChatToolJson)!;
+                    result.ResponseFormat = ChatCompletionsResponseFormat.CreateJsonFormat(
+                        json.SchemaName ?? "json_schema",
+                        new Dictionary<string, BinaryData>
+                        {
+                            ["type"] = _objectString,
+                            ["properties"] = BinaryData.FromBytes(JsonSerializer.SerializeToUtf8Bytes(tool.Properties, JsonContext.Default.DictionaryStringJsonElement)),
+                            ["required"] = BinaryData.FromBytes(JsonSerializer.SerializeToUtf8Bytes(tool.Required, JsonContext.Default.ListString)),
+                            ["additionalProperties"] = _falseString,
+                        },
+                        json.SchemaDescription);
+                }
+                else
+                {
+                    result.ResponseFormat = ChatCompletionsResponseFormat.CreateJsonFormat();
+                }
             }
         }
 
         return result;
     }
 
+    /// <summary>Cached <see cref="BinaryData"/> for "object".</summary>
+    private static readonly BinaryData _objectString = BinaryData.FromString("\"object\"");
+
+    /// <summary>Cached <see cref="BinaryData"/> for "false".</summary>
+    private static readonly BinaryData _falseString = BinaryData.FromString("false");
+
     /// <summary>Converts an Extensions function to an AzureAI chat tool.</summary>
     private static ChatCompletionsToolDefinition ToAzureAIChatTool(AIFunction aiFunction)
     {

@@ -401,6 +428,10 @@ private IEnumerable<ChatRequestMessage> ToAzureAIInferenceChatMessages(IList<Cha
             {
                 yield return new ChatRequestSystemMessage(input.Text ?? string.Empty);
             }
+            else if (input.Role == ChatRoleDeveloper)
+            {
+                yield return new ChatRequestDeveloperMessage(input.Text ?? string.Empty);
+            }
             else if (input.Role == ChatRole.Tool)
             {
                 foreach (AIContent item in input.Contents)

@@ -477,6 +508,32 @@ private static List<ChatMessageContentItem> GetContentParts(IList<AIContent> con
                         parts.Add(new ChatMessageImageContentItem(new Uri(uri)));
                     }
 
+                    break;
+
+                case DataContent dataContent when dataContent.MediaTypeStartsWith("audio/"):
+                    if (dataContent.Data.HasValue)
+                    {
+                        AudioContentFormat format;
+                        if (dataContent.MediaTypeStartsWith("audio/mpeg"))
+                        {
+                            format = AudioContentFormat.Mp3;
+                        }
+                        else if (dataContent.MediaTypeStartsWith("audio/wav"))
+                        {
+                            format = AudioContentFormat.Wav;
+                        }
+                        else
+                        {
+                            break;
+                        }
+
+                        parts.Add(new ChatMessageAudioContentItem(BinaryData.FromBytes(dataContent.Data.Value), format));
+                    }
+                    else if (dataContent.Uri is string uri)
+                    {
+                        parts.Add(new ChatMessageAudioContentItem(new Uri(uri)));
+                    }
+
                     break;
             }
         }
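
For context, here is a minimal sketch of the beta.3 request surface the adapter above now emits: the ChatCompletionsResponseFormat factory methods, the ChatRequestDeveloperMessage type, and ChatMessageAudioContentItem. Only the members that appear in the diff are confirmed by this change; the ChatCompletionsOptions construction pattern around them follows the Azure.AI.Inference samples and is included here purely for illustration.

using System;
using System.Collections.Generic;
using System.IO;
using Azure.AI.Inference;

// Illustrative only: exercises the request-side beta.3 APIs the adapter now calls.
var request = new ChatCompletionsOptions()
{
    Messages =
    {
        // "developer" role messages now map to a dedicated request message type.
        new ChatRequestDeveloperMessage("Answer concisely."),
        new ChatRequestUserMessage("Describe the attached audio."),
    },
};

// Structured output: schema name, schema fragments as BinaryData, optional description.
request.ResponseFormat = ChatCompletionsResponseFormat.CreateJsonFormat(
    "DescribedObject",
    new Dictionary<string, BinaryData>
    {
        ["type"] = BinaryData.FromString("\"object\""),
        ["properties"] = BinaryData.FromString("""{"description":{"type":"string"}}"""),
        ["required"] = BinaryData.FromString("""["description"]"""),
        ["additionalProperties"] = BinaryData.FromString("false"),
    },
    "An object with a description");

// Audio input content items take raw bytes plus an AudioContentFormat, or a URI;
// the adapter would attach such an item to a user message's content parts.
var audioPart = new ChatMessageAudioContentItem(
    BinaryData.FromBytes(File.ReadAllBytes("clip.mp3")), AudioContentFormat.Mp3);

On the response side, ToChatRole now maps the service's developer role back to the same ChatRoleDeveloper value, so round-tripping preserves the role.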

src/Libraries/Microsoft.Extensions.AI.AzureAIInference/Microsoft.Extensions.AI.AzureAIInference.csproj

Lines changed: 0 additions & 1 deletion

@@ -29,7 +29,6 @@
 
   <ItemGroup>
     <PackageReference Include="Azure.AI.Inference" />
-    <PackageReference Include="Microsoft.Bcl.AsyncInterfaces" />
     <PackageReference Include="System.Memory.Data" />
     <PackageReference Include="System.Text.Json" />
   </ItemGroup>

test/Libraries/Microsoft.Extensions.AI.AzureAIInference.Tests/AzureAIInferenceChatClientTests.cs

Lines changed: 64 additions & 36 deletions

@@ -107,7 +107,12 @@ public void GetService_SuccessfullyReturnsUnderlyingClient()
     public async Task BasicRequestResponse_NonStreaming(bool multiContent)
     {
         const string Input = """
-            {"messages":[{"content":"hello","role":"user"}],"max_tokens":10,"temperature":0.5,"model":"gpt-4o-mini"}
+            {
+                "messages": [{"role":"user", "content":"hello"}],
+                "max_tokens":10,
+                "temperature":0.5,
+                "model":"gpt-4o-mini"
+            }
             """;
 
         const string Output = """

@@ -178,7 +183,12 @@ [new ChatMessage(ChatRole.User, "hello".Select(c => (AIContent)new TextContent(c
     public async Task BasicRequestResponse_Streaming(bool multiContent)
     {
         const string Input = """
-            {"messages":[{"content":"hello","role":"user"}],"max_tokens":20,"temperature":0.5,"stream":true,"model":"gpt-4o-mini"}
+            {
+                "messages": [{"role":"user", "content":"hello"}],
+                "max_tokens":20,
+                "temperature":0.5,
+                "stream":true,
+                "model":"gpt-4o-mini"}
             """;
 
         const string Output = """

@@ -248,7 +258,7 @@ public async Task AdditionalOptions_NonStreaming()
     {
         const string Input = """
             {
-                "messages":[{"content":"hello","role":"user"}],
+                "messages":[{"role":"user", "content":"hello"}],
                 "max_tokens":10,
                 "temperature":0.5,
                 "top_p":0.5,

@@ -305,7 +315,7 @@ public async Task ResponseFormat_Text_NonStreaming()
     {
         const string Input = """
             {
-                "messages":[{"content":"hello","role":"user"}],
+                "messages":[{"role":"user", "content":"hello"}],
                 "model":"gpt-4o-mini",
                 "response_format":{"type":"text"}
             }

@@ -341,7 +351,7 @@ public async Task ResponseFormat_Json_NonStreaming()
     {
         const string Input = """
             {
-                "messages":[{"content":"hello","role":"user"}],
+                "messages":[{"role":"user", "content":"hello"}],
                 "model":"gpt-4o-mini",
                 "response_format":{"type":"json_object"}
             }

@@ -375,14 +385,32 @@ public async Task ResponseFormat_Json_NonStreaming()
     [Fact]
     public async Task ResponseFormat_JsonSchema_NonStreaming()
     {
-        // NOTE: Azure.AI.Inference doesn't yet expose JSON schema support, so it's currently
-        // mapped to "json_object" for the time being.
-
         const string Input = """
             {
-                "messages":[{"content":"hello","role":"user"}],
+                "messages":[{"role":"user", "content":"hello"}],
                 "model":"gpt-4o-mini",
-                "response_format":{"type":"json_object"}
+                "response_format":
+                {
+                    "type":"json_schema",
+                    "json_schema":
+                    {
+                        "name": "DescribedObject",
+                        "schema":
+                        {
+                            "type":"object",
+                            "properties":
+                            {
+                                "description":
+                                {
+                                    "type":"string"
+                                }
+                            },
+                            "required":["description"],
+                            "additionalProperties":false
+                        },
+                        "description":"An object with a description"
+                    }
+                }
             }
             """;
 

@@ -428,30 +456,30 @@ public async Task MultipleMessages_NonStreaming()
            {
                "messages": [
                    {
-                        "content": "You are a really nice friend.",
-                        "role": "system"
+                        "role": "system",
+                        "content": "You are a really nice friend."
                    },
                    {
-                        "content": "hello!",
-                        "role": "user"
+                        "role": "user",
+                        "content": "hello!"
                    },
                    {
-                        "content": "hi, how are you?",
-                        "role": "assistant"
+                        "role": "assistant",
+                        "content": "hi, how are you?"
                    },
                    {
-                        "content": "i\u0027m good. how are you?",
-                        "role": "user"
+                        "role": "user",
+                        "content": "i\u0027m good. how are you?"
                    },
                    {
+                        "role": "assistant",
                        "content": "",
-                        "tool_calls": [{"id":"abcd123","type":"function","function":{"name":"GetMood","arguments":"null"}}],
-                        "role": "assistant"
+                        "tool_calls": [{"id":"abcd123","type":"function","function":{"name":"GetMood","arguments":"null"}}]
                    },
                    {
+                        "role": "tool",
                        "content": "happy",
-                        "tool_call_id": "abcd123",
-                        "role": "tool"
+                        "tool_call_id": "abcd123"
                    }
                ],
                "temperature": 0.25,

@@ -544,21 +572,21 @@ public async Task MultipleContent_NonStreaming()
                "messages":
                [
                    {
+                        "role": "user",
                        "content":
                        [
                            {
-                                "text": "Describe this picture.",
-                                "type": "text"
+                                "type": "text",
+                                "text": "Describe this picture."
                            },
                            {
+                                "type": "image_url",
                                "image_url":
                                {
                                    "url": "http://dot.net/someimage.png"
-                                },
-                                "type": "image_url"
+                                }
                            }
-                        ],
-                        "role":"user"
+                        ]
                    }
                ],
                "model": "gpt-4o-mini"

@@ -598,12 +626,12 @@ public async Task NullAssistantText_ContentEmpty_NonStreaming()
            {
                "messages": [
                    {
-                        "content": "",
-                        "role": "assistant"
+                        "role": "assistant",
+                        "content": ""
                    },
                    {
-                        "content": "hello!",
-                        "role": "user"
+                        "role": "user",
+                        "content": "hello!"
                    }
                ],
                "model": "gpt-4o-mini"

@@ -686,8 +714,8 @@ public async Task FunctionCallContent_NonStreaming(ChatToolMode mode)
            {
                "messages": [
                    {
-                        "content": "How old is Alice?",
-                        "role": "user"
+                        "role": "user",
+                        "content": "How old is Alice?"
                    }
                ],
                "model": "gpt-4o-mini",

@@ -797,8 +825,8 @@ public async Task FunctionCallContent_Streaming()
            {
                "messages": [
                    {
-                        "content": "How old is Alice?",
-                        "role": "user"
+                        "role": "user",
+                        "content": "How old is Alice?"
                    }
                ],
                "stream": true,
