
Commit 7772cf3

Merge pull request #582 from betalgo/dev
8.3.0
2 parents b55cbaa + 7b3f36c commit 7772cf3

20 files changed: 288 additions, 176 deletions

OpenAI.Playground/Program.cs

Lines changed: 2 additions & 1 deletion
@@ -49,11 +49,12 @@
 //await AssistantTestHelper.MessagesTestHelper.RunTests(sdk);
 //await AssistantTestHelper.RunTestHelper.RunTests(sdk);
 //await AssistantTestHelper.VectorTestHelper.RunTests(sdk);
+//await AssistantTestHelper3.RunTests(sdk);
 
 // Vision
 //await VisionTestHelper.RunSimpleVisionTest(sdk);
-//await VisionTestHelper.RunSimpleVisionStreamTest(sdk);
 //await VisionTestHelper.RunSimpleVisionTestUsingBase64EncodedImage(sdk);
+//await VisionTestHelper.RunSimpleVisionStreamTest(sdk);
 
 // Tools
 //await ChatCompletionTestHelper.RunChatFunctionCallTest(sdk);
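For context, the newly referenced helper is exercised by uncommenting the added line; its RunTests method simply chains the two assistant scenarios, as the AssistantTestHelper3 diff below shows. A one-line sketch, assuming the sdk instance that Program.cs builds earlier:

// Sketch only - uncommented form of the line added above; sdk is the IOpenAIService built earlier in Program.cs.
await AssistantTestHelper3.RunTests(sdk);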

OpenAI.Playground/TestHelpers/AssistantHelpers/AssistantTestHelper3.cs

Lines changed: 18 additions & 5 deletions
@@ -10,6 +10,12 @@ namespace OpenAI.Playground.TestHelpers.AssistantHelpers;
 
 internal static class AssistantTestHelper3
 {
+    public static async Task RunTests(IOpenAIService sdk)
+    {
+        await RunAssistantApiTest(sdk);
+        await RunHowAssistantsWorkTest(sdk);
+    }
+
     /// <summary>
     /// Test Assistant api
     /// </summary>
@@ -133,8 +139,11 @@ public static async Task RunHowAssistantsWorkTest(IOpenAIService sdk)
         var sampleFile = await FileExtensions.ReadAllBytesAsync($"SampleData/{fileName}");
         var sampleFileAsString = Encoding.UTF8.GetString(sampleFile);
 
+        // HACK: drp052424 - CSV format is not supported for Assistant, so we tell OpenAI it's a TXT. A better solution would be to update the test asset.
+        var fileNameForOpenAI = Path.ChangeExtension(fileName, ".txt");
+
         ConsoleExtensions.WriteLine($"Uploading file: {fileName}", ConsoleColor.DarkCyan);
-        var uploadFilesResponse = await sdk.Files.FileUpload(UploadFilePurposes.UploadFilePurpose.Assistants, sampleFile, fileName);
+        var uploadFilesResponse = await sdk.Files.FileUpload(UploadFilePurposes.UploadFilePurpose.Assistants, sampleFile, fileNameForOpenAI);
         if (uploadFilesResponse.Successful)
         {
             ConsoleExtensions.WriteLine($"{fileName} uploaded", ConsoleColor.DarkGreen);
@@ -145,8 +154,8 @@ public static async Task RunHowAssistantsWorkTest(IOpenAIService sdk)
             return;
         }
 
-        var uplaodFileId = uploadFilesResponse.Id;
-        ConsoleExtensions.WriteLine($"uplaodFileId:{uplaodFileId}, purpose:{uploadFilesResponse.Purpose}");
+        var uploadFileId = uploadFilesResponse.Id;
+        ConsoleExtensions.WriteLine($"uploadFileId:{uploadFileId}, purpose:{uploadFilesResponse.Purpose}");
 
         #endregion
 
@@ -163,7 +172,6 @@ public static async Task RunHowAssistantsWorkTest(IOpenAIService sdk)
             Name = "Qicha",
             Model = Models.Gpt_3_5_Turbo_1106,
             Tools = new List<ToolDefinition>() { ToolDefinition.DefineCodeInterpreter(), ToolDefinition.DefineFileSearch(), ToolDefinition.DefineFunction(func) },
-            FileIds = new List<string>() { uplaodFileId }
         });
 
         if (assistantResult.Successful)
@@ -207,7 +215,12 @@ public static async Task RunHowAssistantsWorkTest(IOpenAIService sdk)
        {
            Role = StaticValues.AssistantsStatics.MessageStatics.Roles.User,
            Content =new("Where is Zhejiang Jiacheng Supply Chain Co., LTD."),
-           Attachments = [new() { FileId = uplaodFileId }]
+           // Tools must be specified for Attachments
+           Attachments = [new()
+           {
+               FileId = uploadFileId,
+               Tools = [ ToolDefinition.DefineFileSearch() ]
+           }]
        });
 
        if (messageResult.Successful)
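The attachment change in the last hunk is the behavioural core of this file: the assistant no longer takes FileIds, and each message attachment must now carry its own tool list. A minimal sketch of the resulting call shape, assuming an initialized IOpenAIService (sdk); threadId and uploadFileId are placeholders for values obtained earlier in the test flow, not values from this commit:

// Sketch only - mirrors the attachment pattern introduced above; threadId and uploadFileId are placeholders.
var messageResult = await sdk.Beta.Messages.CreateMessage(threadId, new MessageCreateRequest
{
    Role = StaticValues.AssistantsStatics.MessageStatics.Roles.User,
    Content = new("Where is Zhejiang Jiacheng Supply Chain Co., LTD."),
    // Tools must be specified for Attachments, hence DefineFileSearch() on the attachment itself.
    Attachments = [new() { FileId = uploadFileId, Tools = [ToolDefinition.DefineFileSearch()] }]
});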

OpenAI.Playground/TestHelpers/AssistantHelpers/MessagesTestHelper.cs

Lines changed: 65 additions & 1 deletion
@@ -1,7 +1,9 @@
 using OpenAI.Interfaces;
 using OpenAI.ObjectModels;
+using OpenAI.ObjectModels.RequestModels;
 using OpenAI.ObjectModels.SharedModels;
 using OpenAI.Playground.ExtensionsAndHelpers;
+using static OpenAI.ObjectModels.StaticValues;
 
 namespace OpenAI.Playground.TestHelpers.AssistantHelpers;
 
@@ -11,11 +13,13 @@ internal static partial class MessagesTestHelper
 {
     private static string? CreatedMessageId { get; set; }
     private static string? CreatedThreadId { get; set; }
+    private static string? CreatedFileId { get; set; }
 
     public static async Task RunTests(IOpenAIService openAI)
     {
         ConsoleExtensions.WriteLine("Message Basics Testing is starting:", ConsoleColor.Blue);
         await CreateMessage(openAI);
+        await CreateMessageWithImage(openAI);
         await ListMessages(openAI);
         await RetrieveMessage(openAI);
         await ModifyMessage(openAI);
@@ -39,7 +43,62 @@ public static async Task CreateMessage(IOpenAIService openAI)
         }
 
         CreatedThreadId = thread.Id;
-        var result = await openAI.Beta.Messages.CreateMessage(CreatedThreadId, new(StaticValues.AssistantsStatics.MessageStatics.Roles.User, new("How does AI work? Explain it in simple terms.")));
+        var result = await openAI.Beta.Messages.CreateMessage(CreatedThreadId, new(AssistantsStatics.MessageStatics.Roles.User, new("How does AI work? Explain it in simple terms.")));
+        if (result.Successful)
+        {
+            CreatedMessageId = result.Id;
+            ConsoleExtensions.WriteLine($"Message Created Successfully with ID: {result.Id}", ConsoleColor.Green);
+        }
+        else
+        {
+            ConsoleExtensions.WriteError(result.Error);
+        }
+    }
+
+    public static async Task CreateMessageWithImage(IOpenAIService openAI)
+    {
+        ConsoleExtensions.WriteLine("Create MessageWithImage Testing is starting:", ConsoleColor.Cyan);
+
+        var prompt = "Tell me about this image";
+        var filename = "image_edit_original.png";
+        var filePath = $"SampleData/{filename}";
+
+        var sampleBytes = await FileExtensions.ReadAllBytesAsync(filePath);
+
+        // Upload File
+        ConsoleExtensions.WriteLine("Upload File Test", ConsoleColor.DarkCyan);
+
+        ConsoleExtensions.WriteLine($"Uploading file: {filename}", ConsoleColor.DarkCyan);
+        var uploadFilesResponse = await openAI.Files.FileUpload(UploadFilePurposes.UploadFilePurpose.Vision, sampleBytes, filename);
+        if (uploadFilesResponse.Successful)
+        {
+            ConsoleExtensions.WriteLine($"{filename} uploaded", ConsoleColor.DarkGreen);
+        }
+        else
+        {
+            ConsoleExtensions.WriteLine($"{filename} failed", ConsoleColor.DarkRed);
+            return;
+        }
+
+        var uploadFileId = uploadFilesResponse.Id;
+        ConsoleExtensions.WriteLine($"uploadFileId:{uploadFileId}, purpose:{uploadFilesResponse.Purpose}");
+
+
+        // Message.ImageFileContent
+        ConsoleExtensions.WriteLine("Message with ImageFileContent Test:", ConsoleColor.DarkCyan);
+
+        MessageContentOneOfType content = new([
+            MessageContent.TextContent(prompt),
+            MessageContent.ImageFileContent(uploadFileId, ImageStatics.ImageDetailTypes.High)
+        ]);
+
+        MessageCreateRequest request = new()
+        {
+            Role = AssistantsStatics.MessageStatics.Roles.User,
+            Content = content
+        };
+
+        var result = await openAI.Beta.Messages.CreateMessage(CreatedThreadId!, request);
         if (result.Successful)
         {
             CreatedMessageId = result.Id;
@@ -203,6 +262,11 @@ private static async Task Cleanup(IOpenAIService sdk)
         {
             await sdk.Beta.Threads.ThreadDelete(CreatedThreadId);
         }
+
+        if (!string.IsNullOrWhiteSpace(CreatedFileId))
+        {
+            await sdk.Files.DeleteFile(CreatedFileId);
+        }
     }
 }
 }
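Condensed, the new CreateMessageWithImage flow added above is: upload the image with the Vision purpose, wrap the prompt and the uploaded file ID in a MessageContentOneOfType, and create the message on the existing thread. A minimal sketch under those assumptions; openAI (an IOpenAIService), threadId, and imageBytes are placeholders, not values from this commit:

// Sketch only - condenses the CreateMessageWithImage test added above.
// openAI, threadId and imageBytes are assumed placeholders.
var upload = await openAI.Files.FileUpload(UploadFilePurposes.UploadFilePurpose.Vision, imageBytes, "image_edit_original.png");

MessageContentOneOfType content = new([
    MessageContent.TextContent("Tell me about this image"),
    MessageContent.ImageFileContent(upload.Id, ImageStatics.ImageDetailTypes.High)
]);

var result = await openAI.Beta.Messages.CreateMessage(threadId, new MessageCreateRequest
{
    Role = AssistantsStatics.MessageStatics.Roles.User,
    Content = content
});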

OpenAI.Playground/TestHelpers/AssistantHelpers/VectorTestHelper.cs

Lines changed: 3 additions & 3 deletions
@@ -502,11 +502,11 @@ public static async Task CancelVectorStoreFileBatch(IOpenAIService openAI)
 
     private static async Task Cleanup(IOpenAIService sdk)
     {
-
-        if (!string.IsNullOrWhiteSpace(CreatedVectorFileId))
+        if (!string.IsNullOrWhiteSpace(CreatedVectorFileId) && !string.IsNullOrWhiteSpace(CreatedVectorId))
         {
             await sdk.Beta.VectorStoreFiles.DeleteVectorStoreFile(CreatedVectorId, CreatedVectorFileId);
         }
+
         if (!string.IsNullOrWhiteSpace(CreatedFileId1))
         {
             await sdk.Files.DeleteFile(CreatedFileId1);
@@ -523,4 +523,4 @@ private static async Task Cleanup(IOpenAIService sdk)
         }
     }
 }
-}
+}

OpenAI.Playground/TestHelpers/VisionTestHelper.cs

Lines changed: 51 additions & 82 deletions
@@ -10,34 +10,28 @@ internal static class VisionTestHelper
 {
     public static async Task RunSimpleVisionTest(IOpenAIService sdk)
     {
-        ConsoleExtensions.WriteLine("VIsion Testing is starting:", ConsoleColor.Cyan);
+        ConsoleExtensions.WriteLine("Vision Testing is starting:", ConsoleColor.Cyan);
 
         try
         {
            ConsoleExtensions.WriteLine("Vision Test:", ConsoleColor.DarkCyan);
 
-            var completionResult = await sdk.ChatCompletion.CreateCompletion(
-                new ChatCompletionCreateRequest
+            var completionResult = await sdk.ChatCompletion.CreateCompletion(new()
+            {
+                Messages = new List<ChatMessage>
                 {
-                    Messages = new List<ChatMessage>
+                    ChatMessage.FromSystem("You are an image analyzer assistant."),
+                    ChatMessage.FromUser(new List<MessageContent>
                     {
-                        ChatMessage.FromSystem("You are an image analyzer assistant."),
-                        ChatMessage.FromUser(
-                            new List<MessageContent>
-                            {
-                                MessageContent.TextContent("What is on the picture in details?"),
-                                MessageContent.ImageUrlContent(
-                                    "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg",
-                                    ImageStatics.ImageDetailTypes.High
-                                )
-                            }
-                        ),
-                    },
-                    MaxTokens = 300,
-                    Model = Models.Gpt_4_vision_preview,
-                    N = 1
-                }
-            );
+                        MessageContent.TextContent("What is on the picture in details?"),
+                        MessageContent.ImageUrlContent("https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg",
+                            ImageStatics.ImageDetailTypes.High)
+                    })
+                },
+                MaxTokens = 300,
+                Model = Models.Gpt_4_vision_preview,
+                N = 1
+            });
 
            if (completionResult.Successful)
            {
@@ -47,12 +41,10 @@ public static async Task RunSimpleVisionTest(IOpenAIService sdk)
            {
                if (completionResult.Error == null)
                {
-                    throw new Exception("Unknown Error");
+                    throw new("Unknown Error");
                }
 
-                Console.WriteLine(
-                    $"{completionResult.Error.Code}: {completionResult.Error.Message}"
-                );
+                Console.WriteLine($"{completionResult.Error.Code}: {completionResult.Error.Message}");
            }
        }
        catch (Exception e)
@@ -69,28 +61,22 @@ public static async Task RunSimpleVisionStreamTest(IOpenAIService sdk)
        {
            ConsoleExtensions.WriteLine("Vision Stream Test:", ConsoleColor.DarkCyan);
 
-            var completionResult = sdk.ChatCompletion.CreateCompletionAsStream(
-                new ChatCompletionCreateRequest
+            var completionResult = sdk.ChatCompletion.CreateCompletionAsStream(new()
+            {
+                Messages = new List<ChatMessage>
                 {
-                    Messages = new List<ChatMessage>
+                    ChatMessage.FromSystem("You are an image analyzer assistant."),
+                    ChatMessage.FromUser(new List<MessageContent>
                     {
-                        ChatMessage.FromSystem("You are an image analyzer assistant."),
-                        ChatMessage.FromUser(
-                            new List<MessageContent>
-                            {
-                                MessageContent.TextContent("What’s in this image?"),
-                                MessageContent.ImageUrlContent(
-                                    "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg",
-                                    ImageStatics.ImageDetailTypes.Low
-                                )
-                            }
-                        ),
-                    },
-                    MaxTokens = 300,
-                    Model = Models.Gpt_4_vision_preview,
-                    N = 1
-                }
-            );
+                        MessageContent.TextContent("What’s in this image?"),
+                        MessageContent.ImageUrlContent("https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg",
+                            ImageStatics.ImageDetailTypes.Low)
+                    })
+                },
+                MaxTokens = 300,
+                Model = Models.Gpt_4_vision_preview,
+                N = 1
+            });
 
            await foreach (var completion in completionResult)
            {
@@ -102,12 +88,10 @@ public static async Task RunSimpleVisionStreamTest(IOpenAIService sdk)
                {
                    if (completion.Error == null)
                    {
-                        throw new Exception("Unknown Error");
+                        throw new("Unknown Error");
                    }
 
-                    Console.WriteLine(
-                        $"{completion.Error.Code}: {completion.Error.Message}"
-                    );
+                    Console.WriteLine($"{completion.Error.Code}: {completion.Error.Message}");
                }
            }
 
@@ -127,39 +111,26 @@ public static async Task RunSimpleVisionTestUsingBase64EncodedImage(IOpenAIService sdk)
 
        try
        {
-            ConsoleExtensions.WriteLine(
-                "Vision with base64 encoded image Test:",
-                ConsoleColor.DarkCyan
-            );
+            ConsoleExtensions.WriteLine("Vision with base64 encoded image Test:", ConsoleColor.DarkCyan);
 
            const string originalFileName = "image_edit_original.png";
-            var originalFile = await FileExtensions.ReadAllBytesAsync(
-                $"SampleData/{originalFileName}"
-            );
+            var originalFile = await FileExtensions.ReadAllBytesAsync($"SampleData/{originalFileName}");
 
-            var completionResult = await sdk.ChatCompletion.CreateCompletion(
-                new ChatCompletionCreateRequest
+            var completionResult = await sdk.ChatCompletion.CreateCompletion(new()
+            {
+                Messages = new List<ChatMessage>
                 {
-                    Messages = new List<ChatMessage>
+                    ChatMessage.FromSystem("You are an image analyzer assistant."),
+                    ChatMessage.FromUser(new List<MessageContent>
                     {
-                        ChatMessage.FromSystem("You are an image analyzer assistant."),
-                        ChatMessage.FromUser(
-                            new List<MessageContent>
-                            {
-                                MessageContent.TextContent("What is on the picture in details?"),
-                                MessageContent.ImageBinaryContent(
-                                    originalFile,
-                                    ImageStatics.ImageFileTypes.Png,
-                                    ImageStatics.ImageDetailTypes.High
-                                )
-                            }
-                        ),
-                    },
-                    MaxTokens = 300,
-                    Model = Models.Gpt_4_vision_preview,
-                    N = 1
-                }
-            );
+                        MessageContent.TextContent("What is on the picture in details?"),
+                        MessageContent.ImageBinaryContent(originalFile, ImageStatics.ImageFileTypes.Png, ImageStatics.ImageDetailTypes.High)
+                    })
+                },
+                MaxTokens = 300,
+                Model = Models.Gpt_4_vision_preview,
+                N = 1
+            });
 
            if (completionResult.Successful)
            {
@@ -169,12 +140,10 @@ public static async Task RunSimpleVisionTestUsingBase64EncodedImage(IOpenAIService sdk)
            {
                if (completionResult.Error == null)
                {
-                    throw new Exception("Unknown Error");
+                    throw new("Unknown Error");
                }
 
-                Console.WriteLine(
-                    $"{completionResult.Error.Code}: {completionResult.Error.Message}"
-                );
+                Console.WriteLine($"{completionResult.Error.Code}: {completionResult.Error.Message}");
            }
        }
        catch (Exception e)
@@ -183,4 +152,4 @@ public static async Task RunSimpleVisionTestUsingBase64EncodedImage(IOpenAIService sdk)
            throw;
        }
    }
-}
+}
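All three vision helpers now share the same collapsed request shape: a target-typed ChatCompletionCreateRequest with a system message plus a user message whose content mixes text with an image part (URL or binary). A minimal sketch of that shape, assuming an initialized IOpenAIService (sdk); the image URL below is a placeholder, not one used in the tests:

// Sketch only - the request shape used by the reworked vision tests above.
// sdk is an initialized IOpenAIService; the image URL is a placeholder.
var completionResult = await sdk.ChatCompletion.CreateCompletion(new()
{
    Messages = new List<ChatMessage>
    {
        ChatMessage.FromSystem("You are an image analyzer assistant."),
        ChatMessage.FromUser(new List<MessageContent>
        {
            MessageContent.TextContent("What is on the picture in details?"),
            MessageContent.ImageUrlContent("https://example.com/sample.jpg", ImageStatics.ImageDetailTypes.High)
        })
    },
    MaxTokens = 300,
    Model = Models.Gpt_4_vision_preview,
    N = 1
});

if (!completionResult.Successful)
{
    Console.WriteLine($"{completionResult.Error?.Code}: {completionResult.Error?.Message}");
}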
