I have a program in .NET Core that describes an image using the Azure OpenAI GPT-4o model.
using Azure;
using Azure.AI.OpenAI;
using OpenAI;
using OpenAI.Chat;
using System;
using System.ClientModel;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
// Azure OpenAI client library for .NET - version 2.1.0
public class AzureOpenAiService : IAzureOpenAiService
{
    private static string endpoint = "https://xyz.openai.azure.com/";
    private static string deployment = "gpt-4o";
    private static string apiKey = "LFK";

    public async Task FindPrimarySubjectAsync(string imagePath)
    {
        try
        {
            string base64Image = EncodeImage(imagePath);
            var credential = new AzureKeyCredential(apiKey);
            OpenAIClientOptions openAIClientOptions = new OpenAIClientOptions
            {
                Endpoint = new Uri(endpoint)
            };
            var client = new AzureOpenAIClient(new Uri(endpoint), new ApiKeyCredential(apiKey));
            var chatMessages = new List<ChatMessage>
            {
                new SystemChatMessage("Analyze the uploaded image and return a single-word description of the main subject. The response should be only one word, representing the most general yet accurate category."),
                new UserChatMessage($"What is in this image? Image: data:image/png;base64,{base64Image}")
            };
            var chatRequest = new ChatCompletionOptions();
            var chatClient = client.GetChatClient(deployment);
            var response = await chatClient.CompleteChatAsync(chatMessages, chatRequest); // Stuck here.
            var content = response.Value.Content;
        }
        catch (Exception ex)
        {
            throw;
        }
    }

    private static string EncodeImage(string imagePath)
    {
        byte[] imageBytes = File.ReadAllBytes(imagePath);
        return Convert.ToBase64String(imageBytes);
    }
}
This code gets stuck when CompleteChatAsync() is invoked; I waited for more than 5 minutes and there was no response.
When I tried the same thing in Python, it returned a response within 5-6 seconds.
from openai import AzureOpenAI
import base64
endpoint = 'https://xyz.openai.azure.com/'
deployment = 'gpt-4o'
api_key = "LFK"
api_version = "2024-05-01-preview"
def encode_image(image_path):
    """Encodes an image to base64 format."""
    with open(image_path, "rb") as image_file:
        return base64.b64encode(image_file.read()).decode("utf-8")

def analyze_image(image_path):
    base64_image = encode_image(image_path)
    client = AzureOpenAI(
        azure_endpoint=endpoint,
        api_key=api_key,
        api_version=api_version
    )
    response = client.chat.completions.create(
        model="gpt-4o",
        messages=[
            {"role": "system", "content": "Analyze the uploaded image and return a single-word description of the main subject. The response should be only one word, representing the most general yet accurate category."},
            {
                "role": "user",
                "content": [
                    {"type": "text", "text": f"What ?"},
                    {"type": "image_url", "image_url": {"url": f"data:image/png;base64,{base64_image}"}}
                ],
            }
        ]
    )
    return response.choices[0].message.content

image_path = "image3.png"
result = analyze_image(image_path)
print("AI Response:", result)
Why is the .NET code not responding?
1 Answer
I was able to resolve this issue.
The first thing to correct is that the image has to be passed as BinaryData instead of a base64-encoded string embedded in the prompt text.
byte[] imageBytes = File.ReadAllBytes(imagePath);
var binaryImage = BinaryData.FromBytes(imageBytes);
The next thing is to build the UserChatMessage object correctly, so the image is sent as an image content part of the chat message.
var chatImageContent = ChatMessageContentPart.CreateImagePart(binaryImage, "image/png");
var chatTextContent = ChatMessageContentPart.CreateTextPart("What is in this image?");
var userChatMessage = new UserChatMessage(chatTextContent, chatImageContent);
var chatMessages = new List<ChatMessage>
{
    new SystemChatMessage("Analyze the uploaded image."),
    userChatMessage
};
var chatClient = client.GetChatClient(deployment);
var response = await chatClient.CompleteChatAsync(chatMessages, chatRequest);
var content = response.Value.Content[0].Text;
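For completeness, here is a minimal sketch of the corrected method with these changes applied. It assumes the same endpoint, deployment and apiKey fields as in the question and a PNG input image, and it returns Task<string> only so the extracted text can be handed back to the caller; keeping the original Task signature works just as well.

public async Task<string> FindPrimarySubjectAsync(string imagePath)
{
    var client = new AzureOpenAIClient(new Uri(endpoint), new ApiKeyCredential(apiKey));
    var chatClient = client.GetChatClient(deployment);

    // Load the image and attach it as an image content part instead of
    // embedding the base64 string in the prompt text.
    var binaryImage = BinaryData.FromBytes(File.ReadAllBytes(imagePath));
    var chatMessages = new List<ChatMessage>
    {
        new SystemChatMessage("Analyze the uploaded image and return a single-word description of the main subject."),
        new UserChatMessage(
            ChatMessageContentPart.CreateTextPart("What is in this image?"),
            ChatMessageContentPart.CreateImagePart(binaryImage, "image/png"))
    };

    var response = await chatClient.CompleteChatAsync(chatMessages, new ChatCompletionOptions());
    return response.Value.Content[0].Text;
}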