te')); return $arr; }

/**
 * Iterate over all threads started by a user.
 * @param int    $uid      user ID
 * @param int    $page     page number
 * @param int    $pagesize records per page
 * @param bool   $desc     sort order: TRUE = descending, FALSE = ascending
 * @param string $key      column whose value keys the returned array
 * @param array  $col      columns to fetch
 */
function thread_tid_find_by_uid($uid, $page = 1, $pagesize = 1000, $desc = TRUE, $key = 'tid', $col = array()) {
	if (empty($uid)) return array();
	$order = $desc ? -1 : 1;
	return thread_tid__find(array('uid' => $uid), array('tid' => $order), $page, $pagesize, $key, $col);
}

// Iterate tids under forum(s); $fid may be an array, e.g. array(1, 2, 3).
function thread_tid_find_by_fid($fid, $page = 1, $pagesize = 1000, $desc = TRUE) {
	if (empty($fid)) return array();
	$order = $desc ? -1 : 1;
	return thread_tid__find(array('fid' => $fid), array('tid' => $order), $page, $pagesize, 'tid', array('tid', 'verify_date'));
}

// Delete the index row(s) for a thread id; FALSE when $tid is empty.
function thread_tid_delete($tid) {
	if (empty($tid)) return FALSE;
	return thread_tid__delete(array('tid' => $tid));
}

// Total number of threads.
function thread_tid_count() {
	return thread_tid__count();
}

// Count one user's threads. NOTE: non-primary-key count — use with care on large tables.
function thread_uid_count($uid) {
	return thread_tid__count(array('uid' => $uid));
}

// Count one forum's threads. NOTE: non-primary-key count — use with care on large tables.
function thread_fid_count($fid) {
	return thread_tid__count(array('fid' => $fid));
}

?>c# - 'IChatClient' does not contain a definition for 'CompleteStreamingAsync'? - Stack Overflow
最新消息:雨落星辰是一个专注网站SEO优化、网站SEO诊断、搜索引擎研究、网络营销推广、网站策划运营及站长类的自媒体原创博客

c# - 'IChatClient' does not contain a definition for 'CompleteStreamingAsync'? - Stack Overflow

programmeradmin5浏览0评论

I'm trying to follow this tutorial:

This is my source code:

using Microsoft.Extensions.AI;
using Microsoft.Extensions.AI.Ollama;

namespace Chat
{
    internal class Program
    {
        // Console chat loop against a locally running Ollama server.
        // NOTE(review): written against an older Microsoft.Extensions.AI preview;
        // per the compiler errors quoted in this question, newer previews ship
        // neither the Microsoft.Extensions.AI.Ollama namespace nor the
        // CompleteStreamingAsync extension method (renamed — see the answer,
        // which uses GetStreamingResponseAsync instead).
        static async Task Main(string[] args)
        {
            // Chat client pointed at the default local Ollama endpoint, model phi3:mini.
            IChatClient chatClient =
                new OllamaChatClient(new Uri("http://localhost:11434/"), "phi3:mini");

            // Start the conversation with context for the AI model
            List<ChatMessage> chatHistory = new();

            while (true)
            {
                // Get user prompt and add to chat history
                Console.WriteLine("Your prompt:");
                var userPrompt = Console.ReadLine();
                chatHistory.Add(new ChatMessage(ChatRole.User, userPrompt));

                // Stream the AI response and add to chat history
                Console.WriteLine("AI Response:");
                var response = "";
                // This is the call the compiler rejects (no such extension method).
                await foreach (var item in
                    chatClient.CompleteStreamingAsync(chatHistory))
                {
                    Console.Write(item.Text);
                    response += item.Text;
                }
                chatHistory.Add(new ChatMessage(ChatRole.Assistant, response));
                Console.WriteLine();
            }
        }
    }
}

The problem is that I'm getting this error message:

The type or namespace name 'Ollama' does not exist in the namespace 
'Microsoft.Extensions.AI' (are you missing an assembly reference?)

'IChatClient' does not contain a definition for 
'CompleteStreamingAsync' and no accessible extension method 
'CompleteStreamingAsync' accepting a first argument of type 
'IChatClient' could be found (are you missing a using directive or an assembly 
reference?)

I have included these Nuget packages:

<PackageReference Include="Microsoft.Extensions.AI" Version="9.3.0-preview.1.25114.11" />
<PackageReference Include="Microsoft.Extensions.AI.Abstractions" Version="9.3.0-preview.1.25114.11" />
<PackageReference Include="Microsoft.Extensions.AI.Ollama" Version="9.3.0-preview.1.25114.11" />

I'm trying to follow this tutorial:

https://learn.microsoft.com/en-us/dotnet/ai/quickstarts/quickstart-local-ai

This is my source code:

using Microsoft.Extensions.AI;
using Microsoft.Extensions.AI.Ollama;

namespace Chat
{
    internal class Program
    {
        // Console chat loop against a locally running Ollama server.
        // NOTE(review): written against an older Microsoft.Extensions.AI preview;
        // per the compiler errors quoted in this question, newer previews ship
        // neither the Microsoft.Extensions.AI.Ollama namespace nor the
        // CompleteStreamingAsync extension method (renamed — see the answer,
        // which uses GetStreamingResponseAsync instead).
        static async Task Main(string[] args)
        {
            // Chat client pointed at the default local Ollama endpoint, model phi3:mini.
            IChatClient chatClient =
                new OllamaChatClient(new Uri("http://localhost:11434/"), "phi3:mini");

            // Start the conversation with context for the AI model
            List<ChatMessage> chatHistory = new();

            while (true)
            {
                // Get user prompt and add to chat history
                Console.WriteLine("Your prompt:");
                var userPrompt = Console.ReadLine();
                chatHistory.Add(new ChatMessage(ChatRole.User, userPrompt));

                // Stream the AI response and add to chat history
                Console.WriteLine("AI Response:");
                var response = "";
                // This is the call the compiler rejects (no such extension method).
                await foreach (var item in
                    chatClient.CompleteStreamingAsync(chatHistory))
                {
                    Console.Write(item.Text);
                    response += item.Text;
                }
                chatHistory.Add(new ChatMessage(ChatRole.Assistant, response));
                Console.WriteLine();
            }
        }
    }
}

The problem is that I'm getting this error message:

The type or namespace name 'Ollama' does not exist in the namespace 
'Microsoft.Extensions.AI' (are you missing an assembly reference?)

'IChatClient' does not contain a definition for 
'CompleteStreamingAsync' and no accessible extension method 
'CompleteStreamingAsync' accepting a first argument of type 
'IChatClient' could be found (are you missing a using directive or an assembly 
reference?)

I have included these Nuget packages:

<PackageReference Include="Microsoft.Extensions.AI" Version="9.3.0-preview.1.25114.11" />
<PackageReference Include="Microsoft.Extensions.AI.Abstractions" Version="9.3.0-preview.1.25114.11" />
<PackageReference Include="Microsoft.Extensions.AI.Ollama" Version="9.3.0-preview.1.25114.11" />
Share Improve this question edited yesterday desertnaut 60.4k32 gold badges152 silver badges178 bronze badges asked 2 days ago OlavTOlavT 2,6666 gold badges34 silver badges61 bronze badges 0
Add a comment  | 

1 Answer 1

Reset to default 0

There have been some breaking changes since the tutorial was published.

Here is a corrected version of the code, which works with the latest versions of the NuGet packages:

using System.ComponentModel;
using Microsoft.Extensions.AI;

namespace Chat
{
    internal class Program
    {
        // Console chat loop against a local Ollama server, updated for current
        // Microsoft.Extensions.AI previews: the raw OllamaChatClient is wrapped
        // via ChatClientBuilder, and streaming uses GetStreamingResponseAsync,
        // the replacement for the removed CompleteStreamingAsync.
        static async Task Main(string[] args)
        {
            // Build the client pipeline; UseFunctionInvocation presumably adds
            // tool/function-calling middleware — not exercised in this loop.
            IChatClient client = new ChatClientBuilder(new OllamaChatClient(new Uri("http://localhost:11434"), "phi4"))
                .UseFunctionInvocation()
                .Build();

            // Start the conversation with context for the AI model
            List<ChatMessage> chatHistory = new();

            while (true)
            {
                // Get user prompt and add to chat history
                Console.WriteLine("Your prompt:");
                var userPrompt = Console.ReadLine();
                chatHistory.Add(new ChatMessage(ChatRole.User, userPrompt));

                // Stream the AI response and add to chat history
                Console.WriteLine("AI Response:");
                var responseString = "";
                var response = client.GetStreamingResponseAsync(chatHistory);
                // Each streamed update is written via its string conversion and
                // accumulated into the assistant message appended to history.
                await foreach (var update in response)
                {
                    Console.Write(update);
                    responseString += update;
                }
                chatHistory.Add(new ChatMessage(ChatRole.Assistant, responseString));
                Console.WriteLine();
            }
        }
    }
}
发布评论

评论列表(0)

  1. 暂无评论