Mirror of https://github.com/esiur/esiur-dotnet.git (synced 2026-03-31 10:28:21 +00:00)

Commit: repair
@@ -73,7 +73,7 @@ public sealed class LlmRunner
 
         var client = new OpenAIClient(
             model.ApiKey,
-            new OpenAIClientOptions { Endpoint = new Uri(model.Endpoint) });
+            new OpenAIClientOptions { Endpoint = new Uri(model.Endpoint), });
 
         var chat = client.GetChatClient(model.ModelName);
 
@@ -188,7 +188,14 @@ public sealed class LlmRunner
             new UserChatMessage(prompt)
         };
 
-        var result = await chat.CompleteChatAsync(messages);
+        var options = new ChatCompletionOptions
+        {
+            MaxOutputTokenCount = 800, // Sets the maximum number of tokens to generate in the response
+            Temperature = 0.8f,
+            // Other options like NucleusSamplingFactor (TopP), FrequencyPenalty, etc. can also be set here
+        };
+
+        var result = await chat.CompleteChatAsync(messages, options);
         return result.Value.Content[0].Text;
     }
 
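For readability, the two LlmRunner hunks compose as follows. This is a minimal sketch of the patched call site, assuming the official OpenAI .NET SDK (OpenAI NuGet package) and a ModelConfig like the one sketched after the Program hunks below; the method name and surrounding wiring are illustrative and not taken from the repository:

using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using OpenAI;
using OpenAI.Chat;

async Task<string> CompleteAsync(ModelConfig model, string prompt)
{
    // Point the OpenAI client at the model's (possibly local) endpoint.
    var client = new OpenAIClient(
        model.ApiKey,
        new OpenAIClientOptions { Endpoint = new Uri(model.Endpoint) });

    var chat = client.GetChatClient(model.ModelName);

    var messages = new List<ChatMessage> { new UserChatMessage(prompt) };

    // The commit adds explicit generation options instead of relying on defaults.
    var options = new ChatCompletionOptions
    {
        MaxOutputTokenCount = 800, // cap on generated tokens
        Temperature = 0.8f,
    };

    var result = await chat.CompleteChatAsync(messages, options);
    return result.Value.Content[0].Text;
}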
@@ -9,12 +9,64 @@ using System.Data;
 
 
 var endpoint = "http://localhost:1234/v1";
-var credential = new ApiKeyCredential("lm-studio");
+//var endpoint = "http://127.0.0.1:22334/v1";
+var credential = new ApiKeyCredential("llm");
 
 var runner = new LlmRunner();
 
 var models = new List<ModelConfig>
 {
+    new()
+    {
+        Name = "phi-3-mini-4k-instruct-qnn-npu:2",
+        Endpoint = endpoint,
+        ApiKey = credential,
+        ModelName = "phi-3-mini-4k-instruct-qnn-npu:2"
+    },
+    new()
+    {
+        Name = "phi-3.5-mini-instruct-qnn-npu:1",
+        Endpoint = endpoint,
+        ApiKey = credential,
+        ModelName = "phi-3.5-mini-instruct-qnn-npu:1"
+    },
+    new()
+    {
+        Name = "qwen2.5-7b-instruct-qnn-npu:2",
+        Endpoint = endpoint,
+        ApiKey = credential,
+        ModelName = "qwen2.5-7b-instruct-qnn-npu:2"
+    },
+    new()
+    {
+        Name = "deepseek-r1-distill-qwen-7b-qnn-npu:1",
+        Endpoint = endpoint,
+        ApiKey = credential,
+        ModelName = "deepseek-r1-distill-qwen-7b-qnn-npu:1"
+    },
+    new()
+    {
+        Name = "qwen3-4b-2507",
+        Endpoint = endpoint,
+        ApiKey = credential,
+        ModelName = "qwen/qwen3-4b-2507"
+    },
+    new()
+    {
+        Name = "gemma-3n-e4b",
+        Endpoint = endpoint,
+        ApiKey = credential,
+        ModelName = "google/gemma-3n-e4b"
+    },
+    new()
+    {
+        Name = "qwen2.5-7b-instruct-1m",
+        Endpoint = endpoint,
+        ApiKey = credential,
+        ModelName = "qwen2.5-7b-instruct-1m"
+    },
+
+
     new()
     {
         Name = "Phi-4",
@@ -59,7 +111,7 @@ var models = new List<ModelConfig>
     }
 };
 
-var (results, summary) = await runner.RunAsync( models.Skip(5).Take(1).ToArray(),
+var (results, summary) = await runner.RunAsync(models,
     250);
 
 foreach (var item in summary)
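The Program hunks rely on a ModelConfig type and on LlmRunner.RunAsync(models, 250) returning a (results, summary) pair; neither definition appears in this diff. A minimal sketch of the ModelConfig shape implied by the object initializers above, with property types inferred from the assigned values (string endpoint URL, ApiKeyCredential key) rather than taken from the repository:

using System.ClientModel;

public sealed class ModelConfig
{
    public string Name { get; set; } = "";      // label; differs from ModelName for some entries
    public string Endpoint { get; set; } = "";  // base URL, e.g. "http://localhost:1234/v1"
    public ApiKeyCredential ApiKey { get; set; } = default!;
    public string ModelName { get; set; } = ""; // identifier passed to GetChatClient
}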