2
0
mirror of https://github.com/esiur/esiur-dotnet.git synced 2026-03-31 18:38:22 +00:00

Annotations

This commit is contained in:
2026-03-19 19:33:06 +03:00
parent e300173bdd
commit 93e0785941
4 changed files with 69 additions and 0 deletions

View File

@@ -0,0 +1,25 @@
using Esiur.Core;
using Esiur.Data;
using Esiur.Misc;
using Esiur.Protocol;
using Esiur.Resource;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Text;
using System.Text.Json;
using System.Text.Json.Nodes;
using System.Threading.Tasks;
using static System.Net.Mime.MediaTypeNames;
namespace Esiur.Tests.Annotations;

/// <summary>
/// Empty placeholder resource used by the annotations tests.
/// Marked <c>partial</c> so the Esiur source generator triggered by
/// <c>[Resource]</c> can emit the rest of the type in a companion file.
/// </summary>
[Resource]
public partial class Agent;

View File

@@ -0,0 +1,18 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<!-- Console executable targeting .NET 10 with implicit usings and
     nullable reference types enabled. -->
<OutputType>Exe</OutputType>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<!-- Official OpenAI .NET client; used here to talk to an
     OpenAI-compatible local server (LM Studio). -->
<PackageReference Include="OpenAI" Version="2.9.1" />
</ItemGroup>
<ItemGroup>
<!-- Reference to the main Esiur library two directories up. -->
<ProjectReference Include="..\..\Esiur\Esiur.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,19 @@
// Minimal chat demo against LM Studio's OpenAI-compatible local server.
using OpenAI;
using OpenAI.Chat;
using System.Data;

// The endpoint for LM Studio's local server.
var endpoint = "http://localhost:1234/v1";

var client = new OpenAIClient(new OpenAIClientOptions()
{
    // Point the client at the local server instead of api.openai.com.
    // Fix: reuse the declared `endpoint` variable instead of duplicating
    // the URL literal (the variable was previously declared but unused).
    Endpoint = new Uri(endpoint)
});

// The model name is largely cosmetic for LM Studio; it serves whichever
// model is currently loaded.
var chat = client.GetChatClient("local-model");

// Fix: the OpenAI 2.x ChatClient exposes CompleteChatAsync, not
// CompleteAsync — the original call would not compile.
var response = await chat.CompleteChatAsync(
    "Explain what this function does"
);

// ChatCompletion.Content is a list of content parts; print the first
// part's text.
Console.WriteLine(response.Value.Content[0].Text);