mirror of
https://github.com/esiur/esiur-dotnet.git
synced 2026-03-31 10:28:21 +00:00
LLM
This commit is contained in:
22
Esiur/Schema/Llm/LlmConstantModel.cs
Normal file
22
Esiur/Schema/Llm/LlmConstantModel.cs
Normal file
@@ -0,0 +1,22 @@
|
||||
using System;
using System.Collections.Generic;
using System.Text;
using System.Text.Json.Serialization;

namespace Esiur.Schema.Llm
{
    /// <summary>
    /// JSON-serializable description of a constant declared by a schema type.
    /// </summary>
    public sealed class LlmConstantModel
    {
        /// <summary>Constant identifier.</summary>
        [JsonPropertyName("name")]
        public string Name { get; set; } = string.Empty;

        /// <summary>Textual name of the constant's value type.</summary>
        [JsonPropertyName("type")]
        public string Type { get; set; } = string.Empty;

        /// <summary>The constant's value, if any.</summary>
        [JsonPropertyName("value")]
        public object? Value { get; set; }

        /// <summary>Optional human-readable note attached to the constant.</summary>
        [JsonPropertyName("annotation")]
        public string? Annotation { get; set; }
    }
}
|
||||
19
Esiur/Schema/Llm/LlmEventModel.cs
Normal file
19
Esiur/Schema/Llm/LlmEventModel.cs
Normal file
@@ -0,0 +1,19 @@
|
||||
using System;
using System.Collections.Generic;
using System.Text;
using System.Text.Json.Serialization;

namespace Esiur.Schema.Llm
{
    /// <summary>
    /// JSON-serializable description of an event exposed by a schema type.
    /// </summary>
    public sealed class LlmEventModel
    {
        /// <summary>Event identifier.</summary>
        [JsonPropertyName("name")]
        public string Name { get; set; } = string.Empty;

        /// <summary>Parameters delivered with the event (empty when none).</summary>
        [JsonPropertyName("parameters")]
        public List<LlmParameterModel> Parameters { get; set; } = new List<LlmParameterModel>();

        /// <summary>Optional human-readable note attached to the event.</summary>
        [JsonPropertyName("annotation")]
        public string? Annotation { get; set; }
    }
}
|
||||
22
Esiur/Schema/Llm/LlmFunctionModel.cs
Normal file
22
Esiur/Schema/Llm/LlmFunctionModel.cs
Normal file
@@ -0,0 +1,22 @@
|
||||
using System;
using System.Collections.Generic;
using System.Text;
using System.Text.Json.Serialization;

namespace Esiur.Schema.Llm
{
    /// <summary>
    /// JSON-serializable description of a callable function on a schema type.
    /// </summary>
    public sealed class LlmFunctionModel
    {
        /// <summary>Function identifier.</summary>
        [JsonPropertyName("name")]
        public string Name { get; set; } = string.Empty;

        /// <summary>Textual return type; defaults to "void".</summary>
        [JsonPropertyName("returns")]
        public string Returns { get; set; } = "void";

        /// <summary>Declared parameters, in order (empty when none).</summary>
        [JsonPropertyName("parameters")]
        public List<LlmParameterModel> Parameters { get; set; } = new List<LlmParameterModel>();

        /// <summary>Optional human-readable note attached to the function.</summary>
        [JsonPropertyName("annotation")]
        public string? Annotation { get; set; }
    }
}
|
||||
22
Esiur/Schema/Llm/LlmParameterModel.cs
Normal file
22
Esiur/Schema/Llm/LlmParameterModel.cs
Normal file
@@ -0,0 +1,22 @@
|
||||
using System;
using System.Collections.Generic;
using System.Text;
using System.Text.Json.Serialization;

namespace Esiur.Schema.Llm
{
    /// <summary>
    /// JSON-serializable description of a single function/event parameter.
    /// </summary>
    public sealed class LlmParameterModel
    {
        /// <summary>Parameter identifier.</summary>
        [JsonPropertyName("name")]
        public string Name { get; set; } = string.Empty;

        /// <summary>Textual name of the parameter's type.</summary>
        [JsonPropertyName("type")]
        public string Type { get; set; } = string.Empty;

        /// <summary>Optional human-readable note attached to the parameter.</summary>
        [JsonPropertyName("annotation")]
        public string? Annotation { get; set; }

        /// <summary>True when the parameter accepts null.</summary>
        [JsonPropertyName("nullable")]
        public bool Nullable { get; set; }
    }
}
|
||||
30
Esiur/Schema/Llm/LlmPropertyModel.cs
Normal file
30
Esiur/Schema/Llm/LlmPropertyModel.cs
Normal file
@@ -0,0 +1,30 @@
|
||||
using System;
using System.Collections.Generic;
using System.Text;
using System.Text.Json.Serialization;

namespace Esiur.Schema.Llm
{
    /// <summary>
    /// JSON-serializable description of a property exposed by a schema type,
    /// optionally carrying a snapshot of its current value.
    /// </summary>
    public sealed class LlmPropertyModel
    {
        /// <summary>Property identifier.</summary>
        [JsonPropertyName("name")]
        public string Name { get; set; } = string.Empty;

        /// <summary>Textual name of the property's value type.</summary>
        [JsonPropertyName("type")]
        public string Type { get; set; } = string.Empty;

        /// <summary>Access mode: "read", "write" or "readwrite" (the default).</summary>
        [JsonPropertyName("access")]
        public string Access { get; set; } = "readwrite";

        /// <summary>Optional human-readable note attached to the property.</summary>
        [JsonPropertyName("annotation")]
        public string? Annotation { get; set; }

        /// <summary>True when the property accepts null.</summary>
        [JsonPropertyName("nullable")]
        public bool Nullable { get; set; }

        /// <summary>Snapshot of the current value, when one has been captured.</summary>
        [JsonPropertyName("value")]
        public object? Value { get; set; }
    }
}
|
||||
224
Esiur/Schema/Llm/LlmTypeModel.cs
Normal file
224
Esiur/Schema/Llm/LlmTypeModel.cs
Normal file
@@ -0,0 +1,224 @@
|
||||
using Esiur.Data.Types;
using Esiur.Resource;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;

namespace Esiur.Schema.Llm
{
    /// <summary>
    /// JSON-serializable, LLM-friendly description of a resource type: its
    /// properties, functions, events, constants and usage rules. Built from an
    /// Esiur <see cref="TypeDef"/> and serialized as a prompt payload.
    /// </summary>
    public sealed class LlmTypeModel
    {
        // Cached and reused: System.Text.Json recommends reusing options
        // instances; the original allocated a new one on every ToJson call.
        private static readonly JsonSerializerOptions SerializerOptions =
            new JsonSerializerOptions() { WriteIndented = true };

        /// <summary>Type name.</summary>
        [JsonPropertyName("type")]
        public string Type { get; set; } = "";

        /// <summary>Type kind (string form of the TypeDef kind).</summary>
        [JsonPropertyName("kind")]
        public string Kind { get; set; } = "";

        /// <summary>Optional type-level summary taken from annotations.</summary>
        [JsonPropertyName("summary")]
        public string? Summary { get; set; }

        [JsonPropertyName("properties")]
        public List<LlmPropertyModel> Properties { get; set; } = new();

        [JsonPropertyName("functions")]
        public List<LlmFunctionModel> Functions { get; set; } = new();

        [JsonPropertyName("events")]
        public List<LlmEventModel> Events { get; set; } = new();

        [JsonPropertyName("constants")]
        public List<LlmConstantModel> Constants { get; set; } = new();

        /// <summary>Free-form rules split out of a "usage_rules" annotation.</summary>
        [JsonPropertyName("usage_rules")]
        public List<string> UsageRules { get; set; } = new();

        /// <summary>
        /// Deserializes a model from JSON; returns an empty model when the
        /// payload deserializes to null.
        /// </summary>
        public static LlmTypeModel FromJson(string json)
        {
            return JsonSerializer.Deserialize<LlmTypeModel>(json) ?? new LlmTypeModel();
        }

        /// <summary>Serializes this model as indented JSON.</summary>
        public string ToJson()
        {
            return JsonSerializer.Serialize(this, SerializerOptions);
        }

        /// <summary>
        /// Snapshots the current values of <paramref name="value"/>'s readable
        /// properties into this model (mutating <see cref="Properties"/> in
        /// place) and serializes the result as indented JSON.
        /// </summary>
        /// <param name="value">Live resource instance read via reflection.</param>
        public string ToJson(IResource value)
        {
            foreach (var p in Properties)
            {
                if (p.Access == "write")
                {
                    // Write-only properties are not reported; also clear any
                    // value left over from a previous snapshot (the original
                    // kept stale values here).
                    p.Value = null;
                    continue;
                }

                var prop = value.GetType().GetProperty(p.Name);
                if (prop != null)
                    p.Value = prop.GetValue(value);
                else
                    p.Value = null; // no matching CLR property; avoid stale data
            }

            return JsonSerializer.Serialize(this, SerializerOptions);
        }

        /// <summary>
        /// Builds an <see cref="LlmTypeModel"/> from an Esiur
        /// <see cref="TypeDef"/>, copying members and flattening their
        /// annotations into display strings.
        /// </summary>
        public static LlmTypeModel FromTypeDef(TypeDef typeDef)
        {
            var m = new LlmTypeModel();

            m.Type = typeDef.Name;
            m.Kind = typeDef.Kind.ToString();

            // Summary: prefer an explicit "summary" annotation, falling back
            // to the unnamed ("") annotation.
            if (typeDef.Annotations != null && typeDef.Annotations.Count > 0)
            {
                if (typeDef.Annotations.ContainsKey("summary"))
                    m.Summary = typeDef.Annotations["summary"];
                else if (typeDef.Annotations.ContainsKey(""))
                    m.Summary = typeDef.Annotations[""];
            }

            // Properties: copy name/type/nullability and map the permission
            // enum to the string access modes the JSON schema uses.
            foreach (var p in typeDef.Properties)
            {
                var pm = new LlmPropertyModel()
                {
                    Name = p.Name,
                    Type = p.ValueType?.ToString() ?? "unknown",
                    Nullable = p.ValueType?.Nullable ?? false,
                    Access = p.Permission switch
                    {
                        global::Esiur.Resource.PropertyPermission.Read => "read",
                        global::Esiur.Resource.PropertyPermission.Write => "write",
                        global::Esiur.Resource.PropertyPermission.ReadWrite => "readwrite",
                        _ => "readwrite"
                    }
                };

                // The unnamed ("") annotation wins; otherwise join all named
                // annotations into one "key: value; ..." string.
                if (p.Annotations != null && p.Annotations.Count > 0)
                {
                    if (p.Annotations.ContainsKey(""))
                        pm.Annotation = p.Annotations[""];
                    else
                        pm.Annotation = String.Join("; ", p.Annotations.Select(kv => kv.Key + ": " + kv.Value));
                }

                m.Properties.Add(pm);
            }

            // Functions, with their argument lists.
            foreach (var f in typeDef.Functions)
            {
                var fm = new LlmFunctionModel()
                {
                    Name = f.Name,
                    Returns = f.ReturnType?.ToString() ?? "void"
                };

                if (f.Annotations != null && f.Annotations.Count > 0)
                {
                    if (f.Annotations.ContainsKey(""))
                        fm.Annotation = f.Annotations[""];
                    else
                        fm.Annotation = String.Join("; ", f.Annotations.Select(kv => kv.Key + ": " + kv.Value));
                }

                if (f.Arguments != null)
                {
                    foreach (var a in f.Arguments)
                    {
                        var pa = new LlmParameterModel()
                        {
                            Name = a.Name,
                            Type = a.Type?.ToString() ?? "unknown",
                            Nullable = a.Type?.Nullable ?? false
                        };

                        if (a.Annotations != null && a.Annotations.Count > 0)
                        {
                            if (a.Annotations.ContainsKey(""))
                                pa.Annotation = a.Annotations[""];
                            else
                                pa.Annotation = String.Join("; ", a.Annotations.Select(kv => kv.Key + ": " + kv.Value));
                        }

                        fm.Parameters.Add(pa);
                    }
                }

                m.Functions.Add(fm);
            }

            // Events: a TypeDef event carries a single argument type, exposed
            // here under the fixed name "arg".
            foreach (var e in typeDef.Events)
            {
                var em = new LlmEventModel()
                {
                    Name = e.Name
                };

                if (e.ArgumentType != null)
                {
                    var pa = new LlmParameterModel()
                    {
                        Name = "arg",
                        Type = e.ArgumentType.ToString(),
                        Nullable = e.ArgumentType.Nullable
                    };

                    em.Parameters.Add(pa);
                }

                if (e.Annotations != null && e.Annotations.Count > 0)
                {
                    if (e.Annotations.ContainsKey(""))
                        em.Annotation = e.Annotations[""];
                    else
                        em.Annotation = String.Join("; ", e.Annotations.Select(kv => kv.Key + ": " + kv.Value));
                }

                m.Events.Add(em);
            }

            // Constants.
            foreach (var c in typeDef.Constants)
            {
                var cm = new LlmConstantModel()
                {
                    Name = c.Name,
                    Type = c.ValueType?.ToString() ?? "unknown",
                    Value = c.Value
                };

                if (c.Annotations != null && c.Annotations.Count > 0)
                {
                    if (c.Annotations.ContainsKey(""))
                        cm.Annotation = c.Annotations[""];
                    else
                        cm.Annotation = String.Join("; ", c.Annotations.Select(kv => kv.Key + ": " + kv.Value));
                }

                m.Constants.Add(cm);
            }

            // Usage rules: optional "usage_rules" annotation split on
            // newlines, semicolons or commas, one trimmed rule per entry.
            if (typeDef.Annotations != null && typeDef.Annotations.Count > 0)
            {
                if (typeDef.Annotations.ContainsKey("usage_rules"))
                {
                    var v = typeDef.Annotations["usage_rules"];
                    if (!String.IsNullOrEmpty(v))
                    {
                        var parts = v.Split(new[] { '\n', ';', ',' }, StringSplitOptions.RemoveEmptyEntries);
                        foreach (var p in parts)
                            m.UsageRules.Add(p.Trim());
                    }
                }
            }

            return m;
        }
    }
}
|
||||
@@ -12,7 +12,7 @@
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\..\Esiur\Esiur.csproj" />
|
||||
<ProjectReference Include="..\..\Esiur\Esiur.csproj" OutputItemType="Analyzer"/>
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
12
Tests/Annotations/LlmDecision.cs
Normal file
12
Tests/Annotations/LlmDecision.cs
Normal file
@@ -0,0 +1,12 @@
|
||||
using System;
using System.Collections.Generic;
using System.Text;

namespace Esiur.Tests.Annotations
{
    /// <summary>
    /// Parsed LLM reply: which function to invoke (null/absent means no-op)
    /// and the model's stated reason.
    /// </summary>
    public sealed class LlmDecision
    {
        /// <summary>Name of the chosen function, or null for no action.</summary>
        public string? Function { get; set; }

        /// <summary>Short explanation supplied by the model.</summary>
        public string? Reason { get; set; }
    }
}
|
||||
171
Tests/Annotations/LlmRunner.cs
Normal file
171
Tests/Annotations/LlmRunner.cs
Normal file
@@ -0,0 +1,171 @@
|
||||
using Esiur.Schema.Llm;
using OpenAI;
using OpenAI.Chat;
using System;
using System.ClientModel;
using System.Collections.Generic;
using System.Net.NetworkInformation;
using System.Text;
using System.Text.Json;

namespace Esiur.Tests.Annotations
{
    /// <summary>
    /// Drives a <see cref="ServiceNode"/> through a scripted sequence of
    /// states and lets an OpenAI-compatible chat model decide which exported
    /// function (if any) to invoke on each tick.
    /// </summary>
    public class LlmRunner
    {
        /// <summary>
        /// Runs the scripted tick loop against the given node.
        /// </summary>
        /// <param name="node">Resource whose state is mutated and controlled.</param>
        /// <param name="endpoint">Base URL of an OpenAI-compatible server.</param>
        /// <param name="apiKey">Credential for the endpoint.</param>
        /// <param name="modelName">Chat model identifier to query.</param>
        /// <param name="tickDelayMs">Pause between ticks, in milliseconds.</param>
        public async Task RunAsync(ServiceNode node, string endpoint, ApiKeyCredential apiKey, string modelName,
            int tickDelayMs = 1000)
        {
            var client = new OpenAIClient(apiKey, new OpenAIClientOptions() { Endpoint = new Uri(endpoint) });
            // Fix: use the caller-supplied model. The original hard-coded
            // "microsoft/phi-4" here, silently ignoring the modelName parameter.
            var chat = client.GetChatClient(modelName);

            var typeModel = LlmTypeModel.FromTypeDef(node.Instance?.Definition);

            // Scripted states mixing normal, overloaded, error-prone and
            // disabled conditions so the model has decisions to make.
            var ticks = new List<TickState>
            {
                new() { Load = 35, ErrorCount = 0, Enabled = true },
                new() { Load = 88, ErrorCount = 1, Enabled = true },
                new() { Load = 42, ErrorCount = 4, Enabled = true },
                new() { Load = 18, ErrorCount = 0, Enabled = false },
                new() { Load = 91, ErrorCount = 5, Enabled = true },
                new() { Load = 25, ErrorCount = 0, Enabled = true }
            };

            for (int i = 0; i < ticks.Count; i++)
            {
                var tick = ticks[i];

                // Simulate property changes for this tick.
                node.Load = tick.Load;
                node.ErrorCount = tick.ErrorCount;
                node.Enabled = tick.Enabled;

                // ToJson(node) embeds the node's current property values into
                // the serialized type definition.
                var jsonModel = typeModel.ToJson(node);
                Console.WriteLine($"Tick {i + 1}");
                Console.WriteLine($"State: Load={node.Load}, ErrorCount={node.ErrorCount}, Enabled={node.Enabled}");

                var prompt = BuildPrompt(jsonModel, node, i + 1);

                string llmRaw = await InferAsync(chat, prompt);
                var decision = ParseDecision(llmRaw);

                bool invoked = InvokeIfValid(node, decision?.Function);

                Console.WriteLine($"LLM: {llmRaw}");
                Console.WriteLine($"Invoked: {invoked}");
                Console.WriteLine($"After: Load={node.Load}, ErrorCount={node.ErrorCount}, Enabled={node.Enabled}");
                Console.WriteLine(new string('-', 60));

                await Task.Delay(tickDelayMs);
            }
        }

        /// <summary>
        /// Sends the prompt to the chat model and returns the raw reply text.
        /// </summary>
        private static async Task<string> InferAsync(ChatClient chat, string prompt)
        {
            List<ChatMessage> messages = new List<ChatMessage>
            {
                new SystemChatMessage("You control a distributed resource. " +
                    "Return only JSON with fields: function and reason."),
                new UserChatMessage(prompt)
            };

            var result = await chat.CompleteChatAsync(messages);

            return result.Value.Content[0].Text;
        }

        /// <summary>
        /// Builds the instruction prompt. The node's current state is already
        /// embedded in <paramref name="typeDefJson"/>; <paramref name="node"/>
        /// and <paramref name="tick"/> are currently unused but kept for a
        /// future explicit state section.
        /// </summary>
        private static string BuildPrompt(string typeDefJson, ServiceNode node, int tick)
        {
            return
$@"You are given a runtime type definition for a distributed resource and its current state.
Choose at most one function to call.
Use only the functions defined in the type definition.
Do not invent functions.
If no action is needed, return function as null.
Return only JSON in this format:
{{ ""function"": ""Restart|ResetErrors|Enable|Disable|null"", ""reason"": ""short explanation"" }}

Type Definition:
{typeDefJson}";
        }

        /// <summary>
        /// Parses the model's reply into an <see cref="LlmDecision"/>.
        /// Returns null when the reply is not usable JSON.
        /// </summary>
        private static LlmDecision? ParseDecision(string text)
        {
            try
            {
                var json = ExtractJson(text);

                return JsonSerializer.Deserialize<LlmDecision>(
                    json,
                    new JsonSerializerOptions
                    {
                        PropertyNameCaseInsensitive = true
                    });
            }
            catch
            {
                // Best-effort: a malformed reply simply means "no decision".
                return null;
            }
        }

        /// <summary>
        /// Strips Markdown code fences (```...```) the model may wrap its
        /// JSON in; returns "{}" for blank input.
        /// </summary>
        private static string ExtractJson(string text)
        {
            if (string.IsNullOrWhiteSpace(text))
                return "{}";

            text = text.Trim();

            if (text.StartsWith("```"))
            {
                // Drop the opening fence line (which may carry a language tag).
                var firstNewline = text.IndexOf('\n');
                if (firstNewline >= 0)
                    text = text[(firstNewline + 1)..];

                var lastFence = text.LastIndexOf("```", StringComparison.Ordinal);
                if (lastFence >= 0)
                    text = text[..lastFence];
            }

            return text.Trim();
        }

        /// <summary>
        /// Invokes the named exported function on the node if it is one of the
        /// known actions. Returns true when a function was invoked. A null,
        /// blank or literal "null" name means no action; unknown names are
        /// ignored (the model must not invent functions).
        /// </summary>
        private static bool InvokeIfValid(ServiceNode node, string? functionName)
        {
            if (string.IsNullOrWhiteSpace(functionName) ||
                string.Equals(functionName, "null", StringComparison.OrdinalIgnoreCase))
                return false;

            switch (functionName)
            {
                case "Restart":
                    node.Restart();
                    return true;

                case "ResetErrors":
                    node.ResetErrors();
                    return true;

                case "Enable":
                    node.Enable();
                    return true;

                case "Disable":
                    node.Disable();
                    return true;

                default:
                    return false;
            }
        }
    }
}
|
||||
@@ -1,15 +1,32 @@
|
||||
// The endpoint for LM Studio's local server
using Esiur.Resource;
using Esiur.Stores;
using Esiur.Tests.Annotations;
using OpenAI;
using OpenAI.Chat;
using System.ClientModel;
using System.Data;

// Host a ServiceNode in an in-memory Esiur store.
var wh = new Warehouse();

await wh.Put("store", new MemoryStore());
var node = await wh.Put("store/service", new ServiceNode());

// LM Studio serves an OpenAI-compatible API locally; the key is a placeholder.
var endpoint = "http://localhost:1234/v1";
var credential = new ApiKeyCredential("lm-studio");

// Drive the node through simulated ticks, letting the LLM choose actions.
// (Earlier direct OpenAIClient/ChatClient experiments removed as dead code;
// LlmRunner now owns client creation.)
var llmRunner = new LlmRunner();

await llmRunner.RunAsync(
    node,
    endpoint,
    credential,
    "microsoft/phi-4"
);
|
||||
51
Tests/Annotations/ServiceNode.cs
Normal file
51
Tests/Annotations/ServiceNode.cs
Normal file
@@ -0,0 +1,51 @@
|
||||
using Esiur.Resource;
using System;
using System.Collections.Generic;
using System.Text;

namespace Esiur.Tests.Annotations
{
    // Test resource controlled by an LLM via LlmRunner. The [Annotation]
    // strings below are not decoration: they are surfaced to the model through
    // LlmTypeModel.FromTypeDef and steer its decisions.
    [Annotation("Represents a managed service node with load, error count, and enable state. Functions control service operation.")]
    [Annotation("usage_rules", @"1.Choose at most one function per tick.
2. Use only functions defined in the functions list.
3. Do not invent properties or functions.
4. Base the decision only on current property values and annotations.")]
    [Resource]
    public partial class ServiceNode
    {
        // NOTE(review): the methods below reference PascalCase properties
        // (Load, ErrorCount, Enabled); these appear to be generated from the
        // [Export] fields by the Esiur source generator for this partial
        // class — confirm against the generator's conventions.

        [Annotation("Current service load percentage from 0 to 100. Values above 80 indicate overload.")]
        [Export] int load;

        [Annotation("Number of recent errors detected in the service. Values above 3 indicate instability.")]
        [Export] int errorCount;

        [Annotation("True when the service is enabled and allowed to run. False means the service is disabled.")]
        [Export] bool enabled;

        [Annotation("Restart the service when load is very high or when repeated errors indicate instability.")]
        [Export] public void Restart()
        {
            // Reset to a healthy baseline: no errors, low load, enabled.
            ErrorCount = 0;
            Load = 10;
            Enabled = true;
        }

        [Annotation("Clear the error counter when errors were temporary and a restart is not required.")]
        [Export] public void ResetErrors()
        {
            ErrorCount = 0;
        }

        [Annotation("Enable the service if it is disabled and should be running.")]
        [Export] public void Enable()
        {
            Enabled = true;
        }

        [Annotation("Disable the service if it should stop processing requests.")]
        [Export] public void Disable()
        {
            Enabled = false;
        }
    }
}
|
||||
14
Tests/Annotations/TickState.cs
Normal file
14
Tests/Annotations/TickState.cs
Normal file
@@ -0,0 +1,14 @@
|
||||
using System;
using System.Collections.Generic;
using System.Text;

namespace Esiur.Tests.Annotations
{
    /// <summary>
    /// One simulated snapshot of a ServiceNode's state, used to drive a
    /// single tick of the LLM control loop.
    /// </summary>
    public sealed class TickState
    {
        /// <summary>Simulated load percentage.</summary>
        public int Load { get; set; }

        /// <summary>Simulated count of recent errors.</summary>
        public int ErrorCount { get; set; }

        /// <summary>Whether the service is enabled in this snapshot.</summary>
        public bool Enabled { get; set; }
    }
}
|
||||
Reference in New Issue
Block a user