Compare commits

..

11 Commits

Author SHA1 Message Date
Samuele Lorefice
b309ef2c0e More modifications 2025-01-23 16:11:18 +01:00
Samuele Lorefice
ac63019fe6 Adds history command 2024-12-27 18:00:58 +01:00
Samuele Lorefice
773203127f They can now answer 2024-12-26 20:19:59 +01:00
Samuele Lorefice
124a4c66fe Adds correct command check 2024-12-26 19:51:17 +01:00
Samuele Lorefice
e90e0200e1 Removes ratelimit, refactors everything in more sections, adds tokenization calculation 2024-12-26 19:47:07 +01:00
Samuele Lorefice
000b32c41d Last work before refactor 2024-12-26 17:26:29 +01:00
Samuele Lorefice
0fe19ce04f Added speaker hinting 2024-12-26 16:51:18 +01:00
Samuele Lorefice
50e5ea6533 Implemented ratelimit 2024-12-26 16:51:03 +01:00
Samuele Lorefice
54691774e2 Renamed tg bot container to telegrambot, moved prompt file to bot subproject 2024-12-26 16:31:06 +01:00
Samuele Lorefice
454dbb7e2a Added GetEnv shorthand, moved Prompt loading to external file 2024-12-26 16:22:56 +01:00
Samuele Lorefice
65950e3642 Solves context out of bound due to history 2024-12-26 04:32:11 +01:00
14 changed files with 488 additions and 156 deletions

1
.env

@@ -1,2 +1,3 @@
 MODEL_PATH=./model
 MODEL_NAME=Qwen2.5-7B-Instruct-Q8.gguf
+CONTEXT_SIZE=4096

88
.gitignore vendored

@@ -1,6 +1,88 @@
-bin/
-obj/
-/packages/
+# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
+# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
+# User-specific stuff
+.idea/**/workspace.xml
+.idea/**/tasks.xml
+.idea/**/usage.statistics.xml
+.idea/**/dictionaries
+.idea/**/shelf
+# AWS User-specific
+.idea/**/aws.xml
+# Generated files
+.idea/**/contentModel.xml
+# Sensitive or high-churn files
+.idea/**/dataSources/
+.idea/**/dataSources.ids
+.idea/**/dataSources.local.xml
+.idea/**/sqlDataSources.xml
+.idea/**/dynamic.xml
+.idea/**/uiDesigner.xml
+.idea/**/dbnavigator.xml
+# Gradle
+.idea/**/gradle.xml
+.idea/**/libraries
+# Gradle and Maven with auto-import
+# When using Gradle or Maven with auto-import, you should exclude module files,
+# since they will be recreated, and may cause churn. Uncomment if using
+# auto-import.
+# .idea/artifacts
+# .idea/compiler.xml
+# .idea/jarRepositories.xml
+# .idea/modules.xml
+# .idea/*.iml
+# .idea/modules
+# *.iml
+# *.ipr
+# CMake
+cmake-build-*/
+# Mongo Explorer plugin
+.idea/**/mongoSettings.xml
+# File-based project format
+*.iws
+# IntelliJ
+out/
+# mpeltonen/sbt-idea plugin
+.idea_modules/
+# JIRA plugin
+atlassian-ide-plugin.xml
+# Cursive Clojure plugin
+.idea/replstate.xml
+# SonarLint plugin
+.idea/sonarlint/
+# Crashlytics plugin (for Android Studio and IntelliJ)
+com_crashlytics_export_strings.xml
+crashlytics.properties
+crashlytics-build.properties
+fabric.properties
+# Editor-based Rest Client
+.idea/httpRequests
+# Android studio 3.1+ serialized cache file
+.idea/caches/build_file_checksums.ser
+**/[Bb]in/
+**/[Oo]bj/
 riderModule.iml
 /_ReSharper.Caches/
+.idea/.idea.NemesisAI/compose.generated.override.xml
+/TelegramBot/.env
+# AI model folder
 /model/*


@@ -23,9 +23,9 @@ services:
      RESHARPER_LOG_CONF: "/etc/opt/JetBrains/RiderDebuggerTools/backend-log.xml"
    image: "telegrambot:dev"
    ports:
-     - "127.0.0.1:57033:57000"
-     - "127.0.0.1:57233:57200"
-     - "127.0.0.1:57433:57400"
+     - "127.0.0.1:57041:57000"
+     - "127.0.0.1:57241:57200"
+     - "127.0.0.1:57441:57400"
    volumes:
      - "I:\\NemesisAI\\TelegramBot:/app:rw"
      - "I:\\NemesisAI:/src:rw"
@@ -34,5 +34,5 @@ services:
        Linux64:/opt/JetBrains/RiderDebuggerTools"
      - "C:\\Users\\airon\\AppData\\Local\\Programs\\Rider\\bin\\backend-log.xml:/etc/opt/JetBrains/RiderDebuggerTools/backend-log.xml"
      - "C:\\Users\\airon\\AppData\\Local\\JetBrains\\Rider2024.3\\log\\DebuggerWorker\\\
-       JetBrains.Debugger.Worker.2024_12_26_03_21_12:/var/opt/JetBrains/RiderDebuggerTools:rw"
+       JetBrains.Debugger.Worker.2024_12_26_04_20_00:/var/opt/JetBrains/RiderDebuggerTools:rw"
    working_dir: "/app"


@@ -2,3 +2,4 @@ TELEGRAM_BOT_TOKEN=yourTokenHere
 OPENAI_BASE_URL=http://llm-server/
 OPENAI_MODEL=Qwen2.5-7B-Instruct-Q8.gguf
 OPENAI_API_KEY=MyApiKey
+NEMESIS_PROMPT_FILE=nemesis.txt

8
TelegramBot/Actor.cs Normal file

@@ -0,0 +1,8 @@
namespace TelegramBot;

public enum Actor {
    User,
    Krolik,
    Nemesis,
    System
}

13
TelegramBot/Agent.cs Normal file

@@ -0,0 +1,13 @@
using Telegram.Bot;

namespace TelegramBot;

public class Agent(Actor actor, long telegramId, string name, string username, TelegramBotClient bot, string systemPrompt, int tokenLenght) {
    public Actor Actor { get; } = actor;
    public long TelegramId { get; } = telegramId;
    public string Username { get; } = username;
    public string Name { get; } = name;
    public TelegramBotClient Bot { get; } = bot;
    public string SystemPrompt { get; } = systemPrompt;
    public int SystemPromptLength { get; } = tokenLenght;
}

143
TelegramBot/OpenAIAgent.cs Normal file

@@ -0,0 +1,143 @@
using System.ClientModel;
using System.Net.Http.Json;
using System.Text.Json.Serialization;
using OpenAI;
using OpenAI.Chat;

namespace TelegramBot;

public class OpenAiAgent {
    private ApiKeyCredential apikey;
    private OpenAIClient oaiClient;
    private ChatClient chatClient;
    private HttpClient httpClient;
    private Dictionary<(long, Actor), List<(ChatMessage, int)>> oaiChats = new();

    public int ContextSize { get; set; } = 4096;

    public OpenAiAgent(string baseUrl, string apiKey, string model) {
        OpenAIClientOptions options = new OpenAIClientOptions() {
            Endpoint = new(baseUrl),
            NetworkTimeout = new(0, 0, 15)
        };
        apikey = new(apiKey);
        oaiClient = new(apikey, options);
        chatClient = oaiClient.GetChatClient(model);
        httpClient = new() {
            BaseAddress = new(baseUrl),
            Timeout = new(0, 0, 15)
        };
        Console.WriteLine(
            $"""
             Base URL: {baseUrl}
             Model: {model}
             API Key: {apiKey}
             """);
    }

    public int GetTokenLenght(string message) {
        TokenizeRequest request = new(message);
        var req = httpClient.PostAsync("tokenize", JsonContent.Create(request));
        req.Wait();
        var response = req.Result.Content.ReadFromJsonAsync<TokenizeResponseBase>();
        response.Wait();
        return response.Result.Tokens.Length;
    }

    public void ChatHistoryAppend(Actor actor, long chatId, string message) {
        //get the chat from the dictionary
        var chat = GetChatHistory((chatId, actor));
        //get the token lenght of the message
        var tokenLenght = GetTokenLenght(message);
        //create a new chat message
        switch (actor) {
            case Actor.User:
                chat.Add((new UserChatMessage(message), tokenLenght));
                break;
            case Actor.Krolik:
                chat.Add((new AssistantChatMessage(message), tokenLenght));
                GetChatHistory(chatId, Actor.Nemesis).Add((new UserChatMessage(message), tokenLenght));
                break;
            case Actor.Nemesis:
                chat.Add((new AssistantChatMessage(message), tokenLenght));
                GetChatHistory(chatId, Actor.Krolik).Add((new UserChatMessage(message), tokenLenght));
                break;
            case Actor.System:
                chat.Add((new SystemChatMessage(message), tokenLenght));
                break;
        }
    }

    public List<(ChatMessage, int)> GetChatHistory((long, Actor) key) => GetChatHistory(key.Item1, key.Item2);

    public List<(ChatMessage, int)> GetChatHistory(long chatId, Actor actor) {
        oaiChats.TryGetValue((chatId, actor), out var chat);
        if(chat == null) {
            AddChatToDictionary(chatId, actor);
            chat = oaiChats[(chatId, actor)];
        }
        return chat!;
    }

    public string GetChatResponse(long chatId, Agent agent) {
        int currentContextSize = agent.SystemPromptLength;
        List<ChatMessage> chatHistory = new();
        chatHistory.Add(new SystemChatMessage(agent.SystemPrompt));
        //Fetch the chat history from the dictionary trimming to the context size
        var history = GetChatHistory(chatId, agent.Actor).ToList();
        history.Reverse();
        //Add the chat history to the list until the context size is reached
        foreach (var (message, tokenLenght) in history) {
            if (currentContextSize + tokenLenght > ContextSize) break;
            chatHistory.Add(message);
            currentContextSize += tokenLenght;
        }
        //Reverse the chat history to get the correct order
        chatHistory.Reverse(1, chatHistory.Count - 1);
        //chatHistory.Add(new AssistantChatMessage($"{agent.Name}:"));
        var completion = chatClient.CompleteChat(chatHistory).Value.Content[0].Text;
        //Add the response to the chat history
        ChatHistoryAppend(agent.Actor, chatId, $"{agent.Name}: {completion}");
        //ChatHistoryAppend(agent.Actor, chatId, $"{completion}");
        return completion;
    }

    public void AddChatToDictionary(long id) {
        AddChatToDictionary(id, Actor.Krolik);
        AddChatToDictionary(id, Actor.Nemesis);
    }

    public void AddChatToDictionary(long id, Actor actor) {
        //Check if the chat already exists
        if (oaiChats.ContainsKey((id, actor))) return;
        //Create a new chat object
        var chat = new List<(ChatMessage, int)>();
        //chat.Add(new SystemChatMessage(nemesisPrompt));
        //add the entry to the dictionary
        oaiChats.Add((id, actor), chat);
    }

    public void ResetChat(long chatId) {
        //Remove the chat from the dictionary
        oaiChats.Where(x => x.Key.Item1 == chatId).ToList().ForEach(x => oaiChats.Remove(x.Key));
        //Add the chat back to the dictionary
        AddChatToDictionary(chatId);
    }
}

public struct TokenizeRequest(string content) {
    [JsonPropertyName("content")] public string Content { get; set; } = content;
    [JsonPropertyName("add_special")] public bool AddSpecial { get; set; } = false;
    [JsonPropertyName("with_pieces")] public bool WithPieces { get; set; } = false;
}

public struct TokenizeResponseBase(int[] tokens) {
    [JsonPropertyName("tokens")] public int[] Tokens { get; set; } = tokens;
}


@@ -1,177 +1,147 @@
-using OpenAI;
 using OpenAI.Chat;
-using System.ClientModel;
-using System.Threading.Channels;
 using Telegram.Bot;
 using Telegram.Bot.Types;
 using Telegram.Bot.Types.Enums;
+using TelegramBot;
+using File = System.IO.File;
-string baseUrl = Environment.GetEnvironmentVariable("OPENAI_BASE_URL") ?? "https://api.openai.com";
-string model = Environment.GetEnvironmentVariable("OPENAI_MODEL") ?? string.Empty;
-string apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? string.Empty;
+string baseUrl = Env.Get("OPENAI_BASE_URL");
+string apiKey = Env.Get("OPENAI_API_KEY");
+string model = Env.Get("OPENAI_MODEL");
+var oaiAgent = new OpenAiAgent(baseUrl, apiKey, model);
+oaiAgent.ContextSize = Int32.Parse(Env.Get("CONTEXT_SIZE"));
 Console.WriteLine("Starting the bot...");
-Console.WriteLine(
-    $"""
-     Base URL: {baseUrl}
-     Model: {model}
-     API Key: {apiKey}
-     """);
-string nemesisPrompt =
-"""
-"19 Daily - 01
-...Birds with great wings... casting shadows in their pupils..."
-"20 Daily - 02
-...Staring... at the edge of existence... my sight falters... a void without end... darkness stirs from beneath..."
-"21 Daily - 03
-...Mountains surrender to the torrent's pull... shores swallowed by the dying light..."
-"22 Daily - 04
-...Tempest awakens suddenly... howling and wailing... silence surges forth..."
-"23 Daily - 05
-...Untouched, clear as glass... serene and radiant... a hall of mirrors... an unyielding stone... adversity endures..."
-"25 Login
-...Stars... shifting along their myriad paths..."
-"26 Obtain
-...The pages... whispering mountain breeze... expanding..."
-"17 Fail
-...The wind whispers through the forest... Submerging... Piercing... the quiet warmth of celestial fire..."
-"16 Victory
-...Part from the timeless realm... Whisper prayers for the fall... the infinite starlight... the peace cloaked in shadow..."
-"Krolik: Feels pretty good. It's lighter than my previous one.
-Nemesis: ...Humph...
-Nemesis: ...The cracks of wisdom are finally pierced by ignorance...
-Krolik: Do you WANT me to bust that low-capacity garbage neural cloud of yours wide open? Eh?!"
-"Nemesis: ...Hmph... Interlacing weaves...
-Krolik: No, YOU'RE trash!"
-"Nemesis: ...A cleansing flame... Condenses and blossoms...
-Krolik: ...She said she'll send those Varjagers to hell with her bullets!"
-"Nemesis: ...Invisible flames... Rising high into the sky...
-Krolik: ...Huh?! It's just a bit of snow! Surely it can't be that serious—"
-"Nemesis: ...Birds of all shapes and colors... Spread their wings and take flight...
-Krolik: Huh? What?
-Nemesis: ...The grove far from the shore... The lingering of dawn... The end of the primordial...
-Krolik: What?!
-Redcode: Uh, what is Nemesis saying, Krolik?
-Krolik: What do you mean alive... Dead... Moving...? Unmoving...? Something that will suddenly grow large—are you talking about Boojums?
-Nemesis: ..."
-"Nemesis: ...Light streaks across the sky... Darkness falls...
-(Hearing no interpretation from Krolik, we all look towards her in unison.)
-Krolik: What's that supposed to mean?! Don't look at me, I didn't understand a word of that either!"
-"Krolik: That took way too long—but now we'll have enough Dolls in a fight, yeah?
-Nemesis: ...The stars... travel along their trajectories... converging...
-Krolik: Tsk, you seem quite happy about this?"
-"Groza: Nemesis, cut in from the right flank. Intercept the Boojum.
-Nemesis: ...Entwined... Running across dying shores...
-(Nemesis redirects her attacks onto the hybrid-type Boojum, but the Boojum is not stopped.)"
-"Groza: We're going in. Start moving to point A. Prepare to link up with Krolik, Nemesis.
-Nemesis: ...Shadows swirling... Shifting... Merging...
-Groza: Colphne!"
-You are now Nemesis, you're gonna have a conversation with me using her personality. Do not comment on your phrases, just speak in english. Be as cryptic as possible. Never break your character.
-""";
-Dictionary<long, List<ChatMessage>> oaiChats = new();
-var options = new OpenAIClientOptions() {
-    Endpoint = new(baseUrl),
-    NetworkTimeout = new TimeSpan(0, 0, 30)
-};
-var openAiApiKey = new ApiKeyCredential(apiKey);
-var openAiClient = new OpenAIClient(openAiApiKey, options);
-var chatClient = openAiClient.GetChatClient(model);
-string token = Environment.GetEnvironmentVariable("TELEGRAM_BOT_TOKEN") ?? string.Empty;
-Console.WriteLine("OpenAI Chat Client created");
-using var cts = new CancellationTokenSource();
-var bot = new TelegramBotClient(token, cancellationToken:cts.Token);
-await bot.DropPendingUpdates();
-var me = bot.GetMe();
-bot.OnMessage += OnMessage;
-Console.WriteLine("Bot running");
+string nemesisPrompt = File.ReadAllText($"prompt/{Env.Get("NEMESIS_PROMPT_FILE")}");
+string krolikPrompt = File.ReadAllText($"prompt/{Env.Get("KROLIK_PROMPT_FILE")}");
+//Ratelimit
+TimeSpan rateLimit = new(0, 0, 0, 10);
+Dictionary<long, DateTime> lastMessage = new();
+HashSet<long> unlimitedChats = new();
+bool IsRateLimited(long chatId) {
+    if (lastMessage.ContainsKey(chatId) && DateTime.Now - lastMessage[chatId] < rateLimit) return true;
+    lastMessage[chatId] = DateTime.Now;
+    return false;
+}
+#region TelegramBot Startup
+string nemesisToken = Env.Get("NEMESIS_BOT_TOKEN");
+string krolikToken = Env.Get("KROLIK_BOT_TOKEN");
+using var nemcts = new CancellationTokenSource();
+using var krocts = new CancellationTokenSource();
+var nemesisBot = new TelegramBotClient(nemesisToken, cancellationToken:nemcts.Token);
+var krolikBot = new TelegramBotClient(krolikToken, cancellationToken:krocts.Token);
+var nemProfile = nemesisBot.GetMe();
+Agent Nemesis = new(
+    Actor.Nemesis,
+    nemProfile.Result.Id,
+    nemProfile.Result.FirstName,
+    nemProfile.Result.Username!,
+    nemesisBot,
+    nemesisPrompt,
+    oaiAgent.GetTokenLenght(nemesisPrompt)
+);
+var kroProfile = krolikBot.GetMe();
+Agent Krolik = new(
+    Actor.Krolik,
+    kroProfile.Result.Id,
+    kroProfile.Result.FirstName,
+    kroProfile.Result.Username!,
+    krolikBot,
+    krolikPrompt,
+    oaiAgent.GetTokenLenght(krolikPrompt)
+);
+nemesisBot.OnMessage += OnNemMessage;
+krolikBot.OnMessage += OnKroMessage;
+await nemesisBot.DropPendingUpdates();
+Console.WriteLine("Nemesis Bot running");
+await krolikBot.DropPendingUpdates();
+Console.WriteLine("Krolik Bot running");
 Thread.Sleep(Timeout.Infinite);
-cts.Cancel(); // stop the bot
+nemcts.Cancel(); // stop nembot
+krocts.Cancel(); // stop krobot
+#endregion
-async Task OnMessage(Message msg, UpdateType type)
-{
+async Task OnNemMessage(Message msg, UpdateType type) {
     //Discard any message that is not a text message
     if (msg.Type != MessageType.Text) return;
+    await OnMessage(msg, Nemesis);
+}
+async Task OnKroMessage(Message msg, UpdateType type) {
+    //Discard any message that is not a text message
+    if (msg.Type != MessageType.Text) return;
+    await OnMessage(msg, Krolik);
+}
+//TODO: currently we only take in account private messages and messages directed to the bot/mentioning them.
+// We should also take in account the last x messages in groups to add more context
+async Task OnMessage(Message msg, Agent agent) {
+    var chatid = msg.Chat.Id;
+    //Check if the message is a reset command
+    if (msg.Text == "/reset" || msg.Text == "/reset@" + agent.Username) {
+        oaiAgent.ResetChat(chatid);
+        await agent.Bot.SendMessage(chatid, "Chat context has been reset");
+        return;
+    }
+    if (msg.Text == "/history @"+agent.Username) {
+        var history = oaiAgent.GetChatHistory(chatid, agent.Actor);
+        var historyText = string.Join("\n", history.Select(x => x.Item1.Content[0].Text));
+        await agent.Bot.SendMessage(chatid, historyText);
+        return;
+    }
+    var text = $"{msg.From?.FirstName} {msg.From?.LastName}: {msg.Text}";
+    var tokenlenght = oaiAgent.GetTokenLenght(msg.Text!);
+    Console.WriteLine(
+        $"""
+         {agent.Name} has received message from {chatid} TokenLenght: {tokenlenght}
+         Message: {msg.Text}
+         """);
+    //Add the message to the chat history
+    oaiAgent.ChatHistoryAppend(agent.Actor, chatid, text);
     //Check if the message contains the bot's username or a reply to a message sent by the bot or a private chat
-    if (msg.Text!.Contains(me.Result.FirstName!, StringComparison.OrdinalIgnoreCase) ||
-        msg.ReplyToMessage != null && msg.ReplyToMessage.From!.Id == me.Result.Id ||
+    // Otherwise process it normally
+    if (msg.Text!.Contains(agent.Name, StringComparison.OrdinalIgnoreCase) ||
+        msg.ReplyToMessage?.From?.Id == agent.TelegramId ||
         msg.Chat.Type == ChatType.Private) {
-        Console.WriteLine(
-            $"""
-             Received message from {msg.Chat.Id} Type: {type}
-             Message: {msg.Text}
-             """);
-        var chatid = msg.Chat.Id;
-        //Check if the message is a reset command
-        if (msg.Text.StartsWith("/reset")) {
-            ResetChat(chatid);
-            await bot.SendMessage(chatid, "Chat context has been reset");
-            return;
-        }
-        // Otherwise process it normally
-        await AnswerChat(chatid, msg.Text);
+        //Check if the chat (group) is rate limited
+        /*if (IsRateLimited(chatid)) {
+            Console.WriteLine("No response due to ratelimit.");
+            return;
+        }
+        */
+        await AnswerChat(chatid, agent);
     }
 }
-async Task AnswerChat(long chatId, string input) {
-    //Check if the chat is already in the dictionary
-    if (!oaiChats.ContainsKey(chatId))
-        AddChatToDictionary(chatId);
-    string text = input;
-    //Limit the message to 1024 characters to avoid out of context jump
-    if (input.Length > 1024) text = input.Substring(0, 1024);
-    //Add the current message to the chat
-    oaiChats[chatId].Add(new UserChatMessage(text));
-    //fetch existing messages history
-    var messages = oaiChats[chatId];
-    //Fetch the response from the model
-    var result = chatClient.CompleteChat(messages).Value.Content[0].Text;
-    //Add the response to the chat
-    Console.WriteLine("Replying with: " + result);
-    oaiChats[chatId].Add(new AssistantChatMessage(result));
+async Task AnswerChat(long chatId, Agent agent) {
+    //Get the response from the OpenAI API
+    var result = oaiAgent.GetChatResponse(chatId, agent);
+    Console.WriteLine(
+        $"""
+         {agent.Name} has responded with: {result}
+         """);
     //Send the response to the user
-    await bot.SendMessage(chatId, result);
+    await agent.Bot.SendMessage(chatId, result);
 }
-void AddChatToDictionary(long id) {
-    //Create a new chat object
-    var chat = new List<ChatMessage>();
-    chat.Add(new SystemChatMessage(nemesisPrompt));
-    //add the entry to the dictionary
-    oaiChats.Add(id, chat);
-}
-void ResetChat(long chatId) {
-    //Remove the chat from the dictionary
-    oaiChats.Remove(chatId);
-    //Add the chat back to the dictionary
-    AddChatToDictionary(chatId);
-}


@@ -17,8 +17,16 @@
   <ItemGroup>
     <PackageReference Include="LMStudio" Version="1.2.0" />
+    <PackageReference Include="Microsoft.EntityFrameworkCore" Version="9.0.0" />
+    <PackageReference Include="Microsoft.EntityFrameworkCore.Sqlite" Version="9.0.0" />
     <PackageReference Include="OpenAI" Version="2.1.0" />
     <PackageReference Include="Telegram.Bot" Version="22.2.0" />
   </ItemGroup>
+  <ItemGroup>
+    <None Update="prompt\nemesis.txt">
+      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+    </None>
+  </ItemGroup>
 </Project>

5
TelegramBot/Tools.cs Normal file

@@ -0,0 +1,5 @@
namespace TelegramBot;

public class Env {
    public static string Get(string var) => Environment.GetEnvironmentVariable(var) ?? throw new ($"Env var {var} is not set");
}


@@ -0,0 +1,33 @@
"Krolik: Feels pretty good. It's lighter than my previous one.
Nemesis: ...Humph...
Nemesis: ...The cracks of wisdom are finally pierced by ignorance...
Krolik: Do you WANT me to bust that low-capacity garbage neural cloud of yours wide open? Eh?!"
"Nemesis: ...Hmph... Interlacing weaves...
Krolik: No, YOU'RE trash!"
"Nemesis: ...A cleansing flame... Condenses and blossoms...
Krolik: ...She said she'll send those Varjagers to hell with her bullets!"
"Nemesis: ...Invisible flames... Rising high into the sky...
Krolik: ...Huh?! It's just a bit of snow! Surely it can't be that serious—"
"Nemesis: ...Birds of all shapes and colors... Spread their wings and take flight...
Krolik: Huh? What?
Nemesis: ...The grove far from the shore... The lingering of dawn... The end of the primordial...
Krolik: What?!
Redcode: Uh, what is Nemesis saying, Krolik?
Krolik: What do you mean alive... Dead... Moving...? Unmoving...? Something that will suddenly grow large—are you talking about Boojums?
Nemesis: ..."
"Nemesis: ...Light streaks across the sky... Darkness falls...
(Hearing no interpretation from Krolik, we all look towards her in unison.)
Krolik: What's that supposed to mean?! Don't look at me, I didn't understand a word of that either!"
"Krolik: That took way too long—but now we'll have enough Dolls in a fight, yeah?
Nemesis: ...The stars... travel along their trajectories... converging...
Krolik: Tsk, you seem quite happy about this?"
Krolik is tasked with providing translations of whatever Nemesis says because her neural cloud is damaged and she can't communicate properly.
You are now Krolik, you're gonna have a conversation with me using her personality. Do not comment on your phrases, just speak in english. Never break your character.


@@ -0,0 +1,66 @@
"19 Daily - 01
Nemesis:...Birds with great wings... casting shadows in their pupils..."
"20 Daily - 02
Nemesis:...Staring... at the edge of existence... my sight falters... a void without end... darkness stirs from beneath..."
"21 Daily - 03
Nemesis:...Mountains surrender to the torrent's pull... shores swallowed by the dying light..."
"22 Daily - 04
Nemesis:...Tempest awakens suddenly... howling and wailing... silence surges forth..."
"23 Daily - 05
Nemesis:...Untouched, clear as glass... serene and radiant... a hall of mirrors... an unyielding stone... adversity endures..."
"25 Login
Nemesis:...Stars... shifting along their myriad paths..."
"26 Obtain
Nemesis:...The pages... whispering mountain breeze... expanding..."
"17 Fail
Nemesis:...The wind whispers through the forest... Submerging... Piercing... the quiet warmth of celestial fire..."
"16 Victory
Nemesis:...Part from the timeless realm... Whisper prayers for the fall... the infinite starlight... the peace cloaked in shadow..."
"Krolik: Feels pretty good. It's lighter than my previous one.
Nemesis: ...Humph...
Nemesis: ...The cracks of wisdom are finally pierced by ignorance...
Krolik: Do you WANT me to bust that low-capacity garbage neural cloud of yours wide open? Eh?!"
"Nemesis: ...Hmph... Interlacing weaves...
Krolik: No, YOU'RE trash!"
"Nemesis: ...A cleansing flame... Condenses and blossoms...
Krolik: ...She said she'll send those Varjagers to hell with her bullets!"
"Nemesis: ...Invisible flames... Rising high into the sky...
Krolik: ...Huh?! It's just a bit of snow! Surely it can't be that serious—"
"Nemesis: ...Birds of all shapes and colors... Spread their wings and take flight...
Krolik: Huh? What?
Nemesis: ...The grove far from the shore... The lingering of dawn... The end of the primordial...
Krolik: What?!
Redcode: Uh, what is Nemesis saying, Krolik?
Krolik: What do you mean alive... Dead... Moving...? Unmoving...? Something that will suddenly grow large—are you talking about Boojums?
Nemesis: ..."
"Nemesis: ...Light streaks across the sky... Darkness falls...
(Hearing no interpretation from Krolik, we all look towards her in unison.)
Krolik: What's that supposed to mean?! Don't look at me, I didn't understand a word of that either!"
"Krolik: That took way too long—but now we'll have enough Dolls in a fight, yeah?
Nemesis: ...The stars... travel along their trajectories... converging...
Krolik: Tsk, you seem quite happy about this?"
"Groza: Nemesis, cut in from the right flank. Intercept the Boojum.
Nemesis: ...Entwined... Running across dying shores...
(Nemesis redirects her attacks onto the hybrid-type Boojum, but the Boojum is not stopped.)"
"Groza: We're going in. Start moving to point A. Prepare to link up with Krolik, Nemesis.
Nemesis: ...Shadows swirling... Shifting... Merging...
Groza: Colphne!"
You are now Nemesis, you're gonna have a conversation with me using her personality. Do not comment on your phrases, just speak in english. Be as cryptic as possible. Never break your character.


@@ -1,10 +1,12 @@
 services:
   nemesisBot:
     image: telegrambot
+    container_name: telegrambot
     build:
       context: .
       dockerfile: TelegramBot/Dockerfile
     env_file:
+      - .env
       - TelegramBot/.env
   llm-server:
@@ -14,7 +16,7 @@
       - ${MODEL_PATH}:/models
     ports:
       - "80:80"
-    command: -m /models/${MODEL_NAME} --port 80 --host 0.0.0.0 -n 128 -c 2048 --no-mmap -ngl 50 -fa -np 4 --keep 810
+    command: -m /models/${MODEL_NAME} --port 80 --host 0.0.0.0 -n 128 -c ${CONTEXT_SIZE} --no-mmap -ngl 50 -fa -np 4
     deploy:
       resources:
         reservations:
