@* @page "/chatroom" *@
@page "/"
@inject NavigationManager navigationManager
@using Microsoft.AspNetCore.SignalR.Client;
@using Microsoft.AspNetCore.SignalR;
@using Newtonsoft.Json;
@using Newtonsoft.Json.Linq
@using System.Net.Http;
@using System.Net.Http.Headers;
@using System.Text;
Blazor LLM Service Test - by PINBlog
@if (!_isChatting)
{
Enter your name to start chatting:
// Error messages
@if (_message != null)
{
@_message
@_message
}
}
else
{
// banner to show current user
You are connected as @_username
// display messages
}
@code {
// ----- component state -----

// Flag indicating whether the user has joined the chat.
private bool _isChatting = false;

// Name of the user who will be chatting.
private string _username;

// Password entered on the login form (checked in Chat()).
private string _password;

// On-screen status / error message.
private string _message;

// New message input bound to the text box.
private string _newMessage;

// Messages rendered in the chat window.
// FIX: restored the generic type argument (lost in an HTML-mangled paste).
private List<Message> _messages = new List<Message>();

// SignalR hub endpoint URL and connection.
private string _hubUrl;
private HubConnection _hubConnection;

// Display name used for AI-generated messages.
private const string _modelname = "[🤖AI]";

// UI enable/disable flags while a response is being generated.
private bool isTxtDisabled;
private bool isBtnDisabled;

// LLM client and the conversation history sent to it.
// FIX: restored the generic type argument (lost in an HTML-mangled paste).
private LLMService _llmService = new LLMService();
private List<History> _chatHistory = new List<History>();
/// <summary>
/// Validates the entered name and password, connects to the SignalR chat hub,
/// seeds the LLM system prompt, and announces the user in the chat room.
/// Sets <c>_message</c> and leaves <c>_isChatting</c> false on any failure.
/// </summary>
public async Task Chat()
{
    // Check username is valid.
    if (string.IsNullOrWhiteSpace(_username))
    {
        _message = "Please enter a name";
        return;
    }

    try
    {
        // FIX: _password.CompareTo(...) threw NullReferenceException when the
        // password box was left empty; string.Equals is null-safe and is the
        // correct call for equality (CompareTo is for ordering).
        // NOTE(review): a hard-coded plain-text password is not real
        // authentication — confirm whether this is demo-only.
        if (!string.Equals(_password, "password", StringComparison.Ordinal))
        {
            _message = "Password is different";
            return;
        }

        // Start chatting and force refresh UI,
        // ref: https://github.com/dotnet/aspnetcore/issues/22159
        _isChatting = true;
        await Task.Delay(1);

        // Remove old messages if any, and seed the LLM system prompt.
        _messages.Clear();
        _chatHistory.Clear();
        _chatHistory.Add(new History
        {
            role = "system",
            content = "You are an intelligent assistant. You always provide well-reasoned answers that are both correct and helpful."
        });

        // Create the chat client.
        string baseUrl = navigationManager.BaseUri;
        _hubUrl = baseUrl.TrimEnd('/') + BlazorChatSampleHub.HubUrl;
        _hubConnection = new HubConnectionBuilder()
            .WithUrl(_hubUrl)
            .Build();

        // Subscribe to server-side error notifications.
        // FIX: restored the generic type arguments on both handlers
        // (lost in an HTML-mangled paste) so the handlers actually bind.
        _hubConnection.On<string>("Error", (errorMessage) =>
        {
            _message = $"ERROR: {errorMessage}";
            _isChatting = false;
        });
        _hubConnection.On<string, string>("Broadcast", BroadcastMessage);

        await _hubConnection.StartAsync();
        await SendAsync($"[Notice] {_username} joined chat room.");
    }
    catch (HubException e)
    {
        // Hub-level failure (original Korean message preserved).
        _message = $"ERROR: 채팅 클라이언트 시작 실패: {e.Message}";
        _isChatting = false;
    }
    catch (Exception e)
    {
        _message = $"ERROR: Failed to start chat client: {e.Message}";
        _isChatting = false;
    }
}
/// <summary>
/// SignalR "Broadcast" handler: appends an incoming message to the chat
/// window, or disconnects when the sender is neither this user nor the AI.
/// </summary>
/// <param name="name">Display name of the sender.</param>
/// <param name="message">Message body.</param>
private void BroadcastMessage(string name, string message)
{
    // Only messages from this user or the AI model are accepted; anything
    // else is treated as unexpected and the client disconnects.
    // FIX: CompareTo(...) != 0 replaced with null-safe string.Equals —
    // CompareTo is for ordering, not equality, and NREs on a null name.
    if (!string.Equals(name, _username, StringComparison.Ordinal) &&
        !string.Equals(name, _modelname, StringComparison.Ordinal))
    {
        // FIX: the returned Task was silently dropped; the discard makes the
        // fire-and-forget intent explicit (this handler cannot be async Task
        // because the hub callback signature is void).
        _ = DisconnectAsync();
        return;
    }

    bool isMine = name.Equals(_username, StringComparison.OrdinalIgnoreCase);
    _messages.Add(new Message(name, message, isMine));

    // Force a UI update — this runs on a SignalR callback, not the UI
    // context, so StateHasChanged must be marshalled through InvokeAsync.
    InvokeAsync(StateHasChanged);
}
/// <summary>
/// Announces the user's departure, then stops and disposes the hub
/// connection. No-op when not currently chatting.
/// </summary>
private async Task DisconnectAsync()
{
    if (!_isChatting)
    {
        return;
    }

    // Must happen while _isChatting is still true — SendAsync guards on it.
    await SendAsync($"[Notice] {_username} left chat room.");
    _isChatting = false;

    // FIX: guard against a null connection, and make sure the connection is
    // disposed even if StopAsync throws.
    if (_hubConnection != null)
    {
        try
        {
            await _hubConnection.StopAsync();
        }
        finally
        {
            await _hubConnection.DisposeAsync();
            _hubConnection = null;
        }
    }
}
/// <summary>
/// Broadcasts the user's message to the hub, forwards non-notice messages to
/// the LLM, broadcasts the assembled AI reply, and trims the conversation
/// history to the model's context size. Notices ("[Notice] ...") are only
/// broadcast, never sent to the LLM.
/// </summary>
/// <param name="message">Text entered by the user.</param>
private async Task SendAsync(string message)
{
    if (!_isChatting || string.IsNullOrWhiteSpace(message))
    {
        return;
    }

    await _hubConnection.SendAsync("Broadcast", _username, message);

    if (!message.StartsWith("[Notice]"))
    {
        _chatHistory.Add(new History { role = "user", content = message });
        try
        {
            // Disable input while the model is generating.
            isTxtDisabled = true;
            isBtnDisabled = true;
            _message = "Generating a response ...";

            var response = await _llmService.SendLLMMessageAsync(_chatHistory);
            // FIX: ".Result" blocked the async pipeline (deadlock risk on a
            // sync-context host); await the task instead.
            var chunks = await _llmService.ParseInputData(response);
            string fitSentence = AssembleStreamedText(chunks);

            await _hubConnection.SendAsync("Broadcast", _modelname, fitSentence);
            _chatHistory.Add(new History { role = "assistant", content = fitSentence });

            TrimChatHistory();
        }
        catch (Exception ex)
        {
            _chatHistory.Add(new History { role = "error", content = $"Error: {ex.Message}" });
        }
        finally
        {
            isTxtDisabled = false;
            isBtnDisabled = false;
            _message = string.Empty;
        }
    }
    _newMessage = string.Empty;
}

/// <summary>
/// Concatenates the streamed delta fragments from parsed SSE chunks into the
/// final reply, stopping at the chunk whose finish_reason is "stop".
/// </summary>
private static string AssembleStreamedText(List<JObject> chunks)
{
    var sb = new StringBuilder();
    foreach (var data in chunks)
    {
        var choice = data["choices"]?[0];
        if (choice == null)
        {
            continue;
        }
        if (choice["finish_reason"] != null &&
            string.Equals(choice["finish_reason"].ToString(), "stop", StringComparison.Ordinal))
        {
            break;
        }
        // FIX: delta/content can be absent on some chunks; the original
        // unconditional .ToString() threw NullReferenceException there.
        sb.Append(choice["delta"]?["content"]?.ToString() ?? string.Empty);
    }
    return sb.ToString();
}

/// <summary>
/// Drops the oldest non-system history entries once the accumulated text
/// exceeds the model's context window; the system prompt at index 0 is
/// always kept.
/// </summary>
private void TrimChatHistory()
{
    const int maxContextLength = 8192;
    // FIX: the original re-parsed stored history entries as SSE "data:"
    // lines, which always produced an empty string (history entries are
    // plain text) — so the history was never actually trimmed. Measure the
    // stored text directly instead.
    int curContextLength = _chatHistory[0].content.Length; // system prompt
    // FIX: stop at i >= 1 — the original ran to i == 0 and counted the
    // system prompt twice.
    for (int i = _chatHistory.Count - 1; i >= 1; i--)
    {
        curContextLength += _chatHistory[i].content.Length;
        if (curContextLength > maxContextLength)
        {
            // Remove entries 1..i (the oldest), keeping the system prompt.
            _chatHistory.RemoveRange(1, i);
            break;
        }
    }
}
/// <summary>
/// A single chat message as rendered in the UI.
/// </summary>
private class Message
{
    public Message(string username, string body, bool mine)
    {
        Username = username;
        Body = body;
        Mine = mine;
    }

    // Display name of the sender.
    public string Username { get; set; }

    // Message text.
    public string Body { get; set; }

    // True when this client authored the message.
    public bool Mine { get; set; }

    // System notices are prefixed with "[Notice]".
    // FIX: explicit Ordinal comparison — the parameterless StartsWith
    // overload is culture-sensitive, which is wrong for a fixed marker
    // string (CA1310).
    public bool IsNotice => Body.StartsWith("[Notice]", StringComparison.Ordinal);

    // CSS class used to align the message bubble.
    public string CSS => Mine ? "sent" : "received";
}
// One entry of the OpenAI-style chat history sent to the LLM.
// Property names are lowercase on purpose: JsonConvert serializes them
// as-is into the {"role": ..., "content": ...} wire format.
private class History
{
// "system", "user", "assistant" — or "error" for locally recorded failures.
public string role { get; set; }
// Plain message text.
public string content { get; set; }
}
/// <summary>
/// Minimal client for an OpenAI-compatible chat-completions endpoint
/// (e.g. LM Studio) using the streaming (SSE) response format.
/// </summary>
private class LLMService
{
    private readonly HttpClient _httpClient;

    // NOTE(review): the endpoint URL is empty — PostAsync will fail until a
    // real chat-completions URL (e.g.
    // "http://localhost:1234/v1/chat/completions") is configured here.
    private const string ApiUrl = "";

    public LLMService()
    {
        _httpClient = new HttpClient();
        // Streamed generations can be slow; allow up to five minutes.
        _httpClient.Timeout = TimeSpan.FromMinutes(5);
    }

    /// <summary>
    /// Posts the chat history as a streaming completion request and returns
    /// the raw response body (a sequence of SSE "data:" lines).
    /// </summary>
    /// <param name="messages">Conversation history, oldest first.</param>
    // FIX: restored the generic type arguments (lost in an HTML-mangled paste).
    public async Task<string> SendLLMMessageAsync(List<History> messages)
    {
        var payload = new
        {
            model = "lmstudio-community/Meta-Llama-3-8B-Instruct-GGUF",
            temperature = 0.8,
            stream = true,
            messages = messages
        };

        var content = new StringContent(JsonConvert.SerializeObject(payload), Encoding.UTF8, "application/json");
        var response = await _httpClient.PostAsync(ApiUrl, content);
        // FIX: surface HTTP failures here (handled by the caller's catch)
        // instead of silently parsing an error body into an empty reply.
        response.EnsureSuccessStatusCode();
        return await response.Content.ReadAsStringAsync();
    }

    /// <summary>
    /// Splits an SSE stream into its "data:" lines and parses each payload
    /// as JSON. Unparseable payloads (e.g. the terminating "[DONE]"
    /// sentinel) are logged and skipped.
    /// </summary>
    // FIX: restored the generic type arguments (lost in an HTML-mangled paste).
    public async Task<List<JObject>> ParseInputData(string inputData)
    {
        var jsonObjects = new List<JObject>();
        // Split the input data by newlines and filter out empty lines.
        var lines = inputData.Split(new[] { '\n' }, StringSplitOptions.RemoveEmptyEntries);
        foreach (var line in lines)
        {
            string trimmedLine = line.Trim();
            if (!trimmedLine.StartsWith("data:", StringComparison.Ordinal))
            {
                continue;
            }
            string jsonString = trimmedLine.Substring(5).Trim();
            try
            {
                jsonObjects.Add(JObject.Parse(jsonString));
            }
            catch (JsonException ex)
            {
                Console.WriteLine($"Failed to parse JSON: {ex.Message}");
            }
        }
        return jsonObjects;
    }
}
}