This commit is contained in:
narawat lamaiin
2024-08-28 09:14:58 +07:00
parent 3eb6241051
commit 83315a747f
2 changed files with 166 additions and 14 deletions

View File

@@ -1,7 +1,7 @@
module interface module interface
export addNewMessage, conversation, decisionMaker, evaluator, reflector, generatechat export addNewMessage, conversation, decisionMaker, evaluator, reflector, generatechat,
# isterminal, generalconversation
using JSON3, DataStructures, Dates, UUIDs, HTTP, Random, MQTTClient, PrettyPrinting, Serialization using JSON3, DataStructures, Dates, UUIDs, HTTP, Random, MQTTClient, PrettyPrinting, Serialization
using GeneralUtils, LLMMCTS using GeneralUtils, LLMMCTS
@@ -816,6 +816,10 @@ end
# end # end
# Placeholder for a general (non-domain-specific) conversation entry point that
# dispatches on any `agent` subtype. Currently a no-op stub returning `nothing`;
# the concrete implementation is TODO (its export is still commented out).
function generalconversation(a::T, userinput::Dict) where {T<:agent}
end
""" Chat with llm. """ Chat with llm.
@@ -869,7 +873,7 @@ julia> response = ChatAgent.conversation(newAgent, "Hi! how are you?")
# Signature # Signature
""" """
function conversation(a::T, userinput::Dict) where {T<:agent} function conversation(a::sommelier, userinput::Dict)
println("--> conver 1 ", @__FILE__, " ", @__LINE__) println("--> conver 1 ", @__FILE__, " ", @__LINE__)
# place holder # place holder
actionname = nothing actionname = nothing
@@ -902,7 +906,42 @@ function conversation(a::T, userinput::Dict) where {T<:agent}
end end
println("--> conver 4 ", @__FILE__, " ", @__LINE__) println("--> conver 4 ", @__FILE__, " ", @__LINE__)
# thought will be added to chat model via context # thought will be added to chat model via context
chatresponse = generatechat(a.memory, a.chathistory, a.text2textInstructLLM) chatresponse = generatechat(a)
addNewMessage(a, "assistant", chatresponse)
push!(a.memory[:events],
eventdict(;
event_description= "the assistant talks to the user.",
timestamp= Dates.now(),
subject= "assistant",
action_or_dialogue= chatresponse,
)
)
return chatresponse
end
end
"""
    conversation(a::companion, userinput::Dict)

Handle one user turn for a `companion` agent: the literal message
`"newtopic"` resets the chat, otherwise the user message is recorded in the
chat history and event memory and an LLM reply is generated.
NOTE(review): this method is truncated in the visible hunk — the assistant-side
bookkeeping and `end` are outside this view.
"""
function conversation(a::companion, userinput::Dict)
# placeholder for the generated reply; assigned in the else-branch below
chatresponse = nothing
if userinput[:text] == "newtopic"
# "newtopic" is a control message, not chat content: wipe history and re-prompt.
# `clearhistory` is defined elsewhere in this module.
clearhistory(a)
return "Okay. What shall we talk about?"
else
# add usermsg to a.chathistory
addNewMessage(a, "user", userinput[:text])
println("--> conver 2 ", @__FILE__, " ", @__LINE__)
# add user activity to events memory
# `eventdict` (defined elsewhere) builds the event record pushed into memory.
push!(a.memory[:events],
eventdict(;
event_description= "the user talks to the assistant.",
timestamp= Dates.now(),
subject= "user",
action_or_dialogue= userinput[:text],
)
)
# ask the LLM for the assistant's reply given the updated history
chatresponse = generatechat(a)
addNewMessage(a, "assistant", chatresponse) addNewMessage(a, "assistant", chatresponse)
@@ -1015,10 +1054,10 @@ julia>
# Signature # Signature
""" """
function generatechat(memory::Dict, chathistory::Vector, text2textInstructLLM::Function) function generatechat(a::sommelier)
systemmsg = systemmsg =
""" """
Your name is "Jenie". You are a helpful assistant acting as a polite, website-based sommelier for an online wine store. Your name is $(a.name). You are a helpful assistant acting as a polite, website-based sommelier for an online wine store.
You are currently talking with the user. You are currently talking with the user.
Your goal includes: Your goal includes:
1) Help the user select the best wines from your inventory that align with the user's preferences. 1) Help the user select the best wines from your inventory that align with the user's preferences.
@@ -1054,13 +1093,13 @@ function generatechat(memory::Dict, chathistory::Vector, text2textInstructLLM::F
""" """
context = context =
if length(memory[:shortmem]) > 0 if length(a.memory[:shortmem]) > 0
vectorOfDictToText(memory[:shortmem], withkey=false) vectorOfDictToText(a.memory[:shortmem], withkey=false)
else else
"" ""
end end
chathistory = vectorOfDictToText(chathistory) chathistory = vectorOfDictToText(a.chathistory)
errornote = "" errornote = ""
response = nothing # placeholder for show when error msg show up response = nothing # placeholder for show when error msg show up
@@ -1069,7 +1108,7 @@ function generatechat(memory::Dict, chathistory::Vector, text2textInstructLLM::F
""" """
Your ongoing conversation with the user: $chathistory Your ongoing conversation with the user: $chathistory
$context $context
Your thoughts: $(memory[:CHATBOX]) Your thoughts: $(a.memory[:CHATBOX])
$errornote $errornote
""" """
@@ -1087,7 +1126,7 @@ function generatechat(memory::Dict, chathistory::Vector, text2textInstructLLM::F
""" """
try try
response = text2textInstructLLM(prompt) response = a.text2textInstructLLM(prompt)
responsedict = GeneralUtils.textToDict(response,["Mentioning_wine", "Chat"], responsedict = GeneralUtils.textToDict(response,["Mentioning_wine", "Chat"],
rightmarker=":", symbolkey=true, lowercasekey=true) rightmarker=":", symbolkey=true, lowercasekey=true)
@@ -1115,7 +1154,7 @@ function generatechat(memory::Dict, chathistory::Vector, text2textInstructLLM::F
pprintln(Dict(responsedict)) pprintln(Dict(responsedict))
# check if LLM recommend wine before checking inventory # check if LLM recommend wine before checking inventory
isMemEmpty = isempty(memory[:shortmem]) isMemEmpty = isempty(a.memory[:shortmem])
if occursin("Yes", responsedict[:mentioning_wine]) && isMemEmpty if occursin("Yes", responsedict[:mentioning_wine]) && isMemEmpty
errornote = "Note: You can't recommend wines yet. You must check your inventory before recommending wine to the user." errornote = "Note: You can't recommend wines yet. You must check your inventory before recommending wine to the user."
error( "You must check your inventory before recommending wine") error( "You must check your inventory before recommending wine")
@@ -1128,7 +1167,7 @@ function generatechat(memory::Dict, chathistory::Vector, text2textInstructLLM::F
errornote = "" errornote = ""
end end
memory[:CHATBOX] = "" # delete content because it no longer used. a.memory[:CHATBOX] = "" # delete content because it no longer used.
delete!(responsedict, :mentioning_wine) delete!(responsedict, :mentioning_wine)
result = responsedict[:chat] result = responsedict[:chat]
@@ -1146,6 +1185,69 @@ function generatechat(memory::Dict, chathistory::Vector, text2textInstructLLM::F
error("generatechat failed to generate an evaluation") error("generatechat failed to generate an evaluation")
end end
"""
    generatechat(a::companion)

Generate one assistant reply for a `companion` agent by prompting its
`text2textInstructLLM` callback with a fixed system-role description plus the
agent's chat history, rendered in llama3-instruct format. Retries up to 10
times, printing each failure's message and stack trace, and throws when every
attempt fails.
"""
function generatechat(a::companion)
    system_text =
        """
        Your name is $(a.name). You are a helpful assistant.
        You are currently talking with the user.
        Your goal includes:
        1) Help the user as best as you can
        Your responsibility includes:
        1) Given the situation, help the user.
        At each round of conversation, you will be given the current situation:
        Your ongoing conversation with the user: ...
        Context: ...
        Let's begin!
        """
    # Flatten the stored transcript into plain text once; it does not change
    # between retry attempts.
    history_text = vectorOfDictToText(a.chathistory)
    user_text =
        """
        Your ongoing conversation with the user: $history_text
        """
    reply = nothing  # placeholder referenced by the failure/logging path
    for attempt in 1:10
        messages = [
            Dict(:name => "system", :text => system_text),
            Dict(:name => "user", :text => user_text),
        ]
        # Render role/message pairs into the model's prompt format, then cue
        # the model to continue as the assistant.
        fullprompt = GeneralUtils.formatLLMtext(messages; formatname="llama3instruct")
        fullprompt *=
            """
            <|start_header_id|>assistant<|end_header_id|>
            """
        try
            reply = a.text2textInstructLLM(fullprompt)
            println("")
            println("--> generatechat() ", @__FILE__, " ", @__LINE__)
            pprintln(reply)
            return reply
        catch e
            # Capture the error text and backtrace, log them, and retry.
            buf = IOBuffer()
            showerror(buf, e)
            errmsg = String(take!(buf))
            trace = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace()))
            println("")
            println("Attempt $attempt. Error occurred: $errmsg\n$trace")
            println("")
        end
    end
    error("generatechat failed to generate an evaluation")
end
function generatequestion(a, text2textInstructLLM::Function; recent=nothing)::String function generatequestion(a, text2textInstructLLM::Function; recent=nothing)::String
# systemmsg = # systemmsg =

View File

@@ -1,6 +1,6 @@
module type module type
export agent, sommelier export agent, sommelier, companion
using Dates, UUIDs, DataStructures, JSON3 using Dates, UUIDs, DataStructures, JSON3
using GeneralUtils using GeneralUtils
@@ -10,6 +10,56 @@ using GeneralUtils
abstract type agent end abstract type agent end
"""
A general-purpose conversational assistant agent.

Holds one user-facing assistant's identity, chat transcript, working memory,
and the LLM callback used to produce replies.
"""
mutable struct companion <: agent
    name::String            # agent name
    id::String              # agent id
    maxHistoryMsg::Integer  # e.g. 21th and earlier messages will get summarized
    """ Memory
    Ref: Chat prompt format https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGML/discussions/3
    NO "system" message in chathistory because it is added at inference time.
    Example:
    chathistory = [
        Dict(:name=>"user", :text=> "Wassup!", :timestamp=> Dates.now()),
        Dict(:name=>"assistant", :text=> "Hi I'm your assistant.", :timestamp=> Dates.now()),
    ]
    """
    chathistory::Vector{Dict{Symbol, Any}}
    memory::Dict{Symbol, Any}
    # communication function: maps a prompt string to the LLM's reply
    text2textInstructLLM::Function
end
"""
    companion(text2textInstructLLM; name, id, maxHistoryMsg, chathistory)

Outer constructor: build a `companion` agent with freshly initialized memory.

# Arguments
- `text2textInstructLLM::Function`: callback mapping a prompt string to the LLM's reply.
# Keywords
- `name`: display name (default `"Assistant"`).
- `id`: unique identifier (default: a fresh UUID4 string).
- `maxHistoryMsg`: history length before older messages are summarized (default `20`).
- `chathistory`: pre-existing transcript; any vector convertible to
  `Vector{Dict{Symbol, Any}}` (default: empty).
"""
function companion(
        text2textInstructLLM::Function;
        name::String = "Assistant",
        id::String = string(uuid4()),
        maxHistoryMsg::Integer = 20,
        # Widened from Vector{Dict{Symbol, String}}: the old annotation rejected
        # the Vector{Dict{Symbol, Any}} transcripts the struct field itself stores
        # (MethodError when re-loading a saved history). Conversion to the field
        # type happens in the default inner constructor.
        chathistory::AbstractVector = Vector{Dict{Symbol, Any}}(),
    )
    memory = Dict{Symbol, Any}(
        :chatbox => "",  # NOTE(review): sommelier code reads :CHATBOX — confirm intended key case
        :shortmem => Vector{Dict{Symbol, String}}(),
        :events => Vector{Dict{Symbol, Any}}(),
    )
    newAgent = companion(
        name,
        id,
        maxHistoryMsg,
        chathistory,
        memory,
        text2textInstructLLM,
    )
    return newAgent
end
""" A sommelier agent. """ A sommelier agent.
# Arguments # Arguments