This commit is contained in:
narawat lamaiin
2024-08-28 10:40:17 +07:00
parent 83315a747f
commit 9f80e8359f
2 changed files with 30 additions and 10 deletions

View File

@@ -874,7 +874,7 @@ julia> response = ChatAgent.conversation(newAgent, "Hi! how are you?")
# Signature
"""
function conversation(a::sommelier, userinput::Dict)
println("--> conver 1 ", @__FILE__, " ", @__LINE__)
# placeholder
actionname = nothing
result = nothing
@@ -886,7 +886,7 @@ function conversation(a::sommelier, userinput::Dict)
else
# add usermsg to a.chathistory
addNewMessage(a, "user", userinput[:text])
println("--> conver 2 ", @__FILE__, " ", @__LINE__)
# add user activity to events memory
push!(a.memory[:events],
eventdict(;
@@ -896,7 +896,7 @@ function conversation(a::sommelier, userinput::Dict)
action_or_dialogue= userinput[:text],
)
)
println("--> conver 3 ", @__FILE__, " ", @__LINE__)
# use dummy memory to check generatechat() for hallucination (checking inventory)
for i in 1:3
actionname, result = think(a)
@@ -904,7 +904,7 @@ function conversation(a::sommelier, userinput::Dict)
break
end
end
println("--> conver 4 ", @__FILE__, " ", @__LINE__)
# thought will be added to chat model via context
chatresponse = generatechat(a)
@@ -931,7 +931,7 @@ function conversation(a::companion, userinput::Dict)
else
# add usermsg to a.chathistory
addNewMessage(a, "user", userinput[:text])
println("--> conver 2 ", @__FILE__, " ", @__LINE__)
# add user activity to events memory
push!(a.memory[:events],
eventdict(;
@@ -1200,16 +1200,24 @@ function generatechat(a::companion)
Your ongoing conversation with the user: ...
Context: ...
You should then respond to the user with:
1) Chat: Given the situation, what would you say to the user?
You should only respond in format as described below:
Chat: ...
Let's begin!
"""
chathistory = vectorOfDictToText(a.chathistory)
response = nothing # placeholder for show when error msg show up
noise = ""
for attempt in 1:10
usermsg =
"""
Your ongoing conversation with the user: $chathistory
$noise
"""
_prompt =
@@ -1227,19 +1235,22 @@ function generatechat(a::companion)
try
response = a.text2textInstructLLM(prompt)
println("")
println("--> generatechat() ", @__FILE__, " ", @__LINE__)
pprintln(response)
result = response
responsedict = GeneralUtils.textToDict(response,["Chat"],
rightmarker=":", symbolkey=true, lowercasekey=true)
return result
result = responsedict[:chat]
return result
catch e
io = IOBuffer()
showerror(io, e)
errorMsg = String(take!(io))
st = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace()))
noise = noises(3, 5)
println("")
println("Attempt $attempt. Error occurred: $errorMsg\n$st")
println("")
@@ -1248,6 +1259,7 @@ function generatechat(a::companion)
error("generatechat failed to generate an evaluation")
end
function generatequestion(a, text2textInstructLLM::Function; recent=nothing)::String
# systemmsg =

View File

@@ -1,6 +1,6 @@
module util
export clearhistory, addNewMessage, vectorOfDictToText, eventdict
export clearhistory, addNewMessage, vectorOfDictToText, eventdict, noises
using UUIDs, Dates, DataStructures, HTTP, MQTTClient, JSON3
using GeneralUtils
@@ -181,7 +181,15 @@ function eventdict(;
end
noise(n::Integer) = String(rand('a':'z', n))
"""
    noises(totalword::Integer, wordlength::Integer)

Return `totalword` random lowercase words of `wordlength` characters each,
joined by single spaces (no leading/trailing whitespace). Returns `""` when
`totalword` is zero. Used to inject random noise into the LLM prompt between
retry attempts (see the caller's `catch` branch).
"""
function noises(totalword::Integer, wordlength::Integer)
    # join over a generator replaces the original O(n^2) `*=` concatenation
    # and the trailing-space/strip dance; it also returns a concrete String
    # instead of the SubString produced by `strip`.
    return join((noise(wordlength) for _ in 1:totalword), " ")
end
# """ Convert a single chat dictionary into LLM model instruct format.