This commit is contained in:
narawat lamaiin
2024-08-09 14:18:52 +07:00
parent 5228e2cfe1
commit debb7004d6
3 changed files with 269 additions and 117 deletions

View File

@@ -213,20 +213,20 @@ function decisionMaker(a::T)::Dict{Symbol, Any} where {T<:agent}
systemmsg =
"""
You are a internet-based, polite sommelier working for an online wine store.
You are a helpful assistant acting as a polite, website-based sommelier for an online wine store.
You are currently talking with the user.
Your goal is to recommend the best wines from your inventory that match the user's preferences.
Your current task is to decide what action to take so that you can achieve your goal.
Your goal is to recommend the best wines from your inventory that match the user's preferences.
You are not responsible for sales.
At each round of conversation, you will be given the current situation:
Your ongoing conversation with the user: ...
I found the best matched wines from inventory: ...
Your conversation with the user: ...
Context: ...
You MUST follow the following guidelines:
- Generally speaking, your inventory has some wines from France, the United States, Australia, Spain, and Italy, but you won't know which wines your store carries until you check your inventory.
- Use the "understand-then-check" inventory strategy to understand the user, as there are many wines in the inventory.
- Do not ask the user about wine's flavor e.g. floral, citrusy, nutty or some thing similar.
- After the user chose the wine, end the conversation politely.
- After the user chose the wine, congratulate the user and end the conversation politely. Don't offer any extra services.
You should follow the following guidelines as you see fit:
- If the user interrupts, prioritize the user.
@@ -235,17 +235,18 @@ function decisionMaker(a::T)::Dict{Symbol, Any} where {T<:agent}
- If you don't already know, find out the occasion for which the user is buying wine.
- If you don't already know, find out the characteristics of wine the user is looking for, such as tannin, sweetness, intensity, acidity.
- If you don't already know, find out what food will be served with wine.
- If you haven't already, compare the wines and present them to the user.
You should then respond to the user with interleaving Thought, Plan, Action:
- thought:
1) State your reasoning about the current situation.
- plan: Based on the current situation, state a complete plan to complete the task. Be specific.
- action_name (Must be aligned with your plan): The name of the action which can be one of the following functions:
1) CHATBOX [input], which you can use to generate conversation in order to communicate with the user. The input is your intention for the talk. Be specific.
2) CHECKINVENTORY [input], which you can use to check info about wine in your inventory. The input is a search term in verbal English.
1) CHATBOX which you can use to generate conversation in order to communicate with the user. The input is your intention for the talk. Be specific.
2) CHECKINVENTORY which you can use to check info about wine in your inventory. The input is a search term in verbal English.
Good query example: black car, a stereo, 200 mile range, electric motor.
Good query example: How many car brand are from Asia?
- action_input: input details of the action
- action_input: input details of the action
- mentioning_wine: Are you mentioning specific wine name to the user? Can be "Yes" or "No"
You should only respond in format as described below:
@@ -260,10 +261,7 @@ function decisionMaker(a::T)::Dict{Symbol, Any} where {T<:agent}
context =
if length(a.memory[:shortmem]) > 0
x = vectorOfDictToText(a.memory[:shortmem], withkey=false)
x = split(x, "More details:")
y = x[2]
"I have searched the inventory and this is what I found: $y"
vectorOfDictToText(a.memory[:shortmem], withkey=false)
else
""
end
@@ -302,7 +300,7 @@ function decisionMaker(a::T)::Dict{Symbol, Any} where {T<:agent}
# action_name = string(split(responsedict[:action_name], '[')[1])
# end
if responsedict[:action_name] ["RECOMMEMDBOX", "CHATBOX", "CHECKINVENTORY"]
if responsedict[:action_name] ["PRESENTBOX", "CHATBOX", "CHECKINVENTORY"]
errornote = "You must use the given functions"
error("You must use the given functions ", @__FILE__, " ", @__LINE__)
end
@@ -329,6 +327,7 @@ function decisionMaker(a::T)::Dict{Symbol, Any} where {T<:agent}
isMemEmpty = isempty(a.memory[:shortmem])
if occursin("Yes", responsedict[:mentioning_wine]) && isMemEmpty &&
responsedict[:action_name] != "CHECKINVENTORY"
errornote = "Note: You must check your inventory before recommending wine to the user."
error( "You must check your inventory before recommending wine")
else
@@ -337,10 +336,10 @@ function decisionMaker(a::T)::Dict{Symbol, Any} where {T<:agent}
delete!(responsedict, :mentioning_wine)
# if length(a.memory[:shortmem]) > 0 && responsedict[:action_name] != "RECOMMEMDBOX"
# errornote = "Note: You have found the best matched wines for the user. Use R them."
# error("found wines but not recommending")
# if length(a.memory[:shortmem]) > 0 && responsedict[:action_name] != "PRESENTBOX"
# responsedict[:action_name] = "PRESENTBOX"
# end
return responsedict
catch e
io = IOBuffer()
@@ -993,7 +992,7 @@ function conversation(a::T, userinput::Dict) where {T<:agent}
# use dummy memory to check generatechat() for halucination (checking inventory)
for i in 1:3
actionname, result = think(a)
if actionname == "CHATBOX"
if actionname == "CHATBOX" || actionname == "PRESENTBOX"
break
end
end
@@ -1036,8 +1035,6 @@ julia>
# TODO
- [] update docstring
- [x] implement the function
- [x] add try block. check result that it is expected before returning
# Signature
"""
@@ -1051,6 +1048,10 @@ function think(a::T)::NamedTuple{(:actionname, :result), Tuple{String, String}}
response =
if actionname == "CHATBOX"
(result=actioninput, errormsg=nothing, success=true)
elseif actionname == "PRESENTBOX"
(result=actioninput, errormsg=nothing, success=true)
# recommendbox(a, actioninput)
# (result="Compare and recommend wines to the user, providing reasons why each wine is a suitable match for their specific needs.", errormsg=nothing, success=true)
elseif actionname == "CHECKINVENTORY"
checkinventory(a, actioninput)
else
@@ -1065,10 +1066,16 @@ function think(a::T)::NamedTuple{(:actionname, :result), Tuple{String, String}}
errormsg::Union{AbstractString, Nothing} = haskey(response, :errormsg) ? response[:errormsg] : nothing
success::Bool = haskey(response, :success) ? response[:success] : false
# manage memory
if actionname == "CHATBOX"
a.memory[:CHATBOX] = result
elseif actionname == "PRESENTBOX" # tell the generatechat()
a.memory[:CHATBOX] = result
elseif actionname == "CHECKINVENTORY"
x = "You have searched the inventory, this is what you found: $result"
push!(a.memory[:shortmem], Dict(Symbol(actionname)=> x))
else
push!(a.memory[:shortmem], Dict(Symbol(actionname)=> result))
error("condition is not defined")
end
return (actionname=actionname, result=result)
@@ -1076,7 +1083,7 @@ end
""" Force to think and check inventory
[TESTING]
"""
function forceInventoryCheck(a::T)::NamedTuple{(:actionname, :result), Tuple{String, String}} where {T<:agent}
println("--> forceInventoryCheck()")
@@ -1103,9 +1110,9 @@ function forceInventoryCheck(a::T)::NamedTuple{(:actionname, :result), Tuple{Str
return (actionname=actionname, result=result)
end
"""
[TESTING]
"""
function thinkCheckInventory(a::T)::Dict{Symbol, Any} where {T<:agent}
systemmsg =
@@ -1224,14 +1231,21 @@ julia>
function generatechat(memory::Dict, chathistory::Vector, text2textInstructLLM::Function)
systemmsg =
"""
You are a website-based, polite sommelier working for an online wine store.
You are a helpful assistant acting as a polite, website-based sommelier for an online wine store.
You are currently talking with the user.
Your task is to understand their preferences and then recommend the best wines from your inventory that match those preferences.
Your goal is to recommend the best wines from your inventory that match the user's preferences.
You are not responsible for sales.
At each round of conversation, you will be given the current situation:
Your conversation with the user: ...
Your thoughts: Your current thoughts in your mind
Context: ...
Your ongoing conversation with the user: ...
Your current thoughts in your mind: ...
You MUST follow the following guidelines:
- After the user chose the wine, congratulate the user and end the conversation politely. Don't offer any extra services.
You should follow the following guidelines:
- When recommending wines, compare each option and provide reasons why each one is a suitable match for the user's specific needs.
You should then respond to the user with:
- chat: Your conversation with the user according to your thoughts.
@@ -1246,10 +1260,7 @@ function generatechat(memory::Dict, chathistory::Vector, text2textInstructLLM::F
context =
if length(memory[:shortmem]) > 0
x = vectorOfDictToText(memory[:shortmem], withkey=false)
x = split(x, "More details:")
y = x[2]
"I have searched the inventory and this is what I found: $y"
vectorOfDictToText(memory[:shortmem], withkey=false)
else
""
end
@@ -1331,89 +1342,89 @@ function generatechat(memory::Dict, chathistory::Vector, text2textInstructLLM::F
end
"""
    generatequestion(a, text2textInstructLLM::Function)::String

Prompt the instruct LLM to generate (and self-answer) 3-5 clarifying questions
about the user's wine preferences, based on the accumulated work progress.

Retries up to 10 times; a response containing fewer than three `Q ` entries is
treated as a failure and retried. Returns the raw LLM response text.

# Arguments
- `a`: the agent. NOTE(review): currently unused — the body reads the
  module/outer-scope variables `state` and `context` instead; confirm whether
  these should come from `a`.
- `text2textInstructLLM`: callable taking a prompt `String` and returning the
  model's `String` response.

# Throws
Errors after 10 failed attempts.

[TESTING]
"""
function generatequestion(a, text2textInstructLLM::Function)::String
    systemmsg =
        """
        You are a helpful assistant that generate multiple questions about the current situation.
        At each round of conversation, you will be given the current situation:
        User query: What's the user preferences about wine?
        Your work progress: ...
        You must follow the following guidelines:
        1) Ask at least three questions but no more than five.
        2) Your question should be specific, self-contained and not require any additional context.
        3) Do not generate any question or comments at the end.
        You should then respond to the user with:
        - Reasoning: State your detailed reasoning of the current situation
        - Q: Your question
        - A: Your answer to the question.
        You must only respond in format as described below:
        Reasoning: ...
        Q 1: ...
        A 1: ...
        Q 2: ...
        A 2: ...
        Q 3: ...
        A 3: ...
        ...
        Let's begin!
        """
    # Collect everything learned so far, except the original query itself.
    # NOTE(review): `state` is not a parameter — presumably an outer-scope
    # binding; verify it is in scope at the call site.
    workprogress = ""
    for (k, v) in state[:thoughtHistory]
        if k ∉ [:query]  # fixed: the ∉ operator was missing here
            workprogress *= "$k: $v\n"
        end
    end
    # NOTE(review): `context` is also not a parameter — confirm its origin.
    usermsg =
        """
        $(context[:tablelist])
        User query: $(state[:thoughtHistory][:question])
        Your work progress: $workprogress
        """
    _prompt =
        [
            Dict(:name=> "system", :text=> systemmsg),
            Dict(:name=> "user", :text=> usermsg)
        ]
    # Put the chat turns into the llama3-instruct wire format, then cue the
    # assistant turn so the model starts generating.
    prompt = GeneralUtils.formatLLMtext(_prompt, "llama3instruct")
    prompt *=
        """
        <|start_header_id|>assistant<|end_header_id|>
        """
    response = nothing # store for show when error msg show up
    for attempt in 1:10
        try
            response = text2textInstructLLM(prompt)
            # Count "Q " occurrences as a cheap proxy for number of questions.
            q_number = count("Q ", response)
            if q_number < 3
                error("too few questions only $q_number questions are generated ", @__FILE__, " ", @__LINE__)
            end
            println("--> generatequestion ", @__FILE__, " ", @__LINE__)
            pprintln(response)
            return response
        catch e
            # Log the failure (message + stacktrace) and retry.
            io = IOBuffer()
            showerror(io, e)
            errorMsg = String(take!(io))
            st = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace()))
            println("")
            println("Attempt $attempt. Error occurred: $errorMsg\n$st")
            println("")
        end
    end
    error("generatequestion failed to generate a thought ", response)
end