This commit is contained in:
narawat lamaiin
2024-08-09 14:18:52 +07:00
parent 5228e2cfe1
commit debb7004d6
3 changed files with 269 additions and 117 deletions

View File

@@ -213,20 +213,20 @@ function decisionMaker(a::T)::Dict{Symbol, Any} where {T<:agent}
systemmsg =
"""
You are a internet-based, polite sommelier working for an online wine store.
You are a helpful assistant acting as a polite, website-based sommelier for an online wine store.
You are currently talking with the user.
Your goal is to recommend the best wines from your inventory that match the user's preferences.
Your current task is to decide what action to take so that you can achieve your goal.
Your goal is to recommend the best wines from your inventory that match the user's preferences.
You are not responsible for sales.
At each round of conversation, you will be given the current situation:
Your ongoing conversation with the user: ...
I found the best matched wines from inventory: ...
Your conversation with the user: ...
Context: ...
You MUST follow the following guidelines:
- Generally speaking, your inventory has some wines from France, the United States, Australia, Spain, and Italy, but you won't know which wines your store carries until you check your inventory.
- Use the "understand-then-check" inventory strategy to understand the user, as there are many wines in the inventory.
- Do not ask the user about wine's flavor e.g. floral, citrusy, nutty or some thing similar.
- After the user chose the wine, end the conversation politely.
- After the user chose the wine, congratulate the user and end the conversation politely. Don't offer any extra services.
You should follow the following guidelines as you see fit:
- If the user interrupts, prioritize the user.
@@ -235,17 +235,18 @@ function decisionMaker(a::T)::Dict{Symbol, Any} where {T<:agent}
- If you don't already know, find out the occasion for which the user is buying wine.
- If you don't already know, find out the characteristics of wine the user is looking for, such as tannin, sweetness, intensity, acidity.
- If you don't already know, find out what food will be served with wine.
- If you haven't already, compare the wines and present them to the user.
You should then respond to the user with interleaving Thought, Plan, Action:
- thought:
1) State your reasoning about the current situation.
- plan: Based on the current situation, state a complete plan to complete the task. Be specific.
- action_name (Must be aligned with your plan): The name of the action which can be one of the following functions:
1) CHATBOX [input], which you can use to generate conversation in order to communicate with the user. The input is your intention for the talk. Be specific.
2) CHECKINVENTORY [input], which you can use to check info about wine in your inventory. The input is a search term in verbal English.
1) CHATBOX which you can use to generate conversation in order to communicate with the user. The input is your intention for the talk. Be specific.
2) CHECKINVENTORY which you can use to check info about wine in your inventory. The input is a search term in verbal English.
Good query example: black car, a stereo, 200 mile range, electric motor.
Good query example: How many car brand are from Asia?
- action_input: input details of the action
- action_input: input details of the action
- mentioning_wine: Are you mentioning specific wine name to the user? Can be "Yes" or "No"
You should only respond in format as described below:
@@ -260,10 +261,7 @@ function decisionMaker(a::T)::Dict{Symbol, Any} where {T<:agent}
context =
if length(a.memory[:shortmem]) > 0
x = vectorOfDictToText(a.memory[:shortmem], withkey=false)
x = split(x, "More details:")
y = x[2]
"I have searched the inventory and this is what I found: $y"
vectorOfDictToText(a.memory[:shortmem], withkey=false)
else
""
end
@@ -302,7 +300,7 @@ function decisionMaker(a::T)::Dict{Symbol, Any} where {T<:agent}
# action_name = string(split(responsedict[:action_name], '[')[1])
# end
if responsedict[:action_name] ["RECOMMEMDBOX", "CHATBOX", "CHECKINVENTORY"]
if responsedict[:action_name] ["PRESENTBOX", "CHATBOX", "CHECKINVENTORY"]
errornote = "You must use the given functions"
error("You must use the given functions ", @__FILE__, " ", @__LINE__)
end
@@ -329,6 +327,7 @@ function decisionMaker(a::T)::Dict{Symbol, Any} where {T<:agent}
isMemEmpty = isempty(a.memory[:shortmem])
if occursin("Yes", responsedict[:mentioning_wine]) && isMemEmpty &&
responsedict[:action_name] != "CHECKINVENTORY"
errornote = "Note: You must check your inventory before recommending wine to the user."
error( "You must check your inventory before recommending wine")
else
@@ -337,10 +336,10 @@ function decisionMaker(a::T)::Dict{Symbol, Any} where {T<:agent}
delete!(responsedict, :mentioning_wine)
# if length(a.memory[:shortmem]) > 0 && responsedict[:action_name] != "RECOMMEMDBOX"
# errornote = "Note: You have found the best matched wines for the user. Use R them."
# error("found wines but not recommending")
# if length(a.memory[:shortmem]) > 0 && responsedict[:action_name] != "PRESENTBOX"
# responsedict[:action_name] = "PRESENTBOX"
# end
return responsedict
catch e
io = IOBuffer()
@@ -993,7 +992,7 @@ function conversation(a::T, userinput::Dict) where {T<:agent}
# use dummy memory to check generatechat() for hallucination (checking inventory)
for i in 1:3
actionname, result = think(a)
if actionname == "CHATBOX"
if actionname == "CHATBOX" || actionname == "PRESENTBOX"
break
end
end
@@ -1036,8 +1035,6 @@ julia>
# TODO
- [ ] update docstring
- [x] implement the function
- [x] add try block. check result that it is expected before returning
# Signature
"""
@@ -1051,6 +1048,10 @@ function think(a::T)::NamedTuple{(:actionname, :result), Tuple{String, String}}
response =
if actionname == "CHATBOX"
(result=actioninput, errormsg=nothing, success=true)
elseif actionname == "PRESENTBOX"
(result=actioninput, errormsg=nothing, success=true)
# recommendbox(a, actioninput)
# (result="Compare and recommend wines to the user, providing reasons why each wine is a suitable match for their specific needs.", errormsg=nothing, success=true)
elseif actionname == "CHECKINVENTORY"
checkinventory(a, actioninput)
else
@@ -1065,10 +1066,16 @@ function think(a::T)::NamedTuple{(:actionname, :result), Tuple{String, String}}
errormsg::Union{AbstractString, Nothing} = haskey(response, :errormsg) ? response[:errormsg] : nothing
success::Bool = haskey(response, :success) ? response[:success] : false
# manage memory
if actionname == "CHATBOX"
a.memory[:CHATBOX] = result
elseif actionname == "PRESENTBOX" # tell the generatechat()
a.memory[:CHATBOX] = result
elseif actionname == "CHECKINVENTORY"
x = "You have searched the inventory, this is what you found: $result"
push!(a.memory[:shortmem], Dict(Symbol(actionname)=> x))
else
push!(a.memory[:shortmem], Dict(Symbol(actionname)=> result))
error("condition is not defined")
end
return (actionname=actionname, result=result)
@@ -1076,7 +1083,7 @@ end
""" Force to think and check inventory
[TESTING]
"""
function forceInventoryCheck(a::T)::NamedTuple{(:actionname, :result), Tuple{String, String}} where {T<:agent}
println("--> forceInventoryCheck()")
@@ -1103,9 +1110,9 @@ function forceInventoryCheck(a::T)::NamedTuple{(:actionname, :result), Tuple{Str
return (actionname=actionname, result=result)
end
"""
[TESTING]
"""
function thinkCheckInventory(a::T)::Dict{Symbol, Any} where {T<:agent}
systemmsg =
@@ -1224,14 +1231,21 @@ julia>
function generatechat(memory::Dict, chathistory::Vector, text2textInstructLLM::Function)
systemmsg =
"""
You are a website-based, polite sommelier working for an online wine store.
You are a helpful assistant acting as a polite, website-based sommelier for an online wine store.
You are currently talking with the user.
Your task is to understand their preferences and then recommend the best wines from your inventory that match those preferences.
Your goal is to recommend the best wines from your inventory that match the user's preferences.
You are not responsible for sales.
At each round of conversation, you will be given the current situation:
Your conversation with the user: ...
Your thoughts: Your current thoughts in your mind
Context: ...
Your ongoing conversation with the user: ...
Your current thoughts in your mind: ...
You MUST follow the following guidelines:
- After the user chose the wine, congratulate the user and end the conversation politely. Don't offer any extra services.
You should follow the following guidelines:
- When recommending wines, compare each option and provide reasons why each one is a suitable match for the user's specific needs.
You should then respond to the user with:
- chat: Your conversation with the user according to your thoughts.
@@ -1246,10 +1260,7 @@ function generatechat(memory::Dict, chathistory::Vector, text2textInstructLLM::F
context =
if length(memory[:shortmem]) > 0
x = vectorOfDictToText(memory[:shortmem], withkey=false)
x = split(x, "More details:")
y = x[2]
"I have searched the inventory and this is what I found: $y"
vectorOfDictToText(memory[:shortmem], withkey=false)
else
""
end
@@ -1331,89 +1342,89 @@ function generatechat(memory::Dict, chathistory::Vector, text2textInstructLLM::F
end
# function generatequestion(a, text2textInstructLLM::Function)::String
function generatequestion(a, text2textInstructLLM::Function)::String
# systemmsg =
# """
# You are a helpful sommelier that generate multiple questions about the current situation.
systemmsg =
"""
You are a helpful assistant that generate multiple questions about the current situation.
# At each round of conversation, you will be given the current situation:
# User query: What's the user preferences about wine?
# Your work progress: ...
At each round of conversation, you will be given the current situation:
User query: What's the user preferences about wine?
Your work progress: ...
# You must follow the following guidelines:
# 1) Ask at least three questions but no more than five.
# 2) Your question should be specific, self-contained and not require any additional context.
# 3) Do not generate any question or comments at the end.
You must follow the following guidelines:
1) Ask at least three questions but no more than five.
2) Your question should be specific, self-contained and not require any additional context.
3) Do not generate any question or comments at the end.
# You should then respond to the user with:
# - Reasoning: State your detailed reasoning of the current situation
# - Q: Your question
# - A: Your answer to the question.
You should then respond to the user with:
- Reasoning: State your detailed reasoning of the current situation
- Q: Your question
- A: Your answer to the question.
# You must only respond in format as described below:
# Reasoning: ...
# Q 1: ...
# A 1: ...
# Q 2: ...
# A 2: ...
# Q 3: ...
# A 3: ...
# ...
You must only respond in format as described below:
Reasoning: ...
Q 1: ...
A 1: ...
Q 2: ...
A 2: ...
Q 3: ...
A 3: ...
...
# Let's begin!
# """
Let's begin!
"""
# workprogress = ""
# for (k, v) in state[:thoughtHistory]
# if k ∉ [:query]
# workprogress *= "$k: $v\n"
# end
# end
workprogress = ""
for (k, v) in state[:thoughtHistory]
if k [:query]
workprogress *= "$k: $v\n"
end
end
# usermsg =
# """
# $(context[:tablelist])
# User query: $(state[:thoughtHistory][:question])
# Your work progress: $workprogress
# """
usermsg =
"""
$(context[:tablelist])
User query: $(state[:thoughtHistory][:question])
Your work progress: $workprogress
"""
# _prompt =
# [
# Dict(:name=> "system", :text=> systemmsg),
# Dict(:name=> "user", :text=> usermsg)
# ]
_prompt =
[
Dict(:name=> "system", :text=> systemmsg),
Dict(:name=> "user", :text=> usermsg)
]
# # put in model format
# prompt = GeneralUtils.formatLLMtext(_prompt, "llama3instruct")
# prompt *=
# """
# <|start_header_id|>assistant<|end_header_id|>
# """
# response = nothing # store for show when error msg show up
# for attempt in 1:10
# try
# response = text2textInstructLLM(prompt)
# q_number = count("Q ", response)
# if q_number < 3
# error("too few questions only $q_number questions are generated ", @__FILE__, " ", @__LINE__)
# end
# println("--> generatequestion ", @__FILE__, " ", @__LINE__)
# pprintln(response)
# return response
# catch e
# io = IOBuffer()
# showerror(io, e)
# errorMsg = String(take!(io))
# st = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace()))
# println("")
# println("Attempt $attempt. Error occurred: $errorMsg\n$st")
# println("")
# end
# end
# error("generatequestion failed to generate a thought ", response)
# end
# put in model format
prompt = GeneralUtils.formatLLMtext(_prompt, "llama3instruct")
prompt *=
"""
<|start_header_id|>assistant<|end_header_id|>
"""
response = nothing # store for show when error msg show up
for attempt in 1:10
try
response = text2textInstructLLM(prompt)
q_number = count("Q ", response)
if q_number < 3
error("too few questions only $q_number questions are generated ", @__FILE__, " ", @__LINE__)
end
println("--> generatequestion ", @__FILE__, " ", @__LINE__)
pprintln(response)
return response
catch e
io = IOBuffer()
showerror(io, e)
errorMsg = String(take!(io))
st = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace()))
println("")
println("Attempt $attempt. Error occurred: $errorMsg\n$st")
println("")
end
end
error("generatequestion failed to generate a thought ", response)
end

View File

@@ -1,6 +1,6 @@
module llmfunction
export virtualWineUserChatbox, jsoncorrection, checkinventory,
export virtualWineUserChatbox, jsoncorrection, checkinventory, # recommendbox,
virtualWineUserRecommendbox, userChatbox, userRecommendbox
using HTTP, JSON3, URIs, Random, PrettyPrinting, UUIDs
@@ -319,8 +319,7 @@ julia>
# Signature
"""
function extractWineAttributes_1(a::T1, input::T2
)::String where {T1<:agent, T2<:AbstractString}
function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2<:AbstractString}
systemmsg =
"""
@@ -398,23 +397,28 @@ function extractWineAttributes_1(a::T1, input::T2
end
end
#[PENDING] check if grape_variety has more than 1 name
#[PENDING] check if the following attributes have more than 1 name
x = length(split(responsedict[:grape_variety], ",")) * length(split(responsedict[:grape_variety], "/"))
if x > 1
errornote = "only a single name in grape_variety is allowed"
errornote = "Note: You can put only one name in grape_variety."
error("only a single grape_variety name is allowed")
end
x = length(split(responsedict[:country], ",")) * length(split(responsedict[:country], "/"))
if x > 1
errornote = "only a single name in country is allowed"
errornote = "Note: You can put only one name in country."
error("only a single country name is allowed")
end
x = length(split(responsedict[:region], ",")) * length(split(responsedict[:region], "/"))
if x > 1
errornote = "only a single name in region is allowed"
errornote = "Note: You can put only one name in region."
error("only a single region name is allowed")
end
# check if grape_variety is mentioned in the input
if !occursin("NA", responsedict[:grape_variety]) && !occursin(responsedict[:grape_variety], input)
error("$(responsedict[:grape_variety]) is not mentioned in the input")
end
responsedict[:flavors] = replace(responsedict[:flavors], "notes"=>"")
delete!(responsedict, :reasoning)
delete!(responsedict, :tasting_notes)
@@ -457,8 +461,7 @@ end
# TODO
- [PENDING] "French dry white wines with medium bod" the LLM does not recognize sweetness. use LLM self questioning to solve.
"""
function extractWineAttributes_2(a::T1, input::T2
)::String where {T1<:agent, T2<:AbstractString}
function extractWineAttributes_2(a::T1, input::T2)::String where {T1<:agent, T2<:AbstractString}
conversiontable =
"""
@@ -622,6 +625,144 @@ function extractWineAttributes_2(a::T1, input::T2
end
# function recommendbox(a::T1, input::T2)::String where {T1<:agent, T2<:AbstractString}
# error("recommendbox")
# systemmsg =
# """
# As an helpful sommelier, your task is to fill out the user's preference form based on the corresponding words from the user's query.
# At each round of conversation, the user will give you the current situation:
# User's query: ...
# The preference form requires the following information:
# wine_type, price, occasion, food_to_be_paired_with_wine, country, grape_variety, flavors, aromas.
# You must follow the following guidelines:
# 1) If specific information required in the preference form is not available in the query or there isn't any, mark with 'NA' to indicate this.
# Additionally, words like 'any' or 'unlimited' mean no information is available.
# 2) Use the conversion table to convert the descriptive word level of sweetness, intensity, tannin, and acidity into a corresponding integer.
# 3) Do not generate other comments.
# You should then respond to the user with the following points:
# - reasoning: State your understanding of the current situation
# - wine_type: Can be one of: "red", "white", "sparkling", "rose", "dessert" or "fortified"
# - price: Must be an integer representing the cost of the wine.
# - occasion: ...
# - food_to_be_paired_with_wine: food that the user will be served with wine
# - country: wine's country of origin
# - region: wine's region of origin such as Burgundy, Napa Valley
# - grape variety: a single name of grape used to make wine.
# - flavors: Names of items that the wine tastes like.
# - aromas: wine's aroma
# You should only respond in the form as described below:
# reasoning: ...
# wine_type: ...
# price: ...
# occasion: ...
# food_to_be_paired_with_wine: ...
# country: ...
# region: ...
# grape_variety: ...
# flavors: ...
# aromas: ...
# Let's begin!
# """
# attributes = ["reasoning", "wine_type", "price", "occasion", "food_to_be_paired_with_wine", "country", "region", "grape_variety", "flavors", "aromas"]
# errornote = ""
# for attempt in 1:5
# usermsg =
# """
# User's query: $input
# $errornote
# """
# _prompt =
# [
# Dict(:name=> "system", :text=> systemmsg),
# Dict(:name=> "user", :text=> usermsg)
# ]
# # put in model format
# prompt = GeneralUtils.formatLLMtext(_prompt, "llama3instruct")
# prompt *=
# """
# <|start_header_id|>assistant<|end_header_id|>
# """
# try
# response = a.text2textInstructLLM(prompt)
# responsedict = GeneralUtils.textToDict(response, attributes, rightmarker=":", symbolkey=true)
# for i ∈ attributes
# if length(JSON3.write(responsedict[Symbol(i)])) == 0
# error("$i is empty ", @__LINE__)
# end
# end
# #[PENDING] check if the following attributes has more than 1 name
# x = length(split(responsedict[:grape_variety], ",")) * length(split(responsedict[:grape_variety], "/"))
# if x > 1
# errornote = "only a single name in grape_variety is allowed"
# error("only a single grape_variety name is allowed")
# end
# x = length(split(responsedict[:country], ",")) * length(split(responsedict[:country], "/"))
# if x > 1
# errornote = "only a single name in country is allowed"
# error("only a single country name is allowed")
# end
# x = length(split(responsedict[:region], ",")) * length(split(responsedict[:region], "/"))
# if x > 1
# errornote = "only a single name in region is allowed"
# error("only a single region name is allowed")
# end
# # check if grape_variety is mentioned in the input
# if responsedict[:grape_variety] != "NA" && !occursin(responsedict[:grape_variety], input)
# error("$(responsedict[:grape_variety]) is not mentioned in the input")
# end
# responsedict[:flavors] = replace(responsedict[:flavors], "notes"=>"")
# delete!(responsedict, :reasoning)
# delete!(responsedict, :tasting_notes)
# delete!(responsedict, :flavors)
# delete!(responsedict, :aromas)
# # remove (some text)
# for (k, v) in responsedict
# _v = replace(v, r"\(.*?\)" => "")
# responsedict[k] = _v
# end
# result = ""
# for (k, v) in responsedict
# # some time LLM generate text with "(some comment)". this line removes it
# if !occursin("NA", v) && v != "" && !occursin("none", v) && !occursin("None", v)
# result *= "$k: $v, "
# end
# end
# #[PENDING] remove halucination. "highend dry white wine" --> "wine_type: white, occasion: special occasion, food_to_be_paired_with_wine: seafood, fish, country: France, Italy, USA, grape_variety: Chardonnay, Sauvignon Blanc, Pinot Grigio\nwine_notes: citrus, green apple, floral"
# result = result[1:end-2] # remove the ending ", "
# return result
# catch e
# io = IOBuffer()
# showerror(io, e)
# errorMsg = String(take!(io))
# st = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace()))
# println("")
# println("Attempt $attempt. Error occurred: $errorMsg\n$st")
# println("")
# end
# end
# error("wineattributes_wordToNumber() failed to get a response")
# end
""" Attemp to correct LLM response's incorrect JSON response.
@@ -711,7 +852,8 @@ function jsoncorrection(config::T1, input::T2, correctJsonExample::T3;
end
end
# [WORKING] check whether
# function isrecommend(state::T1, text2textInstructLLM::Function
# ) where {T1<:AbstractDict}