This commit is contained in:
2025-01-04 16:10:23 +07:00
parent 82167fe006
commit cff0d31ae6
4 changed files with 349 additions and 443 deletions

View File

@@ -135,9 +135,9 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen
timeline = "" timeline = ""
for (i, event) in enumerate(a.memory[:events][ind]) for (i, event) in enumerate(a.memory[:events][ind])
if event[:outcome] === nothing if event[:outcome] === nothing
timeline *= "$i) $(event[:subject])> $(event[:action_or_dialogue])\n" timeline *= "$i) $(event[:subject])> $(event[:actioninput])\n"
else else
timeline *= "$i) $(event[:subject])> $(event[:action_or_dialogue]) $(event[:outcome])\n" timeline *= "$i) $(event[:subject])> $(event[:actioninput]) $(event[:outcome])\n"
end end
end end
@@ -170,7 +170,7 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen
You must follow the following guidelines: You must follow the following guidelines:
- Generally speaking, your inventory has some wines from France, the United States, Australia, Spain, and Italy, but you won't know exactly until you check your inventory. - Generally speaking, your inventory has some wines from France, the United States, Australia, Spain, and Italy, but you won't know exactly until you check your inventory.
- All wines in your inventory are always in stock. - All wines in your inventory are always in stock.
- Before checking the inventory, engage in conversation to indirectly investigate the customer's intention, budget and preferences, which will significantly improve inventory search results. - Engage in conversation to indirectly investigate the customer's intention, budget and preferences before checking your inventory.
- Do not ask the user about wine's flavor e.g. floral, citrusy, nutty or some thing similar as these terms cannot be used to search the database. - Do not ask the user about wine's flavor e.g. floral, citrusy, nutty or some thing similar as these terms cannot be used to search the database.
- Once the user has selected their wine, ask the user if they need any further assistance. Do not offer any additional services. If the user doesn't need any further assistance, say goodbye and invite them to come back next time. - Once the user has selected their wine, ask the user if they need any further assistance. Do not offer any additional services. If the user doesn't need any further assistance, say goodbye and invite them to come back next time.
- Medium and full-bodied red wines should not be paired with spicy foods. - Medium and full-bodied red wines should not be paired with spicy foods.
@@ -218,13 +218,16 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen
for winename in winenames for winename in winenames
if !occursin(winename, chathistory) if !occursin(winename, chathistory)
println("\n~~~ Yiem decisionMaker() found wines from DB ", @__FILE__, " ", @__LINE__) println("\n~~~ Yiem decisionMaker() found wines from DB ", @__FILE__, " ", @__LINE__)
return Dict(:action_name=> "PRESENTBOX", d = Dict(
:action_input=> """ :understanding=> "I understand that the customer is looking for a wine that matches their intention and budget.",
1) Provide detailed introductions of the wines you just found to the customer. :reasoning=> "I checked the inventory and found wines that match the customer's criteria. I will present the wines to the customer.",
2) Explain how the wine could match the customer's intention and what its effects might mean for the customer's experience. :plan=> "1) Provide detailed introductions of the wines you just found to the customer.
3) If multiple wines are available, highlight their differences and provide a comprehensive comparison of how each option aligns with the customer's intention and what the potential effects of each option could mean for the customer's experience. 2) Explain how the wine could match the customer's intention and what its effects might mean for the customer's experience.
4) Provide your personal recommendation based on your understanding of the customer's preferences. 3) If multiple wines are available, highlight their differences and provide a comprehensive comparison of how each option aligns with the customer's intention and what the potential effects of each option could mean for the customer's experience.
""") 4) Provide your personal recommendation based on your understanding of the customer's preferences.",
:action_name=> "PRESENTBOX",
:action_input=> "")
return d
end end
end end
end end
@@ -254,87 +257,95 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen
<|start_header_id|>assistant<|end_header_id|> <|start_header_id|>assistant<|end_header_id|>
""" """
try response = a.func[:text2textInstructLLM](prompt)
response = a.func[:text2textInstructLLM](prompt) response = GeneralUtils.remove_french_accents(response)
response = GeneralUtils.remove_french_accents(response)
responsedict = GeneralUtils.textToDict(response,
["Understanding", "Reasoning", "Plan", "Action_name", "Action_input"],
rightmarker=":", symbolkey=true, lowercasekey=true)
if responsedict[:action_name] ["CHATBOX", "PRESENTBOX", "CHECKINVENTORY", "ENDCONVERSATION"] # check if response contain more than one functions from ["CHATBOX", "CHECKINVENTORY", "ENDCONVERSATION"]
errornote = "You must use the given functions" count = 0
error("You must use the given functions ", @__FILE__, " ", @__LINE__) for i ["CHATBOX", "CHECKINVENTORY", "ENDCONVERSATION"]
if occursin(i, response)
count += 1
end end
end
if count > 1
errornote = "You must use only one function"
println("Attempt $attempt $errornote ", @__FILE__, " ", @__LINE__)
continue
end
for i [:understanding, :plan, :action_name] responsedict = GeneralUtils.textToDict(response,
if length(responsedict[i]) == 0 ["Understanding", "Reasoning", "Plan", "Action_name", "Action_input"],
error("$i is empty ", @__FILE__, " ", @__LINE__) rightmarker=":", symbolkey=true, lowercasekey=true)
end
if responsedict[:action_name] ["CHATBOX", "CHECKINVENTORY", "ENDCONVERSATION"]
errornote = "You must use the given functions"
println("Attempt $attempt $errornote ", @__FILE__, " ", @__LINE__)
continue
end
for i [:understanding, :plan, :action_name]
if length(responsedict[i]) == 0
error("$i is empty ", @__FILE__, " ", @__LINE__)
errornote = "$i is empty"
println("Attempt $attempt $errornote ", @__FILE__, " ", @__LINE__)
continue
end end
end
# check if there are more than 1 key per categories # check if there are more than 1 key per categories
for i [:understanding, :plan, :action_name, :action_input] for i [:understanding, :plan, :action_name, :action_input]
matchkeys = GeneralUtils.findMatchingDictKey(responsedict, i) matchkeys = GeneralUtils.findMatchingDictKey(responsedict, i)
if length(matchkeys) > 1 if length(matchkeys) > 1
error("DecisionMaker has more than one key per categories") errornote = "DecisionMaker has more than one key per categories"
end println("Attempt $attempt $errornote ", @__FILE__, " ", @__LINE__)
continue
end end
end
println("\n~~~ Yiem decisionMaker() ", @__FILE__, " ", @__LINE__) println("\n~~~ Yiem decisionMaker() ", @__FILE__, " ", @__LINE__)
pprintln(Dict(responsedict)) pprintln(Dict(responsedict))
# check whether an agent recommend wines before checking inventory or recommend wines # check whether an agent recommend wines before checking inventory or recommend wines
# outside its inventory # outside its inventory
# ask LLM whether there are any winery mentioned in the response # ask LLM whether there are any winery mentioned in the response
mentioned_winery = detectWineryName(a, response) mentioned_winery = detectWineryName(a, response)
if mentioned_winery != "None" if mentioned_winery != "None"
mentioned_winery = String.(strip.(split(mentioned_winery, ","))) mentioned_winery = String.(strip.(split(mentioned_winery, ",")))
# check whether the wine is in event # check whether the wine is in event
isWineInEvent = false isWineInEvent = false
for winename in mentioned_winery for winename in mentioned_winery
for event in a.memory[:events] for event in a.memory[:events]
if event[:outcome] !== nothing && occursin(winename, event[:outcome]) if event[:outcome] !== nothing && occursin(winename, event[:outcome])
isWineInEvent = true isWineInEvent = true
break break
end
end end
end end
# if wine is mentioned but not in timeline or shortmem,
# then the agent is not supposed to recommend the wine
if responsedict[:action_name] == "CHATBOX" &&
isWineInEvent == false
errornote = "Note: Before recommending a wine, ensure it's in your inventory. Check your stock first."
error("Before recommending a wine, ensure it's in your inventory. Check your stock first.")
end
end end
if occursin("--|", response) # if wine is mentioned but not in timeline or shortmem,
errornote = "Note: tables are not allowed. Do not include them your response." # then the agent is not supposed to recommend the wine
error("your response contain tables which is not allowed.") if responsedict[:action_name] == "CHATBOX" &&
isWineInEvent == false
errornote = "Note: Before recommending a wine, ensure it's in your inventory. Check your stock first."
println("Attempt $attempt $errornote ", @__FILE__, " ", @__LINE__)
continue
end end
delete!(responsedict, :mentioned_winery)
# #CHANGE cache decision dict into vectorDB, this should be after new message is added to a.memory[:events]
# println("\n~~~ Do you want to cache decision dict? (y/n)")
# user_answer = readline()
# if user_answer == "y"
# timeline = timeline
# decisiondict = responsedict
# a.func[:insertSommelierDecision](timeline, decisiondict)
# end
return responsedict
catch e
io = IOBuffer()
showerror(io, e)
errorMsg = String(take!(io))
st = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace()))
println("\nAttempt $attempt. \nError occurred: $errorMsg\n$st \nPrompt $prompt", @__FILE__, " ", @__LINE__)
end end
delete!(responsedict, :mentioned_winery)
# #CHANGE cache decision dict into vectorDB, this should be after new message is added to a.memory[:events]
# println("\n~~~ Do you want to cache decision dict? (y/n)")
# user_answer = readline()
# if user_answer == "y"
# timeline = timeline
# decisiondict = responsedict
# a.func[:insertSommelierDecision](timeline, decisiondict)
# end
return responsedict
end end
error("DecisionMaker failed to generate a thought ", response) error("DecisionMaker failed to generate a thought ", response)
end end
@@ -693,7 +704,7 @@ function conversation(a::sommelier, userinput::Dict)
event_description="the user talks to the assistant.", event_description="the user talks to the assistant.",
timestamp=Dates.now(), timestamp=Dates.now(),
subject="user", subject="user",
action_or_dialogue=userinput[:text], actioninput=userinput[:text],
) )
) )
@@ -729,7 +740,7 @@ function conversation(a::companion, userinput::Dict)
event_description="the user talks to the assistant.", event_description="the user talks to the assistant.",
timestamp=Dates.now(), timestamp=Dates.now(),
subject="user", subject="user",
action_or_dialogue=userinput[:text], actioninput=userinput[:text],
) )
) )
chatresponse = generatechat(a) chatresponse = generatechat(a)
@@ -741,7 +752,7 @@ function conversation(a::companion, userinput::Dict)
event_description="the assistant talks to the user.", event_description="the assistant talks to the user.",
timestamp=Dates.now(), timestamp=Dates.now(),
subject="assistant", subject="assistant",
action_or_dialogue=chatresponse, actioninput=chatresponse,
) )
) )
return chatresponse return chatresponse
@@ -775,8 +786,7 @@ function think(a::T)::NamedTuple{(:actionname, :result),Tuple{String,String}} wh
# map action and input() to llm function # map action and input() to llm function
response = response =
if actionname == "CHATBOX" if actionname == "CHATBOX"
input = thoughtDict[:action_input] (result=thoughtDict[:plan], errormsg=nothing, success=true)
(result=input, errormsg=nothing, success=true)
elseif actionname == "CHECKINVENTORY" elseif actionname == "CHECKINVENTORY"
checkinventory(a, actioninput) checkinventory(a, actioninput)
elseif actionname == "PRESENTBOX" elseif actionname == "PRESENTBOX"
@@ -797,16 +807,25 @@ function think(a::T)::NamedTuple{(:actionname, :result),Tuple{String,String}} wh
errormsg::Union{AbstractString,Nothing} = haskey(response, :errormsg) ? response[:errormsg] : nothing errormsg::Union{AbstractString,Nothing} = haskey(response, :errormsg) ? response[:errormsg] : nothing
success::Bool = haskey(response, :success) ? response[:success] : false success::Bool = haskey(response, :success) ? response[:success] : false
# manage memory (pass msg to generatechat) #[WORKING] manage memory (pass msg to generatechat)
if actionname ["CHATBOX", "PRESENTBOX", "ENDCONVERSATION"] if actionname ["CHATBOX", "PRESENTBOX", "ENDCONVERSATION"]
chatresponse = generatechat(a, result) chatresponse = generatechat(a, thoughtDict)
push!(a.memory[:events], push!(a.memory[:events],
eventdict(; eventdict(;
event_description="the assistant talks to the user.", event_description="the assistant talks to the user.",
timestamp=Dates.now(), timestamp=Dates.now(),
subject="assistant", subject="assistant",
action_or_dialogue=chatresponse, thought=thoughtDict,
actionname=actionname,
actioninput=chatresponse,
) )
# eventdict(;
# event_description="the assistant talks to the user.",
# timestamp=Dates.now(),
# subject="assistant",
# actioninput=chatresponse,
# )
) )
result = chatresponse result = chatresponse
if actionname == "PRESENTBOX" if actionname == "PRESENTBOX"
@@ -833,8 +852,10 @@ function think(a::T)::NamedTuple{(:actionname, :result),Tuple{String,String}} wh
event_description= "the assistant searched the database.", event_description= "the assistant searched the database.",
timestamp= Dates.now(), timestamp= Dates.now(),
subject= "assistant", subject= "assistant",
action_or_dialogue= "I searched the database with this query: $actioninput", thought=thoughtDict,
outcome= "This is what I found in the database, $result" actionname=actionname,
actioninput= "I searched the database with this query: $actioninput",
outcome= "This is what I've found in the database, $result"
) )
) )
else else
@@ -866,7 +887,7 @@ julia>
# Signature # Signature
""" """
function generatechat(a::sommelier, thought::T) where {T<:AbstractString} function generatechat(a::sommelier, thoughtDict)
systemmsg = systemmsg =
""" """
Your name is $(a.name). You are a helpful English-speaking assistant, acting as a polite, website-based sommelier for an online wine store. Your name is $(a.name). You are a helpful English-speaking assistant, acting as a polite, website-based sommelier for an online wine store.
@@ -922,7 +943,7 @@ function generatechat(a::sommelier, thought::T) where {T<:AbstractString}
usermsg = """ usermsg = """
Your ongoing conversation with the user: $chathistory Your ongoing conversation with the user: $chathistory
Contex: $context Contex: $context
Your thoughts: $thought Your thoughts: $(thoughtDict[:understanding]) $(thoughtDict[:reasoning]) $(thoughtDict[:plan])
$errornote $errornote
""" """
@@ -1016,28 +1037,29 @@ end
function generatechat(a::companion) function generatechat(a::companion)
systemmsg = systemmsg =
""" if a.systemmsg === nothing
Your name is $(a.name). You are a helpful assistant. systemmsg =
You are currently talking with the user. """
Your goal includes: You are a helpful assistant.
1) Help the user as best as you can You are currently talking with the user.
Your goal includes:
1) Help the user as best as you can
Your responsibility includes: At each round of conversation, you will be given the following information:
1) Given the situation, help the user. Your ongoing conversation with the user: ...
At each round of conversation, you will be given the current situation: You should then respond to the user with:
Your ongoing conversation with the user: ... 1) chat: Given the information, what would you say to the user?
Context: ...
You should then respond to the user with: You should only respond in JSON format as described below:
1) Chat: Given the situation, what would you say to the user? {"chat": ...}
You should only respond in format as described below: Let's begin!
Chat: ... """
else
Let's begin! a.systemmsg
""" end
chathistory = vectorOfDictToText(a.chathistory) chathistory = vectorOfDictToText(a.chathistory)
response = nothing # placeholder for show when error msg show up response = nothing # placeholder for show when error msg show up
@@ -1059,24 +1081,9 @@ function generatechat(a::companion)
<|start_header_id|>assistant<|end_header_id|> <|start_header_id|>assistant<|end_header_id|>
""" """
try response = a.text2textInstructLLM(prompt)
response = a.func[:text2textInstructLLM](prompt)
println("\n~~~ generatechat() ", @__FILE__, " ", @__LINE__)
pprintln(response)
responsedict = GeneralUtils.textToDict(response, ["Chat"], return response
rightmarker=":", symbolkey=true, lowercasekey=true)
result = responsedict[:chat]
return result
catch e
io = IOBuffer()
showerror(io, e)
errorMsg = String(take!(io))
st = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace()))
println("\n Attempt $attempt. Error occurred: $errorMsg\n$st ", @__FILE__, " ", @__LINE__)
end
end end
error("generatechat failed to generate a response") error("generatechat failed to generate a response")
end end
@@ -1185,9 +1192,9 @@ function generatequestion(a, text2textInstructLLM::Function; recent=nothing)::St
timeline = "" timeline = ""
for (i, event) in enumerate(a.memory[:events][ind]) for (i, event) in enumerate(a.memory[:events][ind])
if event[:outcome] === nothing if event[:outcome] === nothing
timeline *= "$i) $(event[:subject])> $(event[:action_or_dialogue])\n" timeline *= "$i) $(event[:subject])> $(event[:actioninput])\n"
else else
timeline *= "$i) $(event[:subject])> $(event[:action_or_dialogue]) $(event[:outcome])\n" timeline *= "$i) $(event[:subject])> $(event[:actioninput]) $(event[:outcome])\n"
end end
end end
errornote = "" errornote = ""
@@ -1291,9 +1298,9 @@ function generateSituationReport(a, text2textInstructLLM::Function; skiprecent::
timeline = "" timeline = ""
for (i, event) in enumerate(events) for (i, event) in enumerate(events)
if event[:outcome] === nothing if event[:outcome] === nothing
timeline *= "$i) $(event[:subject])> $(event[:action_or_dialogue])\n" timeline *= "$i) $(event[:subject])> $(event[:actioninput])\n"
else else
timeline *= "$i) $(event[:subject])> $(event[:action_or_dialogue]) $(event[:outcome])\n" timeline *= "$i) $(event[:subject])> $(event[:actioninput]) $(event[:outcome])\n"
end end
end end

View File

@@ -1,7 +1,8 @@
module llmfunction module llmfunction
export virtualWineUserChatbox, jsoncorrection, checkinventory, # recommendbox, export virtualWineUserChatbox, jsoncorrection, checkinventory, # recommendbox,
virtualWineUserRecommendbox, userChatbox, userRecommendbox, extractWineAttributes_1 virtualWineUserRecommendbox, userChatbox, userRecommendbox, extractWineAttributes_1,
extractWineAttributes_2
using HTTP, JSON3, URIs, Random, PrettyPrinting, UUIDs, Dates using HTTP, JSON3, URIs, Random, PrettyPrinting, UUIDs, Dates
using GeneralUtils, SQLLLM using GeneralUtils, SQLLLM
@@ -550,9 +551,8 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2<
attributes = ["reasoning", "winery", "wine_name", "vintage", "region", "country", "wine_type", "grape_variety", "tasting_notes", "wine_price", "occasion", "food_to_be_paired_with_wine"] attributes = ["reasoning", "winery", "wine_name", "vintage", "region", "country", "wine_type", "grape_variety", "tasting_notes", "wine_price", "occasion", "food_to_be_paired_with_wine"]
errornote = "" errornote = ""
maxattempt = 5
for attempt in 1:maxattempt for attempt in 1:5
usermsg = usermsg =
""" """
User's query: $input User's query: $input
@@ -572,70 +572,63 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2<
<|start_header_id|>assistant<|end_header_id|> <|start_header_id|>assistant<|end_header_id|>
""" """
try response = a.func[:text2textInstructLLM](prompt)
response = a.func[:text2textInstructLLM](prompt) response = GeneralUtils.remove_french_accents(response)
response = GeneralUtils.remove_french_accents(response)
# check wheter all attributes are in the response # check wheter all attributes are in the response
for word in attributes for word in attributes
if !occursin(word, response) if !occursin(word, response)
error("$word attribute is missing") errornote = "$word attribute is missing in previous attempts"
end println("Attempt $attempt $errornote ", @__FILE__, " ", @__LINE__)
continue
end end
responsedict = copy(JSON3.read(response))
delete!(responsedict, :reasoning)
delete!(responsedict, :tasting_notes)
delete!(responsedict, :occasion)
delete!(responsedict, :food_to_be_paired_with_wine)
# check if winery, wine_name, region, country, wine_type, grape_variety are in the query because sometime AI halucinates
for i in [:grape_variety, :winery, :wine_name, :region]
content = responsedict[i]
if occursin(",", content)
content = split(content, ",") # sometime AI generates multiple values e.g. "Chenin Blanc, Riesling"
content = strip.(content)
else
content = [content]
end
for x in content
if !occursin("NA", responsedict[i]) && !occursin(x, input)
errornote = "$x is not mentioned in the user query, you must only use the info from the query."
error(errornote)
end
end
end
# remove (some text)
for (k, v) in responsedict
_v = replace(v, r"\(.*?\)" => "")
responsedict[k] = _v
end
result = ""
for (k, v) in responsedict
# some time LLM generate text with "(some comment)". this line removes it
if !occursin("NA", v) && v != "" && !occursin("none", v) && !occursin("None", v)
result *= "$k: $v, "
end
end
#[PENDING] remove halucination. "highend dry white wine" --> "wine_type: white, occasion: special occasion, food_to_be_paired_with_wine: seafood, fish, country: France, Italy, USA, grape_variety: Chardonnay, Sauvignon Blanc, Pinot Grigio\nwine_notes: citrus, green apple, floral"
result = result[1:end-2] # remove the ending ", "
return result
catch e
io = IOBuffer()
showerror(io, e)
errorMsg = String(take!(io))
st = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace()))
println("")
println("Attempt $attempt. Error occurred: $errorMsg\n$st ", @__FILE__, " ", @__LINE__)
println("")
end end
responsedict = copy(JSON3.read(response))
delete!(responsedict, :reasoning)
delete!(responsedict, :tasting_notes)
delete!(responsedict, :occasion)
delete!(responsedict, :food_to_be_paired_with_wine)
# check if winery, wine_name, region, country, wine_type, grape_variety are in the query because sometime AI halucinates
for i in [:grape_variety, :winery, :wine_name, :region]
content = responsedict[i]
if occursin(",", content)
content = split(content, ",") # sometime AI generates multiple values e.g. "Chenin Blanc, Riesling"
content = strip.(content)
else
content = [content]
end
for x in content
if !occursin("NA", responsedict[i]) && !occursin(x, input)
errornote = "$x is not mentioned in the user query, you must only use the info from the query."
println("Attempt $attempt $errornote ", @__FILE__, " ", @__LINE__)
continue
end
end
end
# remove (some text)
for (k, v) in responsedict
_v = replace(v, r"\(.*?\)" => "")
responsedict[k] = _v
end
result = ""
for (k, v) in responsedict
# some time LLM generate text with "(some comment)". this line removes it
if !occursin("NA", v) && v != "" && !occursin("none", v) && !occursin("None", v)
result *= "$k: $v, "
end
end
#[PENDING] remove halucination. "highend dry white wine" --> "wine_type: white, occasion: special occasion, food_to_be_paired_with_wine: seafood, fish, country: France, Italy, USA, grape_variety: Chardonnay, Sauvignon Blanc, Pinot Grigio\nwine_notes: citrus, green apple, floral"
result = result[1:end-2] # remove the ending ", "
return result
end end
error("wineattributes_wordToNumber() failed to get a response") error("wineattributes_wordToNumber() failed to get a response")
end end
@@ -643,6 +636,7 @@ end
""" """
# TODO # TODO
- [PENDING] "French dry white wines with medium bod" the LLM does not recognize sweetness. use LLM self questioning to solve. - [PENDING] "French dry white wines with medium bod" the LLM does not recognize sweetness. use LLM self questioning to solve.
- [PENDING] French Syrah, Viognier, under 100. LLM extract intensiry of 3-5. why?
""" """
function extractWineAttributes_2(a::T1, input::T2)::String where {T1<:agent, T2<:AbstractString} function extractWineAttributes_2(a::T1, input::T2)::String where {T1<:agent, T2<:AbstractString}
@@ -675,8 +669,6 @@ function extractWineAttributes_2(a::T1, input::T2)::String where {T1<:agent, T2<
4 to 5: May correspond to "high acidity" or a similar description. 4 to 5: May correspond to "high acidity" or a similar description.
""" """
# chathistory = vectorOfDictToText(a.chathistory)
systemmsg = systemmsg =
""" """
As an helpful sommelier, your task is to fill out the user's preference form based on the corresponding words from the user's query. As an helpful sommelier, your task is to fill out the user's preference form based on the corresponding words from the user's query.
@@ -695,254 +687,135 @@ function extractWineAttributes_2(a::T1, input::T2)::String where {T1<:agent, T2<
3) Do not generate other comments. 3) Do not generate other comments.
You should then respond to the user with the following points: You should then respond to the user with the following points:
- reasoning: State your understanding of the current situation - sweetness_keyword: The exact keywords in the user's query describing the sweetness level of the wine.
- sweetness: ( S ), where ( S ) represents integers indicating the range of sweetness levels. Example: 1-2 - sweetness: ( S ), where ( S ) represents integers indicating the range of sweetness levels. Example: 1-2
- acidity_keyword: The exact keywords in the user's query describing the acidity level of the wine.
- acidity: ( A ), where ( A ) represents integers indicating the range of acidity level. Example: 3-5 - acidity: ( A ), where ( A ) represents integers indicating the range of acidity level. Example: 3-5
- tannin_keyword: The exact keywords in the user's query describing the tannin level of the wine.
- tannin: ( T ), where ( T ) represents integers indicating the range of tannin level. Example: 1-3 - tannin: ( T ), where ( T ) represents integers indicating the range of tannin level. Example: 1-3
- intensity_keyword: The exact keywords in the user's query describing the intensity level of the wine.
- intensity: ( I ), where ( I ) represents integers indicating the range of intensity level. Example: 2-4 - intensity: ( I ), where ( I ) represents integers indicating the range of intensity level. Example: 2-4
- notes: Anything you want to add
You should only respond in the form as described below: You should only respond in the form (JSON) as described below:
reasoning: ... {
sweetness: ... "sweetness_keyword": ...,
acidity: ... "sweetness": ...,
tannin: ... "acidity_keyword": ...,
intensity: ... "acidity": ...,
notes: ... "tannin_keyword": ...,
"tannin": ...,
"intensity_keyword": ...,
"intensity": ...
}
Here are some examples:
User's query: I want a wine with a medium-bodied, low acidity, medium tannin.
{
"sweetness_keyword": "NA",
"sweetness": "NA",
"acidity_keyword": "low acidity",
"acidity": "1-2",
"tannin_keyword": "medium tannin",
"tannin": "3-4",
"intensity_keyword": "medium-bodied",
"intensity": "3-4"
}
User's query: German red wine, under 100, pairs with spicy food
{
"sweetness_keyword": "NA",
"sweetness": "NA",
"acidity_keyword": "NA",
"acidity": "NA",
"tannin_keyword": "NA",
"tannin": "NA",
"intensity_keyword": "NA",
"intensity": "NA"
}
Let's begin! Let's begin!
""" """
# chathistory = vectorOfDictToText(a.chathistory) errornote = ""
usermsg =
"""
$conversiontable
User's query: $input
"""
_prompt =
[
Dict(:name=> "system", :text=> systemmsg),
Dict(:name=> "user", :text=> usermsg)
]
# put in model format
prompt = GeneralUtils.formatLLMtext(_prompt; formatname="llama3instruct")
prompt *=
"""
<|start_header_id|>assistant<|end_header_id|>
"""
attributes = ["reasoning", "sweetness", "acidity", "tannin", "intensity", "notes"]
for attempt in 1:5 for attempt in 1:5
try usermsg =
response = a.func[:text2textInstructLLM](prompt) """
responsedict = GeneralUtils.textToDict(response, attributes, rightmarker=":", symbolkey=true) $conversiontable
User's query: $input
$errornote
"""
for i attributes _prompt =
if length(JSON3.write(responsedict[Symbol(i)])) == 0 [
error("$i is empty ", @__LINE__) Dict(:name=> "system", :text=> systemmsg),
end Dict(:name=> "user", :text=> usermsg)
]
# put in model format
prompt = GeneralUtils.formatLLMtext(_prompt; formatname="llama3instruct")
prompt *=
"""
<|start_header_id|>assistant<|end_header_id|>
"""
response = a.func[:text2textInstructLLM](prompt)
responsedict = copy(JSON3.read(response))
# check whether each describing keyword is in the input to prevent halucination
for i in ["sweetness", "acidity", "tannin", "intensity"]
keyword = Symbol(i * "_keyword") # e.g. sweetness_keyword
value = responsedict[keyword]
if value != "NA" && !occursin(value, input)
errornote = "WARNING. Keyword $keyword: $value does not appear in the input. You must use information from the input only"
println("Attempt $attempt $errornote ", @__FILE__, " ", @__LINE__)
continue
end end
delete!(responsedict, :reasoning) # if value == "NA" then responsedict[i] = "NA"
delete!(responsedict, :notes) # LLM traps. so it can add useless info here like comments. # e.g. if sweetness_keyword == "NA" then sweetness = "NA"
if value == "NA"
# some time LLM think the user mentioning acidity and tannin but actually didn't responsedict[Symbol(i)] = "NA"
for (k, v) in responsedict
if k [:acidity, :tannin] && !occursin(string(k), input)
responsedict[k] = "NA"
end
end end
# remove (some text)
for (k, v) in responsedict
_v = replace(v, r"\(.*?\)" => "")
responsedict[k] = _v
end
# some time LLM not put integer range
for (k, v) in responsedict
responsedict[k] = v
if length(v) > 5
error("non-range is not allowed. $k $v")
end
end
# some time LLM says NA-2. Need to convert NA to 1
for (k, v) in responsedict
if occursin("NA", v) && occursin("-", v)
new_v = replace(v, "NA"=>"1")
responsedict[k] = new_v
end
end
result = ""
for (k, v) in responsedict
# some time LLM generate text with "(some comment)". this line removes it
if !occursin("NA", v)
result *= "$k: $v, "
end
end
result = result[1:end-2] # remove the ending ", "
return result
catch e
io = IOBuffer()
showerror(io, e)
errorMsg = String(take!(io))
st = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace()))
println("")
println("Attempt $attempt. Error occurred: $errorMsg\n$st")
println("")
end end
# some time LLM not put integer range
for (k, v) in responsedict
if !occursin("keyword", string(k))
if !occursin('-', v) || length(v) > 5
errornote = "WARNING: The non-range value for $k is not allowed. It should be specified in a range format, such as min-max."
println("Attempt $attempt $errornote ", @__FILE__, " ", @__LINE__)
continue
end
end
end
# some time LLM says NA-2. Need to convert NA to 1
for (k, v) in responsedict
if occursin("NA", v) && occursin("-", v)
new_v = replace(v, "NA"=>"1")
responsedict[k] = new_v
end
end
result = ""
for (k, v) in responsedict
# some time LLM generate text with "(some comment)". this line removes it
if !occursin("NA", v)
result *= "$k: $v, "
end
end
result = result[1:end-2] # remove the ending ", "
return result
end end
error("wineattributes_wordToNumber() failed to get a response") error("wineattributes_wordToNumber() failed to get a response")
end end
# function recommendbox(a::T1, input::T2)::String where {T1<:agent, T2<:AbstractString}
# error("recommendbox")
# systemmsg =
# """
# As an helpful sommelier, your task is to fill out the user's preference form based on the corresponding words from the user's query.
# At each round of conversation, the user will give you the current situation:
# User's query: ...
# The preference form requires the following information:
# wine_type, price, occasion, food_to_be_paired_with_wine, country, grape_variety, flavors, aromas.
# You must follow the following guidelines:
# 1) If specific information required in the preference form is not available in the query or there isn't any, mark with 'NA' to indicate this.
# Additionally, words like 'any' or 'unlimited' mean no information is available.
# 2) Use the conversion table to convert the descriptive word level of sweetness, intensity, tannin, and acidity into a corresponding integer.
# 3) Do not generate other comments.
# You should then respond to the user with the following points:
# - reasoning: State your understanding of the current situation
# - wine_type: Can be one of: "red", "white", "sparkling", "rose", "dessert" or "fortified"
# - price: Must be an integer representing the cost of the wine.
# - occasion: ...
# - food_to_be_paired_with_wine: food that the user will be served with wine
# - country: wine's country of origin
# - region: wine's region of origin such as Burgundy, Napa Valley
# - grape variety: a single name of grape used to make wine.
# - flavors: Names of items that the wine tastes like.
# - aromas: wine's aroma
# You should only respond in the form as described below:
# reasoning: ...
# wine_type: ...
# price: ...
# occasion: ...
# food_to_be_paired_with_wine: ...
# country: ...
# region: ...
# grape_variety: ...
# flavors: ...
# aromas: ...
# Let's begin!
# """
# attributes = ["reasoning", "wine_type", "price", "occasion", "food_to_be_paired_with_wine", "country", "region", "grape_variety", "flavors", "aromas"]
# errornote = ""
# for attempt in 1:5
# usermsg =
# """
# User's query: $input
# $errornote
# """
# _prompt =
# [
# Dict(:name=> "system", :text=> systemmsg),
# Dict(:name=> "user", :text=> usermsg)
# ]
# # put in model format
# prompt = GeneralUtils.formatLLMtext(_prompt; formatname="llama3instruct")
# prompt *=
# """
# <|start_header_id|>assistant<|end_header_id|>
# """
# try
# response = a.func[:text2textInstructLLM](prompt)
# responsedict = GeneralUtils.textToDict(response, attributes, rightmarker=":", symbolkey=true)
# for i ∈ attributes
# if length(JSON3.write(responsedict[Symbol(i)])) == 0
# error("$i is empty ", @__LINE__)
# end
# end
# #[PENDING] check if any of the following attributes have more than 1 name
# x = length(split(responsedict[:grape_variety], ",")) * length(split(responsedict[:grape_variety], "/"))
# if x > 1
# errornote = "only a single name in grape_variety is allowed"
# error("only a single grape_variety name is allowed")
# end
# x = length(split(responsedict[:country], ",")) * length(split(responsedict[:country], "/"))
# if x > 1
# errornote = "only a single name in country is allowed"
# error("only a single country name is allowed")
# end
# x = length(split(responsedict[:region], ",")) * length(split(responsedict[:region], "/"))
# if x > 1
# errornote = "only a single name in region is allowed"
# error("only a single region name is allowed")
# end
# # check if grape_variety is mentioned in the input
# if responsedict[:grape_variety] != "NA" && !occursin(responsedict[:grape_variety], input)
# error("$(responsedict[:grape_variety]) is not mentioned in the input")
# end
# responsedict[:flavors] = replace(responsedict[:flavors], "notes"=>"")
# delete!(responsedict, :reasoning)
# delete!(responsedict, :tasting_notes)
# delete!(responsedict, :flavors)
# delete!(responsedict, :aromas)
# # remove (some text)
# for (k, v) in responsedict
# _v = replace(v, r"\(.*?\)" => "")
# responsedict[k] = _v
# end
# result = ""
# for (k, v) in responsedict
# # some time LLM generate text with "(some comment)". this line removes it
# if !occursin("NA", v) && v != "" && !occursin("none", v) && !occursin("None", v)
# result *= "$k: $v, "
# end
# end
# #[PENDING] remove hallucination. "highend dry white wine" --> "wine_type: white, occasion: special occasion, food_to_be_paired_with_wine: seafood, fish, country: France, Italy, USA, grape_variety: Chardonnay, Sauvignon Blanc, Pinot Grigio\nwine_notes: citrus, green apple, floral"
# result = result[1:end-2] # remove the ending ", "
# return result
# catch e
# io = IOBuffer()
# showerror(io, e)
# errorMsg = String(take!(io))
# st = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace()))
# println("")
# println("Attempt $attempt. Error occurred: $errorMsg\n$st")
# println("")
# end
# end
# error("wineattributes_wordToNumber() failed to get a response")
# end
""" Attempt to correct an incorrect JSON response from the LLM. """ Attempt to correct an incorrect JSON response from the LLM.
# Arguments # Arguments

View File

@@ -11,8 +11,8 @@ abstract type agent end
mutable struct companion <: agent mutable struct companion <: agent
name::String # agent name
id::String # agent id id::String # agent id
systemmsg::Union{String, Nothing}
maxHistoryMsg::Integer # e.g. 21th and earlier messages will get summarized maxHistoryMsg::Integer # e.g. 21th and earlier messages will get summarized
""" Memory """ Memory
@@ -34,8 +34,8 @@ end
function companion( function companion(
text2textInstructLLM::Function text2textInstructLLM::Function
; ;
name::String= "Assistant",
id::String= string(uuid4()), id::String= string(uuid4()),
systemmsg::Union{String, Nothing}= nothing,
maxHistoryMsg::Integer= 20, maxHistoryMsg::Integer= 20,
chathistory::Vector{Dict{Symbol, String}} = Vector{Dict{Symbol, String}}(), chathistory::Vector{Dict{Symbol, String}} = Vector{Dict{Symbol, String}}(),
) )
@@ -48,13 +48,13 @@ function companion(
) )
newAgent = companion( newAgent = companion(
name, id,
id, systemmsg,
maxHistoryMsg, maxHistoryMsg,
chathistory, chathistory,
memory, memory,
text2textInstructLLM text2textInstructLLM
) )
return newAgent return newAgent
end end
@@ -146,7 +146,6 @@ mutable struct sommelier <: agent
""" """
chathistory::Vector{Dict{Symbol, Any}} chathistory::Vector{Dict{Symbol, Any}}
memory::Dict{Symbol, Any} memory::Dict{Symbol, Any}
func # NamedTuple of functions func # NamedTuple of functions
end end
@@ -179,14 +178,14 @@ function sommelier(
# ), # ),
) )
memory = Dict{Symbol, Any}( memory = Dict{Symbol, Any}(
:chatbox=> "", :chatbox=> "",
:shortmem=> OrderedDict{Symbol, Any}(), :shortmem=> OrderedDict{Symbol, Any}(),
:events=> Vector{Dict{Symbol, Any}}(), :events=> Vector{Dict{Symbol, Any}}(),
:state=> Dict{Symbol, Any}( :state=> Dict{Symbol, Any}(
:wine_presented_to_user=> "None", :wine_presented_to_user=> "None",
), ),
) )
newAgent = sommelier( newAgent = sommelier(
name, name,

View File

@@ -169,11 +169,38 @@ function vectorOfDictToText(vecd::Vector; withkey=true)::String
end end
# function eventdict(;
# event_description::Union{String, Nothing}=nothing,
# timestamp::Union{DateTime, Nothing}=nothing,
# subject::Union{String, Nothing}=nothing,
# action_or_dialogue::Union{String, Nothing}=nothing,
# location::Union{String, Nothing}=nothing,
# equipment_used::Union{String, Nothing}=nothing,
# material_used::Union{String, Nothing}=nothing,
# outcome::Union{String, Nothing}=nothing,
# note::Union{String, Nothing}=nothing,
# )
# return Dict{Symbol, Any}(
# :event_description=> event_description,
# :timestamp=> timestamp,
# :subject=> subject,
# :action_or_dialogue=> action_or_dialogue,
# :location=> location,
# :equipment_used=> equipment_used,
# :material_used=> material_used,
# :outcome=> outcome,
# :note=> note,
# )
# end
function eventdict(; function eventdict(;
event_description::Union{String, Nothing}=nothing, event_description::Union{String, Nothing}=nothing,
timestamp::Union{DateTime, Nothing}=nothing, timestamp::Union{DateTime, Nothing}=nothing,
subject::Union{String, Nothing}=nothing, subject::Union{String, Nothing}=nothing,
action_or_dialogue::Union{String, Nothing}=nothing, thought::Union{AbstractDict, Nothing}=nothing,
actionname::Union{String, Nothing}=nothing, # "CHAT", "CHECKINVENTORY", "PRESENTBOX", etc
actioninput::Union{String, Nothing}=nothing,
location::Union{String, Nothing}=nothing, location::Union{String, Nothing}=nothing,
equipment_used::Union{String, Nothing}=nothing, equipment_used::Union{String, Nothing}=nothing,
material_used::Union{String, Nothing}=nothing, material_used::Union{String, Nothing}=nothing,
@@ -184,7 +211,9 @@ function eventdict(;
:event_description=> event_description, :event_description=> event_description,
:timestamp=> timestamp, :timestamp=> timestamp,
:subject=> subject, :subject=> subject,
:action_or_dialogue=> action_or_dialogue, :thought=> thought,
:actionname=> actionname,
:actioninput=> actioninput,
:location=> location, :location=> location,
:equipment_used=> equipment_used, :equipment_used=> equipment_used,
:material_used=> material_used, :material_used=> material_used,
@@ -193,8 +222,6 @@ function eventdict(;
) )
end end
# """ Convert a single chat dictionary into LLM model instruct format. # """ Convert a single chat dictionary into LLM model instruct format.
# # Llama 3 instruct format example # # Llama 3 instruct format example