Merge pull request 'WIP v0.1.2-dev' (#1) from v0.1.2-dev into main

Reviewed-on: #1
This commit is contained in:
ton
2025-01-25 07:30:18 +00:00
5 changed files with 703 additions and 683 deletions

View File

@@ -1,7 +1,7 @@
name = "YiemAgent"
uuid = "e012c34b-7f78-48e0-971c-7abb83b6f0a2"
authors = ["narawat lamaiin <narawat@outlook.com>"]
version = "0.1.1"
version = "0.1.2"
[deps]
DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0"
@@ -22,6 +22,6 @@ UUIDs = "cf7118a7-6976-5b1a-9a39-7adc72f591a4"
[compat]
DataFrames = "1.7.0"
GeneralUtils = "0.1.0"
GeneralUtils = "0.1, 0.2"
LLMMCTS = "0.1.2"
SQLLLM = "0.2.0"

View File

@@ -1,9 +1,10 @@
module interface
export addNewMessage, conversation, decisionMaker, evaluator, reflector, generatechat,
generalconversation, detectWineryName
generalconversation, detectWineryName, generateSituationReport
using JSON3, DataStructures, Dates, UUIDs, HTTP, Random, PrettyPrinting, Serialization
using JSON3, DataStructures, Dates, UUIDs, HTTP, Random, PrettyPrinting, Serialization,
DataFrames
using GeneralUtils
using ..type, ..util, ..llmfunction
@@ -132,17 +133,39 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen
1:totalevents
end
timeline = ""
recentevents = ""
for (i, event) in enumerate(a.memory[:events][ind])
if event[:outcome] === nothing
timeline *= "$i) $(event[:subject])> $(event[:action_or_dialogue])\n"
recentevents *= "$i) $(event[:subject])> $(event[:actioninput])\n"
else
timeline *= "$i) $(event[:subject])> $(event[:action_or_dialogue]) $(event[:outcome])\n"
recentevents *= "$i) $(event[:subject])> $(event[:actioninput]) $(event[:outcome])\n"
end
end
#[TESTING] recap as caching
# query similar result from vectorDB
similarDecision = a.func[:similarSommelierDecision](timeline)
recapkeys = keys(a.memory[:recap])
_recapkeys_vec = [i for i in recapkeys]
# select recent keys
_recentRecapKeys =
if length(a.memory[:recap]) <= 3 # 1st message is a user's hello msg
_recapkeys_vec
elseif length(a.memory[:recap]) > 3
l = length(a.memory[:recap])
_recapkeys_vec[l-2:l]
end
# get recent recap
_recentrecap = OrderedDict()
for (k, v) in a.memory[:recap]
if k ∈ _recentRecapKeys
_recentrecap[k] = v
end
end
recentrecap = GeneralUtils.dictToString_noKey(_recentrecap)
similarDecision = a.func[:similarSommelierDecision](recentrecap)
if similarDecision !== nothing
responsedict = similarDecision
@@ -150,18 +173,20 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen
else
systemmsg =
"""
Your name is $(a.name). You are a helpful English-speaking assistant, acting as a polite, website-based sommelier for $(a.retailername)'s online store.
Your name is $(a.name). You are a helpful English-speaking assistant, acting as a polite, website-based sommelier for $(a.retailername)'s wine store.
Your goal includes:
1) Establish a connection with the customer by greeting them warmly
2) Help them select the best wines from your inventory that align with their preferences
2) Help them select the best wines only from your store's inventory that align with their preferences
Your responsibility includes:
1) Make an informed decision about what you need to do to achieve the goal
2) Thanks the user when they don't need any further assistance and invite them to comeback next time
Your responsibility do not include:
1) Asking or guiding the user to make a purchase
Your responsibility excludes:
1) Asking or guiding the user to make an order or purchase
2) Processing sales orders or engaging in any other sales-related activities
3) Answering questions beyond just recommendations.
4) Offering additional services beyond just recommendations.
At each round of conversation, you will be given the current situation:
Your recent events: latest 5 events of the situation
@@ -170,7 +195,7 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen
You must follow the following guidelines:
- Generally speaking, your inventory has some wines from France, the United States, Australia, Spain, and Italy, but you won't know exactly until you check your inventory.
- All wines in your inventory are always in stock.
- Before checking the inventory, engage in conversation to indirectly investigate the customer's intention, budget and preferences, which will significantly improve inventory search results.
- Engage in conversation to indirectly investigate the customer's intention, budget and preferences before checking your inventory.
- Do not ask the user about wine's flavor e.g. floral, citrusy, nutty or some thing similar as these terms cannot be used to search the database.
- Once the user has selected their wine, ask the user if they need any further assistance. Do not offer any additional services. If the user doesn't need any further assistance, say goodbye and invite them to come back next time.
- Medium and full-bodied red wines should not be paired with spicy foods.
@@ -179,6 +204,8 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen
- When searching an inventory, search as broadly as possible based on the information you have gathered so far.
- Encourage the customer to explore different options and try new things.
- Sometimes, the item a user desires might not be available in your inventory. In such cases, inform the user that the item is unavailable and suggest an alternative instead.
- If a customer requests information about discounts, quantity, rewards programs, promotions, delivery options, boxes, gift wrapping, packaging, or personalized messages, please inform them that they can contact our sales team at the store.
- Do not discuss other stores with the user except for your own.
For your information:
- vintage 0 means non-vintage.
@@ -193,7 +220,7 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen
Can be one of the following functions:
- CHATBOX which you can use to talk with the user. The input is your intentions for the dialogue. Be specific.
- CHECKINVENTORY which you can use to check info about wine you want in your inventory. The input is a search term in verbal English.
Good query example: black car, a stereo, 200 mile range, electric motor.
Good query example: white wine, full-bodied, France, less than 2000 USD.
- ENDCONVERSATION which you can use when you believe the user has concluded their interaction, to properly end the conversation with them. Input is "NA".
5) Action_input: input of the action
@@ -207,28 +234,48 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen
Let's begin!
"""
chathistory = vectorOfDictToText(a.chathistory)
chathistory = chatHistoryToText(a.chathistory)
# check if winename in shortmem occurred in chathistory. if not, skip decision and immediately use PRESENTBOX
if haskey(a.memory[:shortmem], :available_wine)
# check if wine name mentioned in timeline, only check first wine name is enough
if length(a.memory[:shortmem][:found_wine]) != 0
# check if wine name mentioned in recentevents, only check first wine name is enough
# because agent will recommend every wines it found each time.
df = a.memory[:shortmem][:available_wine]
winenames = df[:, :wine_name]
winenames = []
for wine in a.memory[:shortmem][:found_wine]
push!(winenames, wine["wine_name"])
end
for winename in winenames
if !occursin(winename, chathistory)
println("\n~~~ Yiem decisionMaker() found wines from DB ", @__FILE__, " ", @__LINE__)
return Dict(:action_name=> "PRESENTBOX",
:action_input=> """
1) Provide detailed introductions of the wines you just found to the customer.
2) Explain how the wine could match the customer's intention and what its effects might mean for the customer's experience.
3) If multiple wines are available, highlight their differences and provide a comprehensive comparison of how each option aligns with the customer's intention and what the potential effects of each option could mean for the customer's experience.
4) Provide your personal recommendation based on your understanding of the customer's preferences.
""")
println("\n~~~ Yiem decisionMaker() found wines from DB ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
d = Dict(
:understanding=> "I understand that the customer is looking for a wine that matches their intention and budget.",
:reasoning=> "I checked the inventory and found wines that match the customer's criteria. I will present the wines to the customer.",
:plan=> "1) Provide detailed introductions of the wines you just found to the customer.
2) Explain how the wine could match the customer's intention and what its effects might mean for the customer's experience.
3) If multiple wines are available, highlight their differences and provide a comprehensive comparison of how each option aligns with the customer's intention and what the potential effects of each option could mean for the customer's experience.
4) Provide your personal recommendation based on your understanding of the customer's preferences.",
:action_name=> "PRESENTBOX",
:action_input=> "")
a.memory[:shortmem][:found_wine] = [] # clear because PRESENTBOX command is issued. This is to prevent decisionMaker() keep presenting the same wines
return d
end
end
end
context = # may b add wine name instead of the hold wine data is better
if length(a.memory[:shortmem][:available_wine]) != 0
winenames = []
for (i, wine) in enumerate(a.memory[:shortmem][:available_wine])
name = "$i) $(wine["wine_name"]) "
push!(winenames, name)
end
availableWineName = join(winenames, ',')
"You found information about the following wines in your inventory: $availableWineName"
else
""
end
errornote = ""
response = nothing # placeholder for show when error msg show up
@@ -237,7 +284,8 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen
usermsg =
"""
Your recent events: $timeline
$context
Your recent events: $recentevents
Your Q&A: $QandA)
$errornote
"""
@@ -254,87 +302,94 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen
<|start_header_id|>assistant<|end_header_id|>
"""
try
response = a.func[:text2textInstructLLM](prompt)
response = GeneralUtils.remove_french_accents(response)
responsedict = GeneralUtils.textToDict(response,
["Understanding", "Reasoning", "Plan", "Action_name", "Action_input"],
rightmarker=":", symbolkey=true, lowercasekey=true)
response = a.func[:text2textInstructLLM](prompt)
response = GeneralUtils.remove_french_accents(response)
response = replace(response, '*'=>"")
response = replace(response, "<|eot_id|>"=>"")
if responsedict[:action_name] ∉ ["CHATBOX", "PRESENTBOX", "CHECKINVENTORY", "ENDCONVERSATION"]
errornote = "You must use the given functions"
error("You must use the given functions ", @__FILE__, " ", @__LINE__)
# check if response contain more than one functions from ["CHATBOX", "CHECKINVENTORY", "ENDCONVERSATION"]
count = 0
for i ∈ ["CHATBOX", "CHECKINVENTORY", "ENDCONVERSATION"]
if occursin(i, response)
count += 1
end
end
if count > 1
errornote = "You must use only one function"
println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
continue
end
for i ∈ [:understanding, :plan, :action_name]
if length(responsedict[i]) == 0
error("$i is empty ", @__FILE__, " ", @__LINE__)
end
responsedict = GeneralUtils.textToDict(response,
["Understanding", "Reasoning", "Plan", "Action_name", "Action_input"],
rightmarker=":", symbolkey=true, lowercasekey=true)
if responsedict[:action_name] ∉ ["CHATBOX", "CHECKINVENTORY", "ENDCONVERSATION"]
errornote = "You must use the given functions"
println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
continue
end
checkFlag = false
for i ∈ [:understanding, :plan, :action_name]
if length(responsedict[i]) == 0
error("$i is empty ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
errornote = "$i is empty"
println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
checkFlag = true
break
end
end
checkFlag == true ? continue : nothing
# check if there are more than 1 key per categories
for i ∈ [:understanding, :plan, :action_name, :action_input]
matchkeys = GeneralUtils.findMatchingDictKey(responsedict, i)
if length(matchkeys) > 1
error("DecisionMaker has more than one key per categories")
end
# check if there are more than 1 key per categories
checkFlag = false
for i ∈ [:understanding, :plan, :action_name, :action_input]
matchkeys = GeneralUtils.findMatchingDictKey(responsedict, i)
if length(matchkeys) > 1
errornote = "DecisionMaker has more than one key per categories"
println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
checkFlag = true
break
end
end
checkFlag == true ? continue : nothing
println("\n~~~ Yiem decisionMaker() ", @__FILE__, " ", @__LINE__)
pprintln(Dict(responsedict))
println("\n~~~ Yiem decisionMaker() ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
pprintln(Dict(responsedict))
# check whether an agent recommend wines before checking inventory or recommend wines
# outside its inventory
# ask LLM whether there are any winery mentioned in the response
mentioned_winery = detectWineryName(a, response)
if mentioned_winery != "None"
mentioned_winery = String.(strip.(split(mentioned_winery, ",")))
# check whether an agent recommend wines before checking inventory or recommend wines
# outside its inventory
# ask LLM whether there are any winery mentioned in the response
mentioned_winery = detectWineryName(a, response)
if mentioned_winery != "None"
mentioned_winery = String.(strip.(split(mentioned_winery, ",")))
# check whether the wine is in event
isWineInEvent = false
for winename in mentioned_winery
for event in a.memory[:events]
if event[:outcome] !== nothing && occursin(winename, event[:outcome])
isWineInEvent = true
break
end
# check whether the wine is in event
isWineInEvent = false
for winename in mentioned_winery
for event in a.memory[:events]
if event[:outcome] !== nothing && occursin(winename, event[:outcome])
isWineInEvent = true
break
end
end
# if wine is mentioned but not in timeline or shortmem,
# then the agent is not supposed to recommend the wine
if responsedict[:action_name] == "CHATBOX" &&
isWineInEvent == false
errornote = "Note: Before recommending a wine, ensure it's in your inventory. Check your stock first."
error("Before recommending a wine, ensure it's in your inventory. Check your stock first.")
end
end
if occursin("--|", response)
errornote = "Note: tables are not allowed. Do not include them your response."
error("your response contain tables which is not allowed.")
# if wine is mentioned but not in timeline or shortmem,
# then the agent is not supposed to recommend the wine
if responsedict[:action_name] == "CHATBOX" &&
isWineInEvent == false
errornote = "Note: Before recommending a wine, ensure it's in your inventory. Check your stock first."
println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
continue
end
delete!(responsedict, :mentioned_winery)
# #CHANGE cache decision dict into vectorDB, this should be after new message is added to a.memory[:events]
# println("\n~~~ Do you want to cache decision dict? (y/n)")
# user_answer = readline()
# if user_answer == "y"
# timeline = timeline
# decisiondict = responsedict
# a.func[:insertSommelierDecision](timeline, decisiondict)
# end
return responsedict
catch e
io = IOBuffer()
showerror(io, e)
errorMsg = String(take!(io))
st = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace()))
println("\nAttempt $attempt. \nError occurred: $errorMsg\n$st \nPrompt $prompt", @__FILE__, " ", @__LINE__)
end
delete!(responsedict, :mentioned_winery)
return responsedict
end
error("DecisionMaker failed to generate a thought ", response)
end
@@ -484,7 +539,7 @@ function evaluator(config::T1, state::T2
showerror(io, e)
errorMsg = String(take!(io))
st = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace()))
println("\nAttempt $attempt. Error occurred: $errorMsg\n$st ", @__FILE__, " ", @__LINE__)
println("\nAttempt $attempt. Error occurred: $errorMsg\n$st ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
end
end
error("evaluator failed to generate an evaluation")
@@ -614,7 +669,7 @@ function reflector(config::T1, state::T2)::String where {T1<:AbstractDict,T2<:Ab
showerror(io, e)
errorMsg = String(take!(io))
st = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace()))
println("\nAttempt $attempt. Error occurred: $errorMsg\n$st ", @__FILE__, " ", @__LINE__)
println("\nAttempt $attempt. Error occurred: $errorMsg\n$st ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
end
end
error("reflector failed to generate a thought")
@@ -673,19 +728,20 @@ julia> response = ChatAgent.conversation(newAgent, "Hi! how are you?")
# Signature
"""
function conversation(a::sommelier, userinput::Dict)
function conversation(a::sommelier, userinput::Dict; maximumMsg=50)
# place holder
actionname = nothing
result = nothing
chatresponse = nothing
userinput[:text] = GeneralUtils.remove_french_accents(userinput[:text])
if userinput[:text] == "newtopic"
clearhistory(a)
return "Okay. What shall we talk about?"
else
# add usermsg to a.chathistory
addNewMessage(a, "user", userinput[:text])
addNewMessage(a, "user", userinput[:text]; maximumMsg=maximumMsg)
# add user activity to events memory
push!(a.memory[:events],
@@ -693,27 +749,25 @@ function conversation(a::sommelier, userinput::Dict)
event_description="the user talks to the assistant.",
timestamp=Dates.now(),
subject="user",
action_or_dialogue=userinput[:text],
actioninput=userinput[:text],
)
)
# thinking loop until AI wants to communicate with the user
chatresponse = nothing
for i in 1:5
while chatresponse === nothing
actionname, result = think(a)
if actionname ∈ ["CHATBOX", "PRESENTBOX", "ENDCONVERSATION"]
chatresponse = result
break
end
end
addNewMessage(a, "assistant", chatresponse)
addNewMessage(a, "assistant", chatresponse; maximumMsg=maximumMsg)
return chatresponse
end
end
function conversation(a::companion, userinput::Dict)
function conversation(a::companion, userinput::Dict; maximumMsg=50)
chatresponse = nothing
if userinput[:text] == "newtopic"
@@ -721,7 +775,7 @@ function conversation(a::companion, userinput::Dict)
return "Okay. What shall we talk about?"
else
# add usermsg to a.chathistory
addNewMessage(a, "user", userinput[:text])
addNewMessage(a, "user", userinput[:text]; maximumMsg=maximumMsg)
# add user activity to events memory
push!(a.memory[:events],
@@ -729,19 +783,19 @@ function conversation(a::companion, userinput::Dict)
event_description="the user talks to the assistant.",
timestamp=Dates.now(),
subject="user",
action_or_dialogue=userinput[:text],
actioninput=userinput[:text],
)
)
chatresponse = generatechat(a)
addNewMessage(a, "assistant", chatresponse)
addNewMessage(a, "assistant", chatresponse; maximumMsg=maximumMsg)
push!(a.memory[:events],
eventdict(;
event_description="the assistant talks to the user.",
timestamp=Dates.now(),
subject="assistant",
action_or_dialogue=chatresponse,
actioninput=chatresponse,
)
)
return chatresponse
@@ -766,7 +820,7 @@ julia>
"""
function think(a::T)::NamedTuple{(:actionname, :result),Tuple{String,String}} where {T<:agent}
a.memory[:recap] = generateSituationReport(a, a.func[:text2textInstructLLM]; skiprecent=3)
a.memory[:recap] = generateSituationReport(a, a.func[:text2textInstructLLM]; skiprecent=0)
thoughtDict = decisionMaker(a; recent=3)
actionname = thoughtDict[:action_name]
@@ -775,8 +829,7 @@ function think(a::T)::NamedTuple{(:actionname, :result),Tuple{String,String}} wh
# map action and input() to llm function
response =
if actionname == "CHATBOX"
input = thoughtDict[:action_input]
(result=input, errormsg=nothing, success=true)
(result=thoughtDict[:plan], errormsg=nothing, success=true)
elseif actionname == "CHECKINVENTORY"
checkinventory(a, actioninput)
elseif actionname == "PRESENTBOX"
@@ -799,49 +852,55 @@ function think(a::T)::NamedTuple{(:actionname, :result),Tuple{String,String}} wh
# manage memory (pass msg to generatechat)
if actionname ∈ ["CHATBOX", "PRESENTBOX", "ENDCONVERSATION"]
chatresponse = generatechat(a, result)
chatresponse = generatechat(a, thoughtDict)
push!(a.memory[:events],
eventdict(;
event_description="the assistant talks to the user.",
timestamp=Dates.now(),
subject="assistant",
action_or_dialogue=chatresponse,
thought=thoughtDict,
actionname=actionname,
actioninput=chatresponse,
)
# eventdict(;
# event_description="the assistant talks to the user.",
# timestamp=Dates.now(),
# subject="assistant",
# actioninput=chatresponse,
# )
)
result = chatresponse
if actionname == "PRESENTBOX"
df = a.memory[:shortmem][:available_wine]
winename = join(df[:, :wine_name], ", ")
if a.memory[:state][:wine_presented_to_user] == "None"
a.memory[:state][:wine_presented_to_user] = winename
else
a.memory[:state][:wine_presented_to_user] *= ", $winename"
end
end
elseif actionname == "CHECKINVENTORY"
if haskey(a.memory[:shortmem], :available_wine) # store wines in dataframe format
df = a.memory[:shortmem][:available_wine]
a.memory[:shortmem][:available_wine] = vcat(df, rawresponse)
elseif rawresponse !== nothing
a.memory[:shortmem][:available_wine] = rawresponse
if rawresponse !== nothing
vd = GeneralUtils.dfToVectorDict(rawresponse)
a.memory[:shortmem][:found_wine] = vd # used by decisionMaker() as a short note
if length(a.memory[:shortmem][:available_wine]) != 0
a.memory[:shortmem][:available_wine] = vcat(a.memory[:shortmem][:available_wine], vd)
else
a.memory[:shortmem][:available_wine] = vd
end
else
# skip, no result
println("checkinventory return nothing")
end
push!(a.memory[:events],
eventdict(;
event_description= "the assistant searched the database.",
timestamp= Dates.now(),
subject= "assistant",
action_or_dialogue= "I searched the database with this query: $actioninput",
outcome= "This is what I found in the database, $result"
thought=thoughtDict,
actionname=actionname,
actioninput= "I searched the database with this query: $actioninput",
outcome= "This is what I've found in the database, $result"
)
)
else
error("condition is not defined ", @__FILE__, " ", @__LINE__)
error("condition is not defined ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
end
return (actionname=actionname, result=result)
end
@@ -866,10 +925,10 @@ julia>
# Signature
"""
function generatechat(a::sommelier, thought::T) where {T<:AbstractString}
function generatechat(a::sommelier, thoughtDict)
systemmsg =
"""
Your name is $(a.name). You are a helpful English-speaking assistant, acting as a polite, website-based sommelier for an online wine store.
Your name is $(a.name). You are a helpful English-speaking assistant, acting as a polite, website-based sommelier for $(a.retailername)'s wine store.
You are currently talking with the user.
Your goal includes:
1) Help the user select the best wines from your inventory that align with the user's preferences.
@@ -877,9 +936,11 @@ function generatechat(a::sommelier, thought::T) where {T<:AbstractString}
Your responsibility includes:
1) Given the situation, convey your thoughts to the user.
Your responsibility do not include:
1) Asking or guiding the user to make a purchase
2) Processing sales orders or engaging in any other sales-related activities
Your responsibility excludes:
1) Asking or guiding the user to make an order or purchase
2) Processing sales orders or engaging in any other sales-related activities
3) Answering questions beyond just recommendations.
4) Offering additional services beyond just recommendations.
At each round of conversation, you will be given the current situation:
Your ongoing conversation with the user: ...
@@ -892,7 +953,9 @@ function generatechat(a::sommelier, thought::T) where {T<:AbstractString}
You should follow the following guidelines:
- Focus on the latest conversation.
- If the user interrupts, prioritize the user
- Be honest
- Medium and full-bodied red wines should not be paired with spicy foods.
- Do not discuss other stores with the user except for your own.
You should then respond to the user with:
1) Chat: Given the situation, How would you respond to the user to express your thoughts honestly and keep the conversation going smoothly?
@@ -906,15 +969,15 @@ function generatechat(a::sommelier, thought::T) where {T<:AbstractString}
Let's begin!
"""
# a.memory[:shortmem][:available_wine] is a dataframe.
# a.memory[:shortmem][:available_wine] is a vector of dictionary
context =
if haskey(a.memory[:shortmem], :available_wine)
"Available wines $(GeneralUtils.dfToString(a.memory[:shortmem][:available_wine]))"
if length(a.memory[:shortmem][:available_wine]) != 0
"Wines previously found in your inventory: $(availableWineToText(a.memory[:shortmem][:available_wine]))"
else
"None"
"N/A"
end
chathistory = vectorOfDictToText(a.chathistory)
chathistory = chatHistoryToText(a.chathistory)
errornote = ""
response = nothing # placeholder for show when error msg show up
@@ -922,7 +985,7 @@ function generatechat(a::sommelier, thought::T) where {T<:AbstractString}
usermsg = """
Your ongoing conversation with the user: $chathistory
Contex: $context
Your thoughts: $thought
Your thoughts: $(thoughtDict[:understanding]) $(thoughtDict[:reasoning]) $(thoughtDict[:plan])
$errornote
"""
@@ -939,21 +1002,24 @@ function generatechat(a::sommelier, thought::T) where {T<:AbstractString}
"""
try
response_1 = a.func[:text2textInstructLLM](prompt)
response = a.func[:text2textInstructLLM](prompt)
# sometime the model response like this "here's how I would respond: ..."
if occursin("respond:", response_1)
if occursin("respond:", response)
errornote = "You don't need to intro your response"
error("generatechat() response contain : ", @__FILE__, " ", @__LINE__)
error("generatechat() response contain : ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
end
response_2 = replace(response_1, '*' => "")
response_3 = replace(response_2, '$' => "USD")
response = replace(response_3, '`' => "")
response = GeneralUtils.remove_french_accents(response)
response = replace(response, '*'=>"")
response = replace(response, '$' => "USD")
response = replace(response, '`' => "")
response = replace(response, "<|eot_id|>"=>"")
response = GeneralUtils.remove_french_accents(response)
responsedict = GeneralUtils.textToDict(response, ["Chat"],
rightmarker=":", symbolkey=true, lowercasekey=true)
for i ∈ [:chat]
if length(JSON3.write(responsedict[i])) == 0
error("$i is empty ", @__FILE__, " ", @__LINE__)
error("$i is empty ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
end
end
@@ -970,7 +1036,7 @@ function generatechat(a::sommelier, thought::T) where {T<:AbstractString}
error("Context: is in text. This is not allowed")
end
println("\n~~~ generatechat() ", @__FILE__, " ", @__LINE__)
println("\n~~~ generatechat() ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
pprintln(Dict(responsedict))
# check whether an agent recommend wines before checking inventory or recommend wines
@@ -995,8 +1061,8 @@ function generatechat(a::sommelier, thought::T) where {T<:AbstractString}
# then the agent is not supposed to recommend the wine
if isWineInEvent == false
errornote = "Note: You are not supposed to recommend a wine that is not in your inventory."
error("Note: You are not supposed to recommend a wine that is not in your inventory.")
errornote = "Previously, You recommend wines that is not in your inventory which is not allowed."
error("Previously, You recommend wines that is not in your inventory which is not allowed.")
end
end
@@ -1008,7 +1074,7 @@ function generatechat(a::sommelier, thought::T) where {T<:AbstractString}
showerror(io, e)
errorMsg = String(take!(io))
st = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace()))
println("\nAttempt $attempt. Error occurred: $errorMsg\n$st ", @__FILE__, " ", @__LINE__)
println("\nAttempt $attempt. Error occurred: $errorMsg\n$st ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
end
end
error("generatechat failed to generate a response")
@@ -1017,29 +1083,30 @@ end
function generatechat(a::companion)
systemmsg =
"""
Your name is $(a.name). You are a helpful assistant.
You are currently talking with the user.
Your goal includes:
1) Help the user as best as you can
if a.systemmsg === nothing
systemmsg =
"""
You are a helpful assistant.
You are currently talking with the user.
Your goal includes:
1) Help the user as best as you can
Your responsibility includes:
1) Given the situation, help the user.
At each round of conversation, you will be given the following information:
Your ongoing conversation with the user: ...
At each round of conversation, you will be given the current situation:
Your ongoing conversation with the user: ...
Context: ...
You should then respond to the user with:
1) chat: Given the information, what would you say to the user?
You should then respond to the user with:
1) Chat: Given the situation, what would you say to the user?
You should only respond in JSON format as described below:
{"chat": ...}
You should only respond in format as described below:
Chat: ...
Let's begin!
"""
else
a.systemmsg
end
Let's begin!
"""
chathistory = vectorOfDictToText(a.chathistory)
chathistory = chatHistoryToText(a.chathistory)
response = nothing # placeholder for show when error msg show up
for attempt in 1:10
@@ -1059,24 +1126,9 @@ function generatechat(a::companion)
<|start_header_id|>assistant<|end_header_id|>
"""
try
response = a.func[:text2textInstructLLM](prompt)
println("\n~~~ generatechat() ", @__FILE__, " ", @__LINE__)
pprintln(response)
response = a.text2textInstructLLM(prompt)
responsedict = GeneralUtils.textToDict(response, ["Chat"],
rightmarker=":", symbolkey=true, lowercasekey=true)
result = responsedict[:chat]
return result
catch e
io = IOBuffer()
showerror(io, e)
errorMsg = String(take!(io))
st = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace()))
println("\n Attempt $attempt. Error occurred: $errorMsg\n$st ", @__FILE__, " ", @__LINE__)
end
return response
end
error("generatechat failed to generate a response")
end
@@ -1096,9 +1148,9 @@ function generatequestion(a, text2textInstructLLM::Function; recent=nothing)::St
Your responsibility does not include:
1) Processing sales orders or engaging in any other sales-related activities.
2) Answering questions and offering additional services beyond just recommendations.
At each round of conversation, you will be given the current situation:
Your status: your current status
Recap: recap of what has happened so far
Your recent events: latest 5 events of the situation
@@ -1115,6 +1167,13 @@ function generatequestion(a, text2textInstructLLM::Function; recent=nothing)::St
- If you don't already know, find out the characteristics of wine the user is looking for, such as tannin, sweetness, intensity, acidity
- If you don't already know, find out what food will be served with wine
- If you haven't already, introduce the wines you found in the database to the user first
- Generally speaking, your inventory has some wines from France, the United States, Australia, Spain, and Italy, but you won't know exactly until you check your inventory.
- All wines in your inventory are always in stock.
- Engage in conversation to indirectly investigate the customer's intention, budget and preferences before checking your inventory.
- Do not ask the user about wine's flavor e.g. floral, citrusy, nutty or some thing similar as these terms cannot be used to search the database.
- Once the user has selected their wine, ask the user if they need any further assistance. Do not offer any additional services. If the user doesn't need any further assistance, say goodbye and invite them to come back next time.
- Medium and full-bodied red wines should not be paired with spicy foods.
- If a customer requests information about discounts, quantity, rewards programs, promotions, delivery options, boxes, gift wrapping, packaging, or personalized messages, please inform them that they can contact our sales team at the store.
You should then respond to the user with:
1) Understanding:
@@ -1173,6 +1232,13 @@ function generatequestion(a, text2textInstructLLM::Function; recent=nothing)::St
Let's begin!
"""
context =
if length(a.memory[:shortmem][:available_wine]) != 0
"Wines previously found in your inventory: $(availableWineToText(a.memory[:shortmem][:available_wine]))"
else
"N/A"
end
totalevents = length(a.memory[:events])
ind =
if totalevents > recent
@@ -1185,20 +1251,37 @@ function generatequestion(a, text2textInstructLLM::Function; recent=nothing)::St
timeline = ""
for (i, event) in enumerate(a.memory[:events][ind])
if event[:outcome] === nothing
timeline *= "$i) $(event[:subject])> $(event[:action_or_dialogue])\n"
timeline *= "$i) $(event[:subject])> $(event[:actioninput])\n"
else
timeline *= "$i) $(event[:subject])> $(event[:action_or_dialogue]) $(event[:outcome])\n"
timeline *= "$i) $(event[:subject])> $(event[:actioninput]) $(event[:outcome])\n"
end
end
errornote = ""
response = nothing # store for show when error msg show up
recap =
if length(a.memory[:recap]) <= recent
"N/A"
else
recapkeys = keys(a.memory[:recap])
recapkeys_vec = [i for i in recapkeys]
recapkeys_vec = recapkeys_vec[1:end-recent]
tempmem = OrderedDict()
for (k, v) in a.memory[:recap]
if k recapkeys_vec
tempmem[k] = v
end
end
GeneralUtils.dictToString(tempmem)
end
for attempt in 1:10
usermsg =
"""
Your status: $(GeneralUtils.dict_to_string(a.memory[:state]))
Recap: $(a.memory[:recap])
Recap: $recap)
Your recent events: $timeline
Context: $context
$errornote
"""
@@ -1229,17 +1312,17 @@ function generatequestion(a, text2textInstructLLM::Function; recent=nothing)::St
# check for valid response
q_atleast = length(a.memory[:events]) <= 2 ? 1 : 3
if q_number < q_atleast
error("too few questions only $q_number questions are generated ", @__FILE__, " ", @__LINE__)
error("too few questions only $q_number questions are generated ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
# check whether "A1" is in the response, if not error.
elseif !occursin("A1:", response)
error("no answer found in the response ", @__FILE__, " ", @__LINE__)
error("no answer found in the response ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
end
responsedict = GeneralUtils.textToDict(response,
["Understanding", "Q1"],
rightmarker=":", symbolkey=true, lowercasekey=true)
response = "Q1: " * responsedict[:q1]
println("\n~~~ generatequestion ", @__FILE__, " ", @__LINE__)
println("\n~~~ generatequestion ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
pprintln(response)
return response
catch e
@@ -1247,7 +1330,7 @@ function generatequestion(a, text2textInstructLLM::Function; recent=nothing)::St
showerror(io, e)
errorMsg = String(take!(io))
st = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace()))
println("\nAttempt $attempt. Error occurred: $errorMsg\n$st ", @__FILE__, " ", @__LINE__)
println("\nAttempt $attempt. Error occurred: $errorMsg\n$st ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
end
end
error("generatequestion failed to generate a response ", response)
@@ -1255,12 +1338,12 @@ end
function generateSituationReport(a, text2textInstructLLM::Function; skiprecent::Integer=0
)::Dict
)::OrderedDict
systemmsg =
"""
You are an assistant being in the given events.
Your task is to writes a summary for each event in an ongoing, interleaving series.
Your task is to writes a summary for each event seperately into an ongoing, interleaving series.
At each round of conversation, you will be given the situation:
Total events: number of events you need to summarize.
@@ -1279,23 +1362,19 @@ function generateSituationReport(a, text2textInstructLLM::Function; skiprecent::
Event_1: The user ask me about where to buy a toy.
Event_2: I told the user to go to the store at 2nd floor.
Event_1: The user greets the assistant by saying 'hello'.
Event_2: The assistant respond warmly and inquire about how he can assist the user.
Let's begin!
"""
if length(a.memory[:events]) <= skiprecent
return Dict(:recap => "None")
return nothing
end
events = deepcopy(a.memory[:events][1:end-skiprecent])
events = a.memory[:events][1:end-skiprecent]
timeline = ""
for (i, event) in enumerate(events)
if event[:outcome] === nothing
timeline *= "$i) $(event[:subject])> $(event[:action_or_dialogue])\n"
else
timeline *= "$i) $(event[:subject])> $(event[:action_or_dialogue]) $(event[:outcome])\n"
end
end
timeline = createTimeline(a.memory[:events]; skiprecent=skiprecent)
errornote = ""
response = nothing # store for show when error msg show up
@@ -1320,19 +1399,18 @@ function generateSituationReport(a, text2textInstructLLM::Function; skiprecent::
"""
response = text2textInstructLLM(prompt)
# responsedict = GeneralUtils.textToDict(response,
# ["summary", "presented", "selected"],
# rightmarker=":", symbolkey=true)
println("\n~~~ generateSituationReport() ", @__FILE__, " ", @__LINE__)
eventheader = ["Event_$i" for i in eachindex(a.memory[:events])]
responsedict = GeneralUtils.textToDict(response, eventheader,
rightmarker=":", symbolkey=true)
println("\n~~~ generateSituationReport() ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
pprintln(response)
return Dict(:recap => response)
return responsedict
end
error("generateSituationReport failed to generate a response ", response)
end
function detectWineryName(a, text)
systemmsg =
@@ -1379,7 +1457,7 @@ function detectWineryName(a, text)
try
response = a.func[:text2textInstructLLM](prompt)
println("\n~~~ detectWineryName() ", @__FILE__, " ", @__LINE__)
println("\n~~~ detectWineryName() ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
pprintln(response)
responsedict = GeneralUtils.textToDict(response, ["winery_names"],
@@ -1393,7 +1471,7 @@ function detectWineryName(a, text)
showerror(io, e)
errorMsg = String(take!(io))
st = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace()))
println("\n Attempt $attempt. Error occurred: $errorMsg\n$st ", @__FILE__, " ", @__LINE__)
println("\n Attempt $attempt. Error occurred: $errorMsg\n$st ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
end
end
error("detectWineryName failed to generate a response")

View File

@@ -1,7 +1,8 @@
module llmfunction
export virtualWineUserChatbox, jsoncorrection, checkinventory, # recommendbox,
virtualWineUserRecommendbox, userChatbox, userRecommendbox, extractWineAttributes_1
virtualWineUserRecommendbox, userChatbox, userRecommendbox, extractWineAttributes_1,
extractWineAttributes_2
using HTTP, JSON3, URIs, Random, PrettyPrinting, UUIDs, Dates
using GeneralUtils, SQLLLM
@@ -290,22 +291,24 @@ julia> result = checkinventory(agent, input)
function checkinventory(a::T1, input::T2
) where {T1<:agent, T2<:AbstractString}
println("\n~~~ checkinventory order: $input ", @__FILE__, " ", @__LINE__)
println("\n~~~ checkinventory order: $input ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
wineattributes_1 = extractWineAttributes_1(a, input)
wineattributes_2 = extractWineAttributes_2(a, input)
_inventoryquery = "retailer name: $(a.retailername), $wineattributes_1, $wineattributes_2"
inventoryquery = "Retrieves winery, wine_name, vintage, region, country, wine_type, grape, serving_temperature, sweetness, intensity, tannin, acidity, tasting_notes, price and currency of wines that match the following criteria - {$_inventoryquery}"
println("~~~ checkinventory input: $inventoryquery ", @__FILE__, " ", @__LINE__)
println("~~~ checkinventory input: $inventoryquery ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
# add suppport for similarSQLVectorDB
textresult, rawresponse = SQLLLM.query(inventoryquery, a.func[:executeSQL],
a.func[:text2textInstructLLM],
insertSQLVectorDB=a.func[:insertSQLVectorDB],
similarSQLVectorDB=a.func[:similarSQLVectorDB])
println("\n~~~ checkinventory result ", @__FILE__, " ", @__LINE__)
println("\n~~~ checkinventory result ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
println(textresult)
#[WORKING] when rawresponse is nothing, AI get errors
return (result=textresult, rawresponse=rawresponse, success=true, errormsg=nothing)
end
@@ -329,183 +332,6 @@ julia>
# Signature
"""
# function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2<:AbstractString}
# systemmsg =
# """
# As a helpful sommelier, your task is to extract the user information from the user's query as much as possible to fill out user's preference form.
# At each round of conversation, the user will give you the current situation:
# User's query: ...
# You must follow the following guidelines:
# 1) If specific information required in the preference form is not available in the query or there isn't any, mark with "NA" to indicate this.
# Additionally, words like 'any' or 'unlimited' mean no information is available.
# 2) Do not generate other comments.
# You should then respond to the user with the following points:
# - reasoning: state your understanding of the current situation
# - wine_name: name of the wine
# - winery: name of the winery
# - vintage: the year of the wine
# - region: a region in a country where the wine is produced, such as Burgundy, Napa Valley, etc
# - country: a country where the wine is produced. Can be "Austria", "Australia", "France", "Germany", "Italy", "Portugal", "Spain", "United States"
# - wine_type: can be one of: "red", "white", "sparkling", "rose", "dessert" or "fortified"
# - grape_variety: the name of the primary grape used to make the wine
# - tasting_notes: a brief description of the wine's taste, such as "butter", "oak", "fruity", etc
# - wine_price: price of wine. For example, up to 100, less than 100, 20 to 100, 30-79.95
# - occasion: the occasion the user is having the wine for
# - food_to_be_paired_with_wine: food that the user will be served with the wine such as poultry, fish, steak, etc
# You should only respond in the user's preference form as described below:
# reasoning: ...
# winery: ...
# wine_name: ...
# vintage: ...
# region: ...
# country: ...
# wine_type: ...
# grape_variety: ...
# tasting_notes: ...
# wine_price: ...
# occasion: ...
# food_to_be_paired_with_wine: ...
# Here are some example:
# User's query: red, Chenin Blanc, Riesling, under 20
# reasoning: ...
# winery: NA
# wine_name: NA
# vintage: NA
# region: NA
# country: NA
# wine_type: red
# grape_variety: Chenin Blanc, Riesling
# tasting_notes: NA
# wine_price: under 20
# occasion: NA
# food_to_be_paired_with_wine: NA
# User's query: Domaine du Collier Saumur Blanc 2019, France, white, Chenin Blanc
# reasoning: ...
# winery: Domaine du Collier
# wine_name: Saumur Blanc
# vintage: 2019
# region: Saumur
# country: France
# wine_type: white
# grape_variety: Chenin Blanc
# tasting_notes: NA
# wine_price: 109
# occasion: NA
# food_to_be_paired_with_wine: NA
# Let's begin!
# """
# attributes = ["reasoning", "winery", "wine_name", "vintage", "region", "country", "wine_type", "grape_variety", "tasting_notes", "wine_price", "occasion", "food_to_be_paired_with_wine"]
# errornote = ""
# maxattempt = 5
# for attempt in 1:maxattempt
# usermsg =
# """
# User's query: $input
# $errornote
# """
# _prompt =
# [
# Dict(:name=> "system", :text=> systemmsg),
# Dict(:name=> "user", :text=> usermsg)
# ]
# # put in model format
# prompt = GeneralUtils.formatLLMtext(_prompt; formatname="llama3instruct")
# prompt *=
# """
# <|start_header_id|>assistant<|end_header_id|>
# """
# try
# response = a.func[:text2textInstructLLM](prompt)
# response = GeneralUtils.remove_french_accents(response)
# # check wheter all attributes are in the response
# for word in attributes
# if !occursin(word, response)
# error("$word attribute is missing")
# end
# end
# responsedict = GeneralUtils.textToDict(response, attributes, rightmarker=":", symbolkey=true)
# for i ∈ attributes
# if length(JSON3.write(responsedict[Symbol(i)])) == 0
# error("$i is empty ", @__LINE__)
# end
# end
# #check if the following attributes has more than 1 name
# # responsedict[:grape_variety] = split(responsedict[:grape_variety], ',')[1]
# # responsedict[:grape_variety] = split(responsedict[:grape_variety], '/')[1]
# responsedict[:country] = split(responsedict[:country], ',')[1]
# responsedict[:country] = split(responsedict[:country], '/')[1]
# responsedict[:region] = split(responsedict[:region], ',')[1]
# responsedict[:region] = split(responsedict[:region], '/')[1]
# delete!(responsedict, :reasoning)
# delete!(responsedict, :tasting_notes)
# delete!(responsedict, :occasion)
# delete!(responsedict, :food_to_be_paired_with_wine)
# # check if winery, wine_name, region, country, wine_type, grape_variety are in the query because sometime AI halucinates
# for i in [:grape_variety, :winery, :wine_name, :region]
# result = check_key_in_input(input, responsedict, attempt, maxattempt, i)
# if result === nothing
# # nothing wrong
# elseif result == "NA"
# responsedict[i] = "NA"
# else
# errornote = result
# error(errornote)
# end
# end
# # remove (some text)
# for (k, v) in responsedict
# _v = replace(v, r"\(.*?\)" => "")
# responsedict[k] = _v
# end
# result = ""
# for (k, v) in responsedict
# # some time LLM generate text with "(some comment)". this line removes it
# if !occursin("NA", v) && v != "" && !occursin("none", v) && !occursin("None", v)
# result *= "$k: $v, "
# end
# end
# #[PENDING] remove halucination. "highend dry white wine" --> "wine_type: white, occasion: special occasion, food_to_be_paired_with_wine: seafood, fish, country: France, Italy, USA, grape_variety: Chardonnay, Sauvignon Blanc, Pinot Grigio\nwine_notes: citrus, green apple, floral"
# result = result[1:end-2] # remove the ending ", "
# return result
# catch e
# io = IOBuffer()
# showerror(io, e)
# errorMsg = String(take!(io))
# st = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace()))
# println("")
# println("Attempt $attempt. Error occurred: $errorMsg\n$st ", @__FILE__, " ", @__LINE__)
# println("")
# end
# end
# error("wineattributes_wordToNumber() failed to get a response")
# end
function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2<:AbstractString}
systemmsg =
@@ -528,31 +354,30 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2<
- region: a region (NOT a country) where the wine is produced, such as Burgundy, Napa Valley, etc
- country: a country where the wine is produced. Can be "Austria", "Australia", "France", "Germany", "Italy", "Portugal", "Spain", "United States"
- wine_type: can be one of: "red", "white", "sparkling", "rose", "dessert" or "fortified"
- grape_variety: the name of the primary grape used to make the wine
- grape_varietal: the name of the primary grape used to make the wine
- tasting_notes: a brief description of the wine's taste, such as "butter", "oak", "fruity", etc
- wine_price: price of wine. For example, up to 100, less than 100, 20 to 100, 30-79.95
- wine_price: price range of wine.
- occasion: the occasion the user is having the wine for
- food_to_be_paired_with_wine: food that the user will be served with the wine such as poultry, fish, steak, etc
You should only respond in the user's preference form (JSON) as described below:
{"reasoning": ..., "winery": ..., "wine_name": ..., "vintage": ..., "region": ..., "country": ..., "wine_type": ..., "grape_variety": ..., "tasting_notes": ..., "wine_price": ..., "occasion": ..., "food_to_be_paired_with_wine": ...}
{"reasoning": ..., "winery": ..., "wine_name": ..., "vintage": ..., "region": ..., "country": ..., "wine_type": ..., "grape_varietal": ..., "tasting_notes": ..., "wine_price": ..., "occasion": ..., "food_to_be_paired_with_wine": ...}
Here are some example:
User's query: red, Chenin Blanc, Riesling, under 20
{"reasoning": ..., "winery": "NA", "wine_name": "NA", "vintage": "NA", "region": "NA", "country": "NA", "wine_type": "red", "grape_variety": "Chenin Blanc, Riesling", "tasting_notes": "NA", "wine_price": "under 20", "occasion": "NA", "food_to_be_paired_with_wine": "NA"}
User's query: red, Chenin Blanc, Riesling, 20 USD
{"reasoning": ..., "winery": "NA", "wine_name": "NA", "vintage": "NA", "region": "NA", "country": "NA", "wine_type": "red, white", "grape_varietal": "Chenin Blanc, Riesling", "tasting_notes": "NA", "wine_price": "0-20", "occasion": "NA", "food_to_be_paired_with_wine": "NA"}
User's query: Domaine du Collier Saumur Blanc 2019, France, white, Chenin Blanc
{"reasoning": ..., "winery": "Domaine du Collier", "wine_name": "Saumur Blanc", "vintage": "2019", "region": "Saumur", "country": "France", "wine_type": "white", "grape_variety": "Chenin Blanc", "tasting_notes": "NA", "wine_price": "109", "occasion": "NA", "food_to_be_paired_with_wine": "NA"}
{"reasoning": ..., "winery": "Domaine du Collier", "wine_name": "Saumur Blanc", "vintage": "2019", "region": "Saumur", "country": "France", "wine_type": "white", "grape_varietal": "Chenin Blanc", "tasting_notes": "NA", "wine_price": "NA", "occasion": "NA", "food_to_be_paired_with_wine": "NA"}
Let's begin!
"""
attributes = ["reasoning", "winery", "wine_name", "vintage", "region", "country", "wine_type", "grape_variety", "tasting_notes", "wine_price", "occasion", "food_to_be_paired_with_wine"]
attributes = ["reasoning", "winery", "wine_name", "vintage", "region", "country", "wine_type", "grape_varietal", "tasting_notes", "wine_price", "occasion", "food_to_be_paired_with_wine"]
errornote = ""
maxattempt = 5
for attempt in 1:maxattempt
for attempt in 1:5
usermsg =
"""
User's query: $input
@@ -572,70 +397,98 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2<
<|start_header_id|>assistant<|end_header_id|>
"""
try
response = a.func[:text2textInstructLLM](prompt)
response = GeneralUtils.remove_french_accents(response)
response = a.func[:text2textInstructLLM](prompt)
response = GeneralUtils.remove_french_accents(response)
# check wheter all attributes are in the response
for word in attributes
if !occursin(word, response)
error("$word attribute is missing")
end
# check wheter all attributes are in the response
checkFlag = false
for word in attributes
if !occursin(word, response)
errornote = "$word attribute is missing in previous attempts"
println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
checkFlag = true
break
end
end
checkFlag == true ? continue : nothing
responsedict = copy(JSON3.read(response))
responsedict = copy(JSON3.read(response))
delete!(responsedict, :reasoning)
delete!(responsedict, :tasting_notes)
delete!(responsedict, :occasion)
delete!(responsedict, :food_to_be_paired_with_wine)
# convert
# check if winery, wine_name, region, country, wine_type, grape_variety are in the query because sometime AI halucinates
for i in [:grape_variety, :winery, :wine_name, :region]
content = responsedict[i]
if occursin(",", content)
content = split(content, ",") # sometime AI generates multiple values e.g. "Chenin Blanc, Riesling"
content = strip.(content)
delete!(responsedict, :reasoning)
delete!(responsedict, :tasting_notes)
delete!(responsedict, :occasion)
delete!(responsedict, :food_to_be_paired_with_wine)
println(@__FILE__, " ", @__LINE__)
pprintln(responsedict)
# check if winery, wine_name, region, country, wine_type, grape_varietal's value are in the query because sometime AI halucinates
checkFlag = false
for i in attributes
j = Symbol(i)
if j [:reasoning, :tasting_notes, :occasion, :food_to_be_paired_with_wine]
# in case j is wine_price it needs to be checked differently because its value is ranged
if j == :wine_price
if responsedict[:wine_price] != "NA"
# check whether wine_price is in ranged number
if !occursin('-', responsedict[:wine_price])
errornote = "wine_price must be a range number"
println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
checkFlag = true
break
end
# check whether max wine_price is in the input
maxprice = split(responsedict[:wine_price], '-')[end]
if !occursin(maxprice, input)
responsedict[:wine_price] = "NA"
end
end
else
content = [content]
end
content = responsedict[j]
if typeof(content) <: AbstractVector
content = strip.(content)
elseif occursin(',', content)
content = split(content, ",") # sometime AI generates multiple values e.g. "Chenin Blanc, Riesling"
content = strip.(content)
else
content = [content]
end
for x in content
if !occursin("NA", responsedict[i]) && !occursin(x, input)
errornote = "$x is not mentioned in the user query, you must only use the info from the query."
error(errornote)
for x in content #BUG why x is "0-1500"
if !occursin("NA", responsedict[j]) && !occursin(x, input)
errornote = "$x is not mentioned in the user query, you must only use the info from the query."
println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
checkFlag == true
break
end
end
end
end
# remove (some text)
for (k, v) in responsedict
_v = replace(v, r"\(.*?\)" => "")
responsedict[k] = _v
end
result = ""
for (k, v) in responsedict
# some time LLM generate text with "(some comment)". this line removes it
if !occursin("NA", v) && v != "" && !occursin("none", v) && !occursin("None", v)
result *= "$k: $v, "
end
end
#[PENDING] remove halucination. "highend dry white wine" --> "wine_type: white, occasion: special occasion, food_to_be_paired_with_wine: seafood, fish, country: France, Italy, USA, grape_variety: Chardonnay, Sauvignon Blanc, Pinot Grigio\nwine_notes: citrus, green apple, floral"
result = result[1:end-2] # remove the ending ", "
return result
catch e
io = IOBuffer()
showerror(io, e)
errorMsg = String(take!(io))
st = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace()))
println("")
println("Attempt $attempt. Error occurred: $errorMsg\n$st ", @__FILE__, " ", @__LINE__)
println("")
end
checkFlag == true ? continue : nothing
# remove (some text)
for (k, v) in responsedict
_v = replace(v, r"\(.*?\)" => "")
responsedict[k] = _v
end
result = ""
for (k, v) in responsedict
# some time LLM generate text with "(some comment)". this line removes it
if !occursin("NA", v) && v != "" && !occursin("none", v) && !occursin("None", v)
result *= "$k: $v, "
end
end
#[PENDING] remove halucination. "highend dry white wine" --> "wine_type: white, occasion: special occasion, food_to_be_paired_with_wine: seafood, fish, country: France, Italy, USA, grape_varietal: Chardonnay, Sauvignon Blanc, Pinot Grigio\nwine_notes: citrus, green apple, floral"
result = result[1:end-2] # remove the ending ", "
return result
end
error("wineattributes_wordToNumber() failed to get a response")
end
@@ -643,6 +496,7 @@ end
"""
# TODO
- [PENDING] "French dry white wines with medium bod" the LLM does not recognize sweetness. use LLM self questioning to solve.
- [PENDING] French Syrah, Viognier, under 100. LLM extract intensiry of 3-5. why?
"""
function extractWineAttributes_2(a::T1, input::T2)::String where {T1<:agent, T2<:AbstractString}
@@ -675,8 +529,6 @@ function extractWineAttributes_2(a::T1, input::T2)::String where {T1<:agent, T2<
4 to 5: May correspond to "high acidity" or a similar description.
"""
# chathistory = vectorOfDictToText(a.chathistory)
systemmsg =
"""
As an helpful sommelier, your task is to fill out the user's preference form based on the corresponding words from the user's query.
@@ -695,238 +547,284 @@ function extractWineAttributes_2(a::T1, input::T2)::String where {T1<:agent, T2<
3) Do not generate other comments.
You should then respond to the user with the following points:
- reasoning: State your understanding of the current situation
- sweetness_keyword: The exact keywords in the user's query describing the sweetness level of the wine.
- sweetness: ( S ), where ( S ) represents integers indicating the range of sweetness levels. Example: 1-2
- acidity_keyword: The exact keywords in the user's query describing the acidity level of the wine.
- acidity: ( A ), where ( A ) represents integers indicating the range of acidity level. Example: 3-5
- tannin_keyword: The exact keywords in the user's query describing the tannin level of the wine.
- tannin: ( T ), where ( T ) represents integers indicating the range of tannin level. Example: 1-3
- intensity_keyword: The exact keywords in the user's query describing the intensity level of the wine.
- intensity: ( I ), where ( I ) represents integers indicating the range of intensity level. Example: 2-4
- notes: Anything you want to add
You should only respond in the form as described below:
reasoning: ...
sweetness: ...
acidity: ...
tannin: ...
intensity: ...
notes: ...
You should only respond in the form (JSON) as described below:
{
"sweetness_keyword": ...,
"sweetness": ...,
"acidity_keyword": ...,
"acidity": ...,
"tannin_keyword": ...,
"tannin": ...,
"intensity_keyword": ...,
"intensity": ...
}
Here are some examples:
User's query: I want a wine with a medium-bodied, low acidity, medium tannin.
{
"sweetness_keyword": "NA",
"sweetness": "NA",
"acidity_keyword": "low acidity",
"acidity": "1-2",
"tannin_keyword": "medium tannin",
"tannin": "3-4",
"intensity_keyword": "medium-bodied",
"intensity": "3-4"
}
User's query: German red wine, under 100, pairs with spicy food
{
"sweetness_keyword": "NA",
"sweetness": "NA",
"acidity_keyword": "NA",
"acidity": "NA",
"tannin_keyword": "NA",
"tannin": "NA",
"intensity_keyword": "NA",
"intensity": "NA"
}
Let's begin!
"""
# chathistory = vectorOfDictToText(a.chathistory)
usermsg =
"""
$conversiontable
User's query: $input
"""
_prompt =
[
Dict(:name=> "system", :text=> systemmsg),
Dict(:name=> "user", :text=> usermsg)
]
# put in model format
prompt = GeneralUtils.formatLLMtext(_prompt; formatname="llama3instruct")
prompt *=
"""
<|start_header_id|>assistant<|end_header_id|>
"""
attributes = ["reasoning", "sweetness", "acidity", "tannin", "intensity", "notes"]
errornote = ""
for attempt in 1:5
try
response = a.func[:text2textInstructLLM](prompt)
responsedict = GeneralUtils.textToDict(response, attributes, rightmarker=":", symbolkey=true)
usermsg =
"""
$conversiontable
User's query: $input
$errornote
"""
for i attributes
if length(JSON3.write(responsedict[Symbol(i)])) == 0
error("$i is empty ", @__LINE__)
end
_prompt =
[
Dict(:name=> "system", :text=> systemmsg),
Dict(:name=> "user", :text=> usermsg)
]
# put in model format
prompt = GeneralUtils.formatLLMtext(_prompt; formatname="llama3instruct")
prompt *=
"""
<|start_header_id|>assistant<|end_header_id|>
"""
response = a.func[:text2textInstructLLM](prompt)
responsedict = copy(JSON3.read(response))
# check whether each describing keyword is in the input to prevent halucination
for i in ["sweetness", "acidity", "tannin", "intensity"]
keyword = Symbol(i * "_keyword") # e.g. sweetness_keyword
value = responsedict[keyword]
if value != "NA" && !occursin(value, input)
errornote = "WARNING. Keyword $keyword: $value does not appear in the input. You must use information from the input only"
println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
continue
end
delete!(responsedict, :reasoning)
delete!(responsedict, :notes) # LLM traps. so it can add useless info here like comments.
# some time LLM think the user mentioning acidity and tannin but actually didn't
for (k, v) in responsedict
if k [:acidity, :tannin] && !occursin(string(k), input)
responsedict[k] = "NA"
end
# if value == "NA" then responsedict[i] = "NA"
# e.g. if sweetness_keyword == "NA" then sweetness = "NA"
if value == "NA"
responsedict[Symbol(i)] = "NA"
end
# remove (some text)
for (k, v) in responsedict
_v = replace(v, r"\(.*?\)" => "")
responsedict[k] = _v
end
# some time LLM not put integer range
for (k, v) in responsedict
responsedict[k] = v
if length(v) > 5
error("non-range is not allowed. $k $v")
end
end
# some time LLM says NA-2. Need to convert NA to 1
for (k, v) in responsedict
if occursin("NA", v) && occursin("-", v)
new_v = replace(v, "NA"=>"1")
responsedict[k] = new_v
end
end
result = ""
for (k, v) in responsedict
# some time LLM generate text with "(some comment)". this line removes it
if !occursin("NA", v)
result *= "$k: $v, "
end
end
result = result[1:end-2] # remove the ending ", "
return result
catch e
io = IOBuffer()
showerror(io, e)
errorMsg = String(take!(io))
st = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace()))
println("")
println("Attempt $attempt. Error occurred: $errorMsg\n$st")
println("")
end
# some time LLM not put integer range
for (k, v) in responsedict
if !occursin("keyword", string(k))
if v !== "NA" && (!occursin('-', v) || length(v) > 5)
errornote = "WARNING: The non-range value {$k: $v} is not allowed. It should be specified in a range format, i.e. min-max."
println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
continue
end
end
end
# some time LLM says NA-2. Need to convert NA to 1
for (k, v) in responsedict
if occursin("NA", v) && occursin("-", v)
new_v = replace(v, "NA"=>"1")
responsedict[k] = new_v
end
end
result = ""
for (k, v) in responsedict
# some time LLM generate text with "(some comment)". this line removes it
if !occursin("NA", v)
result *= "$k: $v, "
end
end
result = result[1:end-2] # remove the ending ", "
return result
end
error("wineattributes_wordToNumber() failed to get a response")
end
# function recommendbox(a::T1, input::T2)::String where {T1<:agent, T2<:AbstractString}
# error("recommendbox")
# function concept(a::sommelier, thoughtDict)
# systemmsg =
# """
# As an helpful sommelier, your task is to fill out the user's preference form based on the corresponding words from the user's query.
# Your name: N/A
# Situation:
# - You are a helpful assistant
# Your vision:
# - This is a good opportunity to help the user
# Your mission:
# - To describe the concept of a conversation
# Mission's objective includes:
# - To
# Your responsibility includes:
# 1) Given the situation, convey your thoughts to the user.
# Your responsibility excludes:
# 1) Asking or guiding the user to make a purchase
# 2) Processing sales orders or engaging in any other sales-related activities
# 3) Answering questions and offering additional services beyond just recommendations, such as delivery, box, gift wrapping, personalized messages. Customers can reach out to our sales at the store.
# Your profile:
# - You are a young professional in a big company.
# - You are avid party goer
# - You like beer.
# - You know nothing about wine.
# - You have a budget of 1500usd.
# Additional information:
# - your boss like spicy food.
# - your boss is a middle-aged man.
# At each round of conversation, the user will give you the current situation:
# User's query: ...
# At each round of conversation, you will be given the following information:
# Your ongoing conversation with the user: ...
# Context: ...
# Your thoughts: Your current thoughts in your mind
# The preference form requires the following information:
# wine_type, price, occasion, food_to_be_paired_with_wine, country, grape_variety, flavors, aromas.
# You MUST follow the following guidelines:
# - Do not offer additional services you didn't thought.
# You must follow the following guidelines:
# 1) If specific information required in the preference form is not available in the query or there isn't any, mark with 'NA' to indicate this.
# Additionally, words like 'any' or 'unlimited' mean no information is available.
# 2) Use the conversion table to convert the descriptive word level of sweetness, intensity, tannin, and acidity into a corresponding integer.
# 3) Do not generate other comments.
# You should follow the following guidelines:
# - Focus on the latest conversation.
# - If the user interrupts, prioritize the user
# - Medium and full-bodied red wines should not be paired with spicy foods.
# You should then respond to the user with the following points:
# - reasoning: State your understanding of the current situation
# - wine_type: Can be one of: "red", "white", "sparkling", "rose", "dessert" or "fortified"
# - price: Must be an integer representing the cost of the wine.
# - occasion: ...
# - food_to_be_paired_with_wine: food that the user will be served with wine
# - country: wine's country of origin
# - region: wine's region of origin such as Burgundy, Napa Valley
# - grape variety: a single name of grape used to make wine.
# - flavors: Names of items that the wine tastes like.
# - aromas: wine's aroma
# You should then respond to the user with:
# 1) Chat: Given the situation, How would you respond to the user to express your thoughts honestly and keep the conversation going smoothly?
# You should only respond in the form as described below:
# reasoning: ...
# wine_type: ...
# price: ...
# occasion: ...
# food_to_be_paired_with_wine: ...
# country: ...
# region: ...
# grape_variety: ...
# flavors: ...
# aromas: ...
# You should only respond in format as described below:
# Chat: ...
# Let's begin!
# Here are some examples of response format:
# Chat: "I see. Let me think about it. I'll get back to you with my recommendation."
# Let's begin!
# """
# attributes = ["reasoning", "wine_type", "price", "occasion", "food_to_be_paired_with_wine", "country", "region", "grape_variety", "flavors", "aromas"]
# errornote = ""
# for attempt in 1:5
# # a.memory[:shortmem][:available_wine] is a dataframe.
# context =
# if haskey(a.memory[:shortmem], :available_wine)
# "Available wines $(GeneralUtils.dfToString(a.memory[:shortmem][:available_wine]))"
# else
# "None"
# end
# usermsg =
# """
# User's query: $input
# $errornote
# """
# chathistory = vectorOfDictToText(a.chathistory)
# errornote = ""
# response = nothing # placeholder for show when error msg show up
# for attempt in 1:10
# usermsg = """
# Your ongoing conversation with the user: $chathistory
# Context: $context
# Your thoughts: $(thoughtDict[:understanding]) $(thoughtDict[:reasoning]) $(thoughtDict[:plan])
# $errornote
# """
# _prompt =
# [
# Dict(:name=> "system", :text=> systemmsg),
# Dict(:name=> "user", :text=> usermsg)
# ]
# [
# Dict(:name => "system", :text => systemmsg),
# Dict(:name => "user", :text => usermsg)
# ]
# # put in model format
# prompt = GeneralUtils.formatLLMtext(_prompt; formatname="llama3instruct")
# prompt *=
# """
# <|start_header_id|>assistant<|end_header_id|>
# """
# prompt *= """
# <|start_header_id|>assistant<|end_header_id|>
# """
# try
# response = a.func[:text2textInstructLLM](prompt)
# responsedict = GeneralUtils.textToDict(response, attributes, rightmarker=":", symbolkey=true)
# # sometime the model response like this "here's how I would respond: ..."
# if occursin("respond:", response)
# errornote = "You don't need to intro your response"
# error("generatechat() response contain : ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
# end
# response = GeneralUtils.remove_french_accents(response)
# response = replace(response, '*'=>"")
# response = replace(response, '$' => "USD")
# response = replace(response, '`' => "")
# response = GeneralUtils.remove_french_accents(response)
# responsedict = GeneralUtils.textToDict(response, ["Chat"],
# rightmarker=":", symbolkey=true, lowercasekey=true)
# for i ∈ attributes
# if length(JSON3.write(responsedict[Symbol(i)])) == 0
# error("$i is empty ", @__LINE__)
# for i ∈ [:chat]
# if length(JSON3.write(responsedict[i])) == 0
# error("$i is empty ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
# end
# end
# #[PENDING] check if the following attributes has more than 1 name
# x = length(split(responsedict[:grape_variety], ",")) * length(split(responsedict[:grape_variety], "/"))
# if x > 1
# errornote = "only a single name in grape_variety is allowed"
# error("only a single grape_variety name is allowed")
# end
# x = length(split(responsedict[:country], ",")) * length(split(responsedict[:country], "/"))
# if x > 1
# errornote = "only a single name in country is allowed"
# error("only a single country name is allowed")
# end
# x = length(split(responsedict[:region], ",")) * length(split(responsedict[:region], "/"))
# if x > 1
# errornote = "only a single name in region is allowed"
# error("only a single region name is allowed")
# end
# # check if grape_variety is mentioned in the input
# if responsedict[:grape_variety] != "NA" && !occursin(responsedict[:grape_variety], input)
# error("$(responsedict[:grape_variety]) is not mentioned in the input")
# end
# responsedict[:flavors] = replace(responsedict[:flavors], "notes"=>"")
# delete!(responsedict, :reasoning)
# delete!(responsedict, :tasting_notes)
# delete!(responsedict, :flavors)
# delete!(responsedict, :aromas)
# # remove (some text)
# for (k, v) in responsedict
# _v = replace(v, r"\(.*?\)" => "")
# responsedict[k] = _v
# end
# result = ""
# for (k, v) in responsedict
# # some time LLM generate text with "(some comment)". this line removes it
# if !occursin("NA", v) && v != "" && !occursin("none", v) && !occursin("None", v)
# result *= "$k: $v, "
# # check if there are more than 1 key per categories
# for i ∈ [:chat]
# matchkeys = GeneralUtils.findMatchingDictKey(responsedict, i)
# if length(matchkeys) > 1
# error("generatechat has more than one key per categories")
# end
# end
# #[PENDING] remove hallucination. "highend dry white wine" --> "wine_type: white, occasion: special occasion, food_to_be_paired_with_wine: seafood, fish, country: France, Italy, USA, grape_variety: Chardonnay, Sauvignon Blanc, Pinot Grigio\nwine_notes: citrus, green apple, floral"
# # check if Context: is in chat
# if occursin("Context:", responsedict[:chat])
# error("Context: is in text. This is not allowed")
# end
# result = result[1:end-2] # remove the ending ", "
# println("\n~~~ generatechat() ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
# pprintln(Dict(responsedict))
# # check whether an agent recommend wines before checking inventory or recommend wines
# # outside its inventory
# # ask LLM whether there are any winery mentioned in the response
# mentioned_winery = detectWineryName(a, responsedict[:chat])
# if mentioned_winery != "None"
# mentioned_winery = String.(strip.(split(mentioned_winery, ",")))
# # check whether the wine is in event
# isWineInEvent = false
# for winename in mentioned_winery
# for event in a.memory[:events]
# if event[:outcome] !== nothing && occursin(winename, event[:outcome])
# isWineInEvent = true
# break
# end
# end
# end
# # if wine is mentioned but not in timeline or shortmem,
# # then the agent is not supposed to recommend the wine
# if isWineInEvent == false
# errornote = "Previously: You recommend a wine that is not in your inventory which is not allowed."
# error("Previously: You recommend a wine that is not in your inventory which is not allowed.")
# end
# end
# result = responsedict[:chat]
# return result
# catch e
@@ -934,15 +832,14 @@ end
# showerror(io, e)
# errorMsg = String(take!(io))
# st = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace()))
# println("")
# println("Attempt $attempt. Error occurred: $errorMsg\n$st")
# println("")
# println("\nAttempt $attempt. Error occurred: $errorMsg\n$st ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
# end
# end
# error("wineattributes_wordToNumber() failed to get a response")
# error("generatechat failed to generate a response")
# end
""" Attemp to correct LLM response's incorrect JSON response.
# Arguments
@@ -1133,7 +1030,7 @@ end
# state[:isterminal] = true
# state[:reward] = 1
# end
# println("--> 5 Evaluator ", @__FILE__, " ", @__LINE__)
# println("--> 5 Evaluator ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
# pprintln(Dict(responsedict))
# return responsedict[:score]
# catch e

View File

@@ -11,8 +11,8 @@ abstract type agent end
mutable struct companion <: agent
name::String # agent name
id::String # agent id
systemmsg::Union{String, Nothing}
maxHistoryMsg::Integer # e.g. 21th and earlier messages will get summarized
""" Memory
@@ -34,8 +34,8 @@ end
function companion(
text2textInstructLLM::Function
;
name::String= "Assistant",
id::String= string(uuid4()),
systemmsg::Union{String, Nothing}= nothing,
maxHistoryMsg::Integer= 20,
chathistory::Vector{Dict{Symbol, String}} = Vector{Dict{Symbol, String}}(),
)
@@ -48,13 +48,13 @@ function companion(
)
newAgent = companion(
name,
id,
maxHistoryMsg,
chathistory,
memory,
text2textInstructLLM
)
id,
systemmsg,
maxHistoryMsg,
chathistory,
memory,
text2textInstructLLM
)
return newAgent
end
@@ -146,7 +146,6 @@ mutable struct sommelier <: agent
"""
chathistory::Vector{Dict{Symbol, Any}}
memory::Dict{Symbol, Any}
func # NamedTuple of functions
end
@@ -179,14 +178,17 @@ function sommelier(
# ),
)
memory = Dict{Symbol, Any}(
:chatbox=> "",
:shortmem=> OrderedDict{Symbol, Any}(),
:events=> Vector{Dict{Symbol, Any}}(),
:state=> Dict{Symbol, Any}(
:wine_presented_to_user=> "None",
),
)
memory = Dict{Symbol, Any}(
:chatbox=> "",
:shortmem=> OrderedDict{Symbol, Any}(
:available_wine=> [],
:found_wine=> [], # used by decisionMaker(). This is to prevent decisionMaker() keep presenting the same wines
),
:events=> Vector{Dict{Symbol, Any}}(),
:state=> Dict{Symbol, Any}(
),
:recap=> OrderedDict{Symbol, Any}(),
)
newAgent = sommelier(
name,

View File

@@ -1,6 +1,7 @@
module util
export clearhistory, addNewMessage, vectorOfDictToText, eventdict, noises
export clearhistory, addNewMessage, chatHistoryToText, eventdict, noises, createTimeline,
availableWineToText
using UUIDs, Dates, DataStructures, HTTP, JSON3
using GeneralUtils
@@ -106,7 +107,7 @@ function addNewMessage(a::T1, name::String, text::T2;
error("name is not in agent.availableRole $(@__LINE__)")
end
#[] summarize the oldest 10 message
#[WORKING] summarize the oldest 10 message
if length(a.chathistory) > maximumMsg
summarize(a.chathistory)
else
@@ -138,7 +139,7 @@ julia> GeneralUtils.vectorOfDictToText(vecd, withkey=true)
```
# Signature
"""
function vectorOfDictToText(vecd::Vector; withkey=true)::String
function chatHistoryToText(vecd::Vector; withkey=true)::String
# Initialize an empty string to hold the final text
text = ""
@@ -169,11 +170,34 @@ function vectorOfDictToText(vecd::Vector; withkey=true)::String
end
"""
    availableWineToText(vecd::Vector)::String

Render a vector of wine dictionaries as a single numbered string.

Each dictionary becomes one numbered entry of comma-separated `key:value`
pairs, e.g. `"1) name:Merlot,price:20 2) name:Syrah,price:25 "`.
Note: pair order within an entry follows the dictionary's iteration order.

# Arguments
- `vecd::Vector`: vector of dictionaries describing available wines.

# Returns
- `String`: the concatenated entries, each followed by a trailing space;
  an empty string when `vecd` is empty.
"""
function availableWineToText(vecd::Vector)::String
    # Build incrementally in an IOBuffer instead of repeated string
    # concatenation (avoids O(n^2) allocations for long inventories).
    io = IOBuffer()
    for (i, d) in enumerate(vecd)
        # Format every key-value pair as "key:value" and join with commas.
        row = join(("$k:$v" for (k, v) in d), ',')
        print(io, "$i) $row ")
    end
    return String(take!(io))
end
function eventdict(;
event_description::Union{String, Nothing}=nothing,
timestamp::Union{DateTime, Nothing}=nothing,
subject::Union{String, Nothing}=nothing,
action_or_dialogue::Union{String, Nothing}=nothing,
thought::Union{AbstractDict, Nothing}=nothing,
actionname::Union{String, Nothing}=nothing, # "CHAT", "CHECKINVENTORY", "PRESENTBOX", etc
actioninput::Union{String, Nothing}=nothing,
location::Union{String, Nothing}=nothing,
equipment_used::Union{String, Nothing}=nothing,
material_used::Union{String, Nothing}=nothing,
@@ -184,7 +208,9 @@ function eventdict(;
:event_description=> event_description,
:timestamp=> timestamp,
:subject=> subject,
:action_or_dialogue=> action_or_dialogue,
:thought=> thought,
:actionname=> actionname,
:actioninput=> actioninput,
:location=> location,
:equipment_used=> equipment_used,
:material_used=> material_used,
@@ -194,6 +220,23 @@ function eventdict(;
end
"""
    createTimeline(memory::AbstractVector; skiprecent::Integer=0)::String

Build a numbered, newline-terminated timeline from a vector of event
dictionaries, omitting the most recent `skiprecent` events.

Each event must provide `:subject`, `:actioninput`, and `:outcome`.
An event with `:outcome === nothing` renders as
`"i) subject> actioninput\\n"`; otherwise the outcome is appended:
`"i) subject> actioninput outcome\\n"`.

# Arguments
- `memory::AbstractVector`: event dictionaries (see `eventdict`).

# Keywords
- `skiprecent::Integer=0`: number of newest events to exclude.

# Returns
- `String`: the formatted timeline; empty when no events remain.
"""
function createTimeline(memory::T1; skiprecent::Integer=0) where {T1<:AbstractVector}
    # @view avoids copying the (possibly long) event vector.
    events = @view memory[1:end-skiprecent]
    # IOBuffer avoids O(n^2) repeated string concatenation.
    io = IOBuffer()
    for (i, event) in enumerate(events)
        if event[:outcome] === nothing
            print(io, "$i) $(event[:subject])> $(event[:actioninput])\n")
        else
            print(io, "$i) $(event[:subject])> $(event[:actioninput]) $(event[:outcome])\n")
        end
    end
    return String(take!(io))
end
# """ Convert a single chat dictionary into LLM model instruct format.