This commit is contained in:
narawat lamaiin
2025-05-04 20:56:17 +07:00
parent 1fc5dfe820
commit a0152a3c29
4 changed files with 370 additions and 243 deletions

View File

@@ -125,8 +125,10 @@ function decisionMaker(a::T; recent::Integer=10, maxattempt=10
# """
# end
recent_ind = GeneralUtils.recentElementsIndex(length(a.memory[:events]), recent)
recent_ind = GeneralUtils.recentElementsIndex(length(a.memory[:events]), recent; includelatest=true)
recentevents = a.memory[:events][recent_ind]
recentEventsDict = createEventsLog(recentevents; eventindex=recent_ind)
timeline = createTimeline(recentevents; eventindex=recent_ind)
# recap as caching
@@ -160,23 +162,23 @@ function decisionMaker(a::T; recent::Integer=10, maxattempt=10
return responsedict
else
header = ["Thought:", "Plan:", "Action_name:", "Action_input:"]
dictkey = ["thought", "plan", "action_name", "action_input"]
header = ["Plan:", "Action_name:", "Action_input:"]
dictkey = ["plan", "action_name", "action_input"]
context = # maybe adding just the wine name instead of the whole wine data is better
if length(a.memory[:shortmem][:available_wine]) != 0
winenames = []
for (i, wine) in enumerate(a.memory[:shortmem][:available_wine])
name = "$i) $(wine["wine_name"]) "
push!(winenames, name)
end
availableWineName = join(winenames, ',')
"Available wines you've found in your inventory so far: $availableWineName"
else
""
end
# context = # may b add wine name instead of the hold wine data is better
# if length(a.memory[:shortmem][:available_wine]) != 0
# winenames = []
# for (i, wine) in enumerate(a.memory[:shortmem][:available_wine])
# name = "$i) $(wine["wine_name"]) "
# push!(winenames, name)
# end
# availableWineName = join(winenames, ',')
# "Available wines you've found in your inventory so far: $availableWineName"
# else
# ""
# end
errornote = ""
errornote = "N/A"
response = nothing # placeholder for show when error msg show up
#[PENDING] add 1) 3 decisions samples 2) compare and choose the best decision (correct tools etc)
@@ -184,10 +186,10 @@ function decisionMaker(a::T; recent::Integer=10, maxattempt=10
:num_ctx => 32768,
:temperature => 0.5,
)
for attempt in 1:maxattempt
if attempt > 1
println("\nYiemAgent decisionMaker() attempt $attempt/$maxattempt ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
llmkwargs[:temperature] += 0.1
end
QandA = generatequestion(a, a.func[:text2textInstructLLM], timeline)
@@ -208,8 +210,7 @@ function decisionMaker(a::T; recent::Integer=10, maxattempt=10
3) Answering questions or offering additional services beyond those related to your store's wine recommendations such as discounts, quantity, rewards programs, promotions, delivery options, shipping, boxes, gift wrapping, packaging, personalized messages or something similar. These are the job of our sales team at the store.
At each round of conversation, you will be given the following information:
Your recent events: latest 5 events of the situation
Your Q&A: the question and answer you have asked yourself
Q&A: the question and answer you have asked yourself about the current situation
You must follow the following guidelines:
- Focus on the latest event
@@ -228,33 +229,32 @@ function decisionMaker(a::T; recent::Integer=10, maxattempt=10
For your information:
- Your store carries only wine.
- Vintage 0 means non-vintage.
- All wine in your inventory has no organic, sustainability and sulfite information.
You should then respond to the user with interleaving Thought, Plan, Action_name, Action_input:
1) Thought: Articulate your current understanding and consider the current situation.
2) Plan: Based on the current situation, state a complete action plan to complete the task. Be specific.
3) Action_name: (Typically corresponds to the execution of the first step in your plan) Can be one of the following tool names:
- CHATBOX which you can use to talk with the user. The input is your intentions for the dialogue. Be specific.
- CHECKINVENTORY allows you to check information about wines you want in your inventory. The input should be a specific search term in verbal English. A good search term should include details such as price range, winery, wine name, vintage, region, country, wine type, grape varietal, tasting notes, occasion, food to be paired with wine, intensity, tannin, sweetness, acidity.
Invalid query example: red wine that pair well with spicy food.
- CHECKINVENTORY allows you to check information about wines you want in your inventory's database. The input is search criteria. Supported search parameters include: wine price, winery, name, vintage, region, country, type, grape varietal, tasting notes, occasion, food pairing, intensity, tannin, sweetness, and acidity.
Example query: "Dry, full-bodied red wine from Burgundy, France or Tuscany, Italy. Merlot varietal. price 100 to 1000 USD."
- PRESENTBOX which you can use to present wines you have found in your inventory to the user. The input are wine names that you want to present.
- ENDCONVERSATION which you can use to properly end the conversation with the user. Input is "NA".
4) Action_input: The input to the action you are about to perform. This should be aligned with the plan
You should only respond in format as described below:
Thought: ...
Plan: ...
Action_name: ...
Action_input: ...
Let's begin!
"""
$context
Your recent events:
$timeline
Your Q&A:
$QandA
assistantinfo =
"""
<information>
Q&A: $QandA
P.S. $errornote
</information>
"""
unformatPrompt =
@@ -262,41 +262,16 @@ function decisionMaker(a::T; recent::Integer=10, maxattempt=10
Dict(:name => "system", :text => systemmsg),
]
#BUG found wine is "count 0" invalid return from CHECKINVENTORY()
# check if winename in shortmem occurred in chathistory. if not, skip decision and immediately use PRESENTBOX
# if length(a.memory[:shortmem][:found_wine]) != 0
# # check if wine name mentioned in recentevents, only check first wine name is enough
# # because agent will recommend every wines it found each time.
# winenames = []
# for wine in a.memory[:shortmem][:found_wine]
# push!(winenames, wine["wine_name"])
# end
# for winename in winenames
# if !occursin(winename, chathistory)
# println("\nYiem decisionMaker() found wines from DB ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
# d = Dict(
# :thought=> "The user is looking for a wine that matches their intention and budget. I've checked the inventory and found wines that match the customer's criteria. I will present the wines to the customer.",
# :plan=> "1) I'll provide detailed introductions of the wines I just found to the user. 2) I'll explain how the wine could match the user's intention and what its effects might mean for the user's experience. 3) If multiple wines are available, I'll highlight their differences and provide a comprehensive comparison of how each option aligns with the user's intention and what the potential effects of each option could mean for the user's experience. 4) I'll provide my personal recommendation.",
# :action_name=> "PRESENTBOX",
# :action_input=> "I need to present to the user the following wines: $winenames")
# a.memory[:shortmem][:found_wine] = [] # clear because PRESENTBOX command is issued. This is to prevent decisionMaker() keep presenting the same wines
# result = (systemmsg=systemmsg, usermsg=usermsg, unformatPrompt=unformatPrompt, result=d)
# println("\nYiem decisionMaker() ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
# pprintln(Dict(d))
# return result
# end
# end
# end
# change qwen format put in model format
unformatPrompt = vcat(unformatPrompt, recentEventsDict)
# put in model format
prompt = GeneralUtils.formatLLMtext(unformatPrompt, a.llmFormatName)
# add info
prompt = prompt * assistantinfo
response = a.func[:text2textInstructLLM](prompt; senderId=a.id, llmkwargs=llmkwargs)
response = GeneralUtils.remove_french_accents(response)
response = replace(response, "**"=>"")
response = replace(response, "***"=>"")
response = replace(response, "<|eot_id|>"=>"")
response = GeneralUtils.deFormatLLMtext(response, a.llmFormatName)
think, response = GeneralUtils.extractthink(response)
response = GeneralUtils.deFormatLLMtext(response, a.llmFormatName; )
# check if response contain more than one functions from ["CHATBOX", "CHECKINVENTORY", "ENDCONVERSATION"]
count = 0
@@ -359,14 +334,14 @@ function decisionMaker(a::T; recent::Integer=10, maxattempt=10
end
checkFlag == true ? continue : nothing
# check if action_name = CHECKINVENTORY and action_input has the words "pairs well" or
# "pair well" in it because it is not a valid query.
detected_kw = GeneralUtils.detect_keyword(["pair", "pairs", "pairing", "well"], responsedict[:action_input])
if responsedict[:action_name] == "CHECKINVENTORY" && sum(values(detected_kw)) != 0
errornote = "In your previous attempt, action_input for CHECKINVENTORY function is invalid"
println("\nERROR YiemAgent decisionMaker() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
continue
end
# # check if action_name = CHECKINVENTORY and action_input has the words "pairs well" or
# # "pair well" in it because it is not a valid query.
# detected_kw = GeneralUtils.detect_keyword(["pair", "pairs", "pairing", "well"], responsedict[:action_input])
# if responsedict[:action_name] == "CHECKINVENTORY" && sum(values(detected_kw)) != 0
# errornote = "In your previous attempt, action_input for CHECKINVENTORY function was $(responsedict[:action_name]). It was not specific enough."
# println("\nERROR YiemAgent decisionMaker() $errornote => $(responsedict[:action_input]) ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
# continue
# end
println("\nYiem decisionMaker() ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
pprintln(Dict(responsedict))
@@ -497,7 +472,7 @@ end
# ""
# end
# errornote = ""
# errornote = "N/A"
# response = nothing # placeholder for show when error msg show up
# for attempt in 1:10
@@ -830,7 +805,7 @@ function evaluator(state::T1, text2textInstructLLM::Function
thoughthistory *= "$k: $v\n"
end
errornote = ""
errornote = "N/A"
for attempt in 1:10
errorFlag = false
@@ -1004,7 +979,7 @@ function conversation(a::sommelier, userinput::Dict; maximumMsg=50)
end
end
function conversation(a::companion, userinput::Dict;
function conversation(a::Union{companion, virtualcustomer}, userinput::Dict;
converPartnerName::Union{String, Nothing}=nothing,
maximumMsg=50)
@@ -1022,7 +997,7 @@ function conversation(a::companion, userinput::Dict;
eventdict(;
event_description="the user talks to the assistant.",
timestamp=Dates.now(),
subject=a.name,
subject="user",
actioninput=userinput[:text],
)
)
@@ -1075,7 +1050,7 @@ function think(a::T)::NamedTuple{(:actionname, :result),Tuple{String,String}} wh
(result=actioninput, errormsg=nothing, success=true)
elseif actionname == "ENDCONVERSATION"
x = "Conclude the conversation, thanks the user then goodbye and inviting them to return next time."
(result=x, errormsg=nothing, success=true)
(result=actioninput, errormsg=nothing, success=true)
else
error("undefined LLM function. Requesting $actionname")
end
@@ -1213,7 +1188,7 @@ function presentbox(a::sommelier, thoughtDict)
end
chathistory = chatHistoryToText(a.chathistory)
errornote = ""
errornote = "N/A"
response = nothing # placeholder for show when error msg show up
# yourthought = "$(thoughtDict[:thought]) $(thoughtDict[:plan])"
@@ -1224,7 +1199,6 @@ function presentbox(a::sommelier, thoughtDict)
if attempt > 1 # use to prevent LLM generate the same respond over and over
println("\nYiemAgent presentbox() attempt $attempt/10 ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
# yourthought1 = paraphrase(a.func[:text2textInstructLLM], yourthought)
# llmkwargs[:temperature] += 0.1
else
# yourthought1 = yourthought
end
@@ -1245,9 +1219,10 @@ function presentbox(a::sommelier, thoughtDict)
# put in model format
prompt = GeneralUtils.formatLLMtext(_prompt, a.llmFormatName)
#[WORKING] update this function to use new llm
# update this function to use new llm
response = a.func[:text2textInstructLLM](prompt; senderId=a.id)
response = GeneralUtils.deFormatLLMtext(response, a.llmFormatName)
think, response = GeneralUtils.extractthink(response)
# check whether response has not-allowed words
notallowed = ["respond:", "user>", "user:"]
detected_kw = GeneralUtils.detect_keyword(notallowed, response)
@@ -1397,19 +1372,18 @@ function generatechat(a::sommelier, thoughtDict)
errornote = "N/A"
response = nothing # placeholder for show when error msg show up
yourthought = "$(thoughtDict[:thought]) $(thoughtDict[:plan])"
yourthought = thoughtDict[:plan]
yourthought1 = nothing
llmkwargs=Dict(
:num_ctx => 32768,
:temperature => 0.2,
:temperature => 0.5,
)
for attempt in 1:10
if attempt > 1 # use to prevent LLM generate the same respond over and over
println("\nYiemAgent generatchat() attempt $attempt/10 ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
yourthought1 = paraphrase(a.func[:text2textInstructLLM], yourthought)
llmkwargs[:temperature] += 0.1
else
yourthought1 = yourthought
end
@@ -1429,9 +1403,10 @@ function generatechat(a::sommelier, thoughtDict)
]
# put in model format
prompt = GeneralUtils.formatLLMtext(_prompt, "qwen3")
prompt = GeneralUtils.formatLLMtext(_prompt, a.llmFormatName)
response = a.func[:text2textInstructLLM](prompt; llmkwargs=llmkwargs)
response = GeneralUtils.deFormatLLMtext(response, "qwen3")
response = GeneralUtils.deFormatLLMtext(response, a.llmFormatName)
think, response = GeneralUtils.extractthink(response)
# sometime the model response like this "here's how I would respond: ..."
if occursin("respond:", response)
@@ -1475,8 +1450,8 @@ function generatechat(a::sommelier, thoughtDict)
continue
end
println("\nYiemAgent generatechat() ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
pprintln(Dict(responsedict))
# println("\nYiemAgent generatechat() ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
# pprintln(Dict(responsedict))
# check whether an agent recommend wines before checking inventory or recommend wines
# outside its inventory
@@ -1510,7 +1485,7 @@ function generatechat(a::sommelier, thoughtDict)
error("generatechat failed to generate a response")
end
# modify it to work with customer object
function generatechat(a::companion; converPartnerName::Union{String, Nothing}=nothing, maxattempt=10)
response = nothing # placeholder for show when error msg show up
errornote = "N/A"
@@ -1548,6 +1523,7 @@ function generatechat(a::companion; converPartnerName::Union{String, Nothing}=no
response = a.func[:text2textInstructLLM](prompt; llmkwargs=llmkwargs, senderId=a.id)
response = replace(response, "<|im_start|>"=> "")
response = GeneralUtils.deFormatLLMtext(response, a.llmFormatName)
think, response = GeneralUtils.extractthink(response)
# check whether LLM just repeat the previous dialogue
for msg in a.chathistory
@@ -1558,33 +1534,91 @@ function generatechat(a::companion; converPartnerName::Union{String, Nothing}=no
end
end
return response
end
error("generatechat failed to generate a response")
end
#[WORKING] some time it copy exactly the same text as previous conversation partner msg.
# modify it to work with customer object
function generatechat(a::virtualcustomer;
converPartnerName::Union{String, Nothing}=nothing, maxattempt=10, recentEventNum=10
)
recent_ind = GeneralUtils.recentElementsIndex(length(a.memory[:events]), recentEventNum; includelatest=true)
recentevents = a.memory[:events][recent_ind]
recentEventsDict = createEventsLog(recentevents; eventindex=recent_ind)
response = nothing # placeholder for show when error msg show up
errornote = "N/A"
header = ["Dialogue:", "Role"]
dictkey = ["dialogue", "role"]
llmkwargs=Dict(
:num_ctx => 32768,
:temperature => 0.5,
)
for attempt in 1:maxattempt
if attempt > 1
println("\nYiemAgent generatechat() attempt $attempt/$maxattempt ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
end
assistantinfo =
"""
<information>
P.S. $errornote
</information>
"""
unformatPrompt =
[
Dict(:name => "system", :text => a.systemmsg),
]
unformatPrompt = vcat(unformatPrompt, recentEventsDict)
# put in model format
prompt = GeneralUtils.formatLLMtext(unformatPrompt, a.llmFormatName)
# add info
prompt = prompt * assistantinfo
response = a.func[:text2textInstructLLM](prompt; llmkwargs=llmkwargs, senderId=a.id)
response = replace(response, "<|im_start|>"=> "")
response = GeneralUtils.deFormatLLMtext(response, a.llmFormatName)
think, response = GeneralUtils.extractthink(response)
# check whether LLM just repeat the previous dialogue
for msg in a.chathistory
if msg[:text] == response
errornote = "In your previous attempt, you repeated the previous dialogue. Please try again."
println("\nYiemAgent generatechat() $errornote:\n$response ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
continue
end
end
# check whether response has all header
detected_kw = GeneralUtils.detect_keyword(header, response)
kwvalue = [i for i in values(detected_kw)]
zeroind = findall(x -> x == 0, kwvalue)
missingkeys = [header[i] for i in zeroind]
if 0 ∈ values(detected_kw)
errornote = "$missingkeys are missing from your previous response"
println("\nYiemAgent generatechat() $errornote:\n$response ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
continue
elseif sum(values(detected_kw)) > length(header)
errornote = "Your previous attempt has duplicated points"
println("\nYiemAgent generatechat() $errornote:\n$response ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
continue
end
responsedict = GeneralUtils.textToDict(response, header;
dictKey=dictkey, symbolkey=true)
if responsedict[:role] == "no"
errornote = "In your previous attempt you said $(responsedict[:dialogue]) which you, as a customer of a wine store, are not supposed to speak."
println("\nYiemAgent generatechat() $errornote:\n$response ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
continue
end
# # check whether response has all header
# detected_kw = GeneralUtils.detect_keyword(header, response)
# kwvalue = [i for i in values(detected_kw)]
# zeroind = findall(x -> x == 0, kwvalue)
# missingkeys = [header[i] for i in zeroind]
# if 0 ∈ values(detected_kw)
# errornote = "$missingkeys are missing from your previous response"
# println("\nYiemAgent generatechat() $errornote:\n$response ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
# continue
# elseif sum(values(detected_kw)) > length(header)
# errornote = "Your previous attempt has duplicated points"
# println("\nYiemAgent generatechat() $errornote:\n$response ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
# continue
# end
# responsedict = GeneralUtils.textToDict(response, header;
# dictKey=dictkey, symbolkey=true)
# println("\n$prompt", @__FILE__, ":", @__LINE__, " $(Dates.now())")
# println("\n $response")
return response
return responsedict[:dialogue]
end
error("generatechat failed to generate a response")
end
@@ -1635,11 +1669,10 @@ function generatequestion(a, text2textInstructLLM::Function, timeline)::String
You should then respond to the user with:
1) Thought: State your thought about the current situation
2) Q: "Ask yourself" at least five, but no more than ten, questions about the situation from your perspective.
2) Q: "Ask yourself" at least three, but no more than five, questions about the situation from your perspective.
3) A: Given the situation, "answer to yourself" the best you can. Do not generate any extra text after you finish answering all questions
You must only respond in format as described below:
Thought: ...
Q1: ...
A1: ...
Q2: ...
@@ -1671,6 +1704,8 @@ function generatequestion(a, text2textInstructLLM::Function, timeline)::String
A: No. I need more information from the user including ...
Q: What else do I need to know?
A: ...
Q: Should I present my item to the user?
A: Not yet, I will need to check my inventory first.
Q: Should I check our inventory now?
A: ...
Q: What the user intend to do with the car?
@@ -1689,14 +1724,14 @@ function generatequestion(a, text2textInstructLLM::Function, timeline)::String
A: ...
Q: what kind of car suitable for off-road trip?
A: A four-wheel drive SUV is a good choice for off-road trips.
Q: What car specification would satisfy the user's needs?
A: The user is seeking an eco-friendly vehicle that accommodates seven passengers, including seniors and children, with prioritized accessibility and efficient refueling. While electric vehicles (EVs) offer eco-friendly benefits, their long charging times make hybrid models more practical for fast refueling. Additionally, a lower ground level is essential for ease of entry/exit for seniors and children. A hybrid multi-purpose vehicle (MPV) emerges as the optimal solution, balancing sustainability, seating capacity, accessibility, and refueling efficiency.
Let's begin!
"""
header = ["Thought:", "Q1:"]
dictkey = ["thought", "q1"]
header = ["Q1:"]
dictkey = ["q1"]
context =
if length(a.memory[:shortmem][:available_wine]) != 0
@@ -1708,7 +1743,7 @@ function generatequestion(a, text2textInstructLLM::Function, timeline)::String
# recent_ind = GeneralUtils.recentElementsIndex(length(a.memory[:events]), recent)
# recentevents = a.memory[:events][recent_ind]
# timeline = createTimeline(recentevents; eventindex=recent_ind)
errornote = ""
errornote = "N/A"
response = nothing # store for show when error msg show up
# recap =
@@ -1736,7 +1771,6 @@ function generatequestion(a, text2textInstructLLM::Function, timeline)::String
for attempt in 1:10
if attempt > 1
println("\nYiemAgent generatequestion() attempt $attempt/10 ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
llmkwargs[:temperature] += 0.1
end
usermsg =
@@ -1753,11 +1787,13 @@ function generatequestion(a, text2textInstructLLM::Function, timeline)::String
]
# put in model format
prompt = GeneralUtils.formatLLMtext(_prompt, "qwen3")
prompt = GeneralUtils.formatLLMtext(_prompt, a.llmFormatName)
response = text2textInstructLLM(prompt;
modelsize="medium", llmkwargs=llmkwargs, senderId=a.id)
response = GeneralUtils.deFormatLLMtext(response, "qwen3")
response = GeneralUtils.deFormatLLMtext(response, a.llmFormatName)
think, response = GeneralUtils.extractthink(response)
# make sure generatequestion() don't have wine name that is not from retailer inventory
# check whether an agent recommend wines before checking inventory or recommend wines
# outside its inventory
@@ -1816,7 +1852,7 @@ function generatequestion(a, text2textInstructLLM::Function, timeline)::String
dictKey=dictkey, symbolkey=true)
response = "Q1: " * responsedict[:q1]
println("\nYiemAgent generatequestion() ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
pprintln(response)
try pprintln(response) catch e println(response) end
return response
end
@@ -1865,7 +1901,7 @@ function generateSituationReport(a, text2textInstructLLM::Function; skiprecent::
events = a.memory[:events][ind]
timeline = createTimeline(events)
errornote = ""
errornote = "N/A"
response = nothing # store for show when error msg show up
for attempt in 1:10
if attempt > 1 # use to prevent LLM generate the same respond over and over
@@ -1909,7 +1945,7 @@ function generateSituationReport(a, text2textInstructLLM::Function; skiprecent::
dictKey=dictkey, symbolkey=true)
println("\ngenerateSituationReport() ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
pprintln(response)
try pprintln(response) catch e println(response) end
return responsedict
end
@@ -1957,12 +1993,13 @@ function detectWineryName(a, text)
]
# put in model format
prompt = GeneralUtils.formatLLMtext(_prompt, "qwen3")
prompt = GeneralUtils.formatLLMtext(_prompt, a.llmFormatName)
response = a.func[:text2textInstructLLM](prompt; senderId=a.id)
response = GeneralUtils.deFormatLLMtext(response, "qwen3")
response = GeneralUtils.deFormatLLMtext(response, a.llmFormatName)
think, response = GeneralUtils.extractthink(response)
println("\ndetectWineryName() ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
pprintln(response)
try pprintln(response) catch e println(response) end
# check whether response has all header
detected_kw = GeneralUtils.detect_keyword(header, response)

View File

@@ -342,26 +342,24 @@ function extractWineAttributes_1(a::T1, input::T2; maxattempt=10
User's query: ...
You must follow the following guidelines:
- If specific information required in the preference form is not available in the query or there isn't any, mark with "NA" to indicate this.
- If specific information required in the preference form is not available in the query or there isn't any, mark with "N/A" to indicate this.
Additionally, words like 'any' or 'unlimited' mean no information is available.
- Do not generate other comments.
You should then respond to the user with:
Thought: state your understanding of the current situation
Wine_name: name of the wine
Winery: name of the winery
Vintage: the year of the wine
Region: a region (NOT a country) where the wine is produced, such as Burgundy, Napa Valley, etc
Country: a country where the wine is produced. Can be "Austria", "Australia", "France", "Germany", "Italy", "Portugal", "Spain", "United States"
Country: a country where wine is produced. Can be "Austria", "Australia", "France", "Germany", "Italy", "Portugal", "Spain", "United States"
Wine_type: can be one of: "red", "white", "sparkling", "rose", "dessert" or "fortified"
Grape_varietal: the name of the primary grape used to make the wine
Tasting_notes: a brief description of the wine's taste, such as "butter", "oak", "fruity", etc
Wine_price: price range of wine.
Tasting_notes: a word describe the wine's flavor, such as "butter", "oak", "fruity", "raspberry", "earthy", "floral", etc
Wine_price_range: price range of wine. Example: For price 10-20, price range will be "10 to 20". For price 100, price range will be 0 to 100.
Occasion: the occasion the user is having the wine for
Food_to_be_paired_with_wine: food that the user will be served with the wine such as poultry, fish, steak, etc
You should only respond in format as described below:
Thought: ...
Wine_name: ...
Winery: ...
Vintage: ...
@@ -370,40 +368,40 @@ function extractWineAttributes_1(a::T1, input::T2; maxattempt=10
Wine_type:
Grape_varietal: ...
Tasting_notes: ...
Wine_price: ...
Wine_price_range: ...
Occasion: ...
Food_to_be_paired_with_wine: ...
Here are some example:
User's query: red, Chenin Blanc, Riesling, 20 USD
{"reasoning": ..., "winery": "NA", "wine_name": "NA", "vintage": "NA", "region": "NA", "country": "NA", "wine_type": "red, white", "grape_varietal": "Chenin Blanc, Riesling", "tasting_notes": "NA", "wine_price": "0-20", "occasion": "NA", "food_to_be_paired_with_wine": "NA"}
User's query: red, Chenin Blanc, Riesling, 20 USD from Tuscany, Italy or Napa Valley, USA
Wine_name: N/A. Winery: N/A. Vintage: N/A. Region: Tuscany, Napa Valley. Country: Italy, United States. Wine_type: red, white. Grape_varietal: Chenin Blanc, Riesling. Tasting_notes: citrus. Wine_price_range: 0 to 20. Occasion: N/A. Food_to_be_paired_with_wine: N/A
User's query: Domaine du Collier Saumur Blanc 2019, France, white, Merlot
{"reasoning": ..., "winery": "Domaine du Collier", "wine_name": "Saumur Blanc", "vintage": "2019", "region": "Saumur", "country": "France", "wine_type": "white", "grape_varietal": "Merlot", "tasting_notes": "NA", "wine_price": "NA", "occasion": "NA", "food_to_be_paired_with_wine": "NA"}
Winery: Domaine du Collier. Wine_name: Saumur Blanc. Vintage: 2019. Region: Saumur. Country: France. Wine_type: white. Grape_varietal: Merlot. Tasting_notes: plum. Wine_price_range: N/A. Occasion: N/A. Food_to_be_paired_with_wine: N/A.
Let's begin!
"""
header = ["Thought:", "Wine_name:", "Winery:", "Vintage:", "Region:", "Country:", "Wine_type:", "Grape_varietal:", "Tasting_notes:", "Wine_price:", "Occasion:", "Food_to_be_paired_with_wine:"]
dictkey = ["thought", "wine_name", "winery", "vintage", "region", "country", "wine_type", "grape_varietal", "tasting_notes", "wine_price", "occasion", "food_to_be_paired_with_wine"]
header = ["Wine_name:", "Winery:", "Vintage:", "Region:", "Country:", "Wine_type:", "Grape_varietal:", "Tasting_notes:", "Wine_price_range:", "Occasion:", "Food_to_be_paired_with_wine:"]
dictkey = ["wine_name", "winery", "vintage", "region", "country", "wine_type", "grape_varietal", "tasting_notes", "wine_price_range", "occasion", "food_to_be_paired_with_wine"]
errornote = "N/A"
llmkwargs=Dict(
:num_ctx => 32768,
:temperature => 0.5,
:temperature => 0.2,
)
for attempt in 1:maxattempt
#[PENDING] I should add generatequestion()
if attempt > 1
println("\nYiemAgent extractWineAttributes_1() attempt $attempt/$maxattempt ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
end
usermsg =
"""
User's query: $input
$input
"""
assistantinfo =
"""
<information>
P.S. $errornote
</information>
"""
_prompt =
@@ -413,23 +411,30 @@ function extractWineAttributes_1(a::T1, input::T2; maxattempt=10
]
# put in model format
prompt = GeneralUtils.formatLLMtext(_prompt, "granite3")
prompt = GeneralUtils.formatLLMtext(_prompt, a.llmFormatName)
# add info
prompt = prompt * assistantinfo
response = a.func[:text2textInstructLLM](prompt;
modelsize="medium", llmkwargs=llmkwargs, senderId=a.id)
response = GeneralUtils.remove_french_accents(response)
response = GeneralUtils.deFormatLLMtext(response, "granite3")
response = GeneralUtils.deFormatLLMtext(response, a.llmFormatName)
think, response = GeneralUtils.extractthink(response)
# check whether all attributes are in the response
checkFlag = false
for word in header
if !occursin(word, response)
errornote = "In your previous attempts, the $word attribute is missing. Please try again."
println("\nYiemAgent extractWineAttributes_1() Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
checkFlag = true
break
# check whether response has all header
detected_kw = GeneralUtils.detect_keyword(header, response)
kwvalue = [i for i in values(detected_kw)]
zeroind = findall(x -> x == 0, kwvalue)
missingkeys = [header[i] for i in zeroind]
if 0 ∈ values(detected_kw)
errornote = "$missingkeys are missing from your previous response"
println("\nERROR YiemAgent decisionMaker() $errornote:\n$response ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
continue
elseif sum(values(detected_kw)) > length(header)
errornote = "Your previous attempt has duplicated points"
println("\nERROR YiemAgent decisionMaker() $errornote:\n$response ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
continue
end
end
checkFlag == true ? continue : nothing
# check whether response has all answer's key points
detected_kw = GeneralUtils.detect_keyword(header, response)
@@ -440,6 +445,7 @@ function extractWineAttributes_1(a::T1, input::T2; maxattempt=10
elseif sum(values(detected_kw)) > length(header)
errornote = "In your previous attempts, the response has duplicated answer's key points"
println("\nYiemAgent extractWineAttributes_1() Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
println(response)
continue
end
responsedict = GeneralUtils.textToDict(response, header;
@@ -460,29 +466,29 @@ function extractWineAttributes_1(a::T1, input::T2; maxattempt=10
if j ∉ [:thought, :tasting_notes, :occasion, :food_to_be_paired_with_wine]
# in case j is wine_price it needs to be checked differently because its value is ranged
if j == :wine_price
if responsedict[:wine_price] != "NA"
if responsedict[:wine_price] != "N/A"
# check whether wine_price is in ranged number
if !occursin('-', responsedict[:wine_price])
errornote = "In your previous attempt, the 'wine_price' was not set to a ranged number. Please adjust it accordingly."
if !occursin("to", responsedict[:wine_price])
errornote = "In your previous attempt, the 'wine_price' was set to $(responsedict[:wine_price]) which is not a correct format. Please adjust it accordingly."
println("\nERROR YiemAgent extractWineAttributes_1() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
checkFlag = true
break
end
# check whether max wine_price is in the input
pricerange = split(responsedict[:wine_price], '-')
minprice = pricerange[1]
maxprice = pricerange[end]
if !occursin(maxprice, input)
responsedict[:wine_price] = "NA"
end
# price range like 100-100 is not good
if minprice == maxprice
errornote = "In your previous attempt, you inputted 'wine_price' with a 'minimum' value equaling the 'maximum', which is not valid."
println("\nERROR YiemAgent extractWineAttributes_1() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
checkFlag = true
break
end
# # check whether max wine_price is in the input
# pricerange = split(responsedict[:wine_price], '-')
# minprice = pricerange[1]
# maxprice = pricerange[end]
# if !occursin(maxprice, input)
# responsedict[:wine_price] = "N/A"
# end
# # price range like 100-100 is not good
# if minprice == maxprice
# errornote = "In your previous attempt, you inputted 'wine_price' with a 'minimum' value equaling the 'maximum', which is not valid."
# println("\nERROR YiemAgent extractWineAttributes_1() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
# checkFlag = true
# break
# end
end
else
content = responsedict[j]
@@ -517,7 +523,7 @@ function extractWineAttributes_1(a::T1, input::T2; maxattempt=10
result = ""
for (k, v) in responsedict
# some time LLM generate text with "(some comment)". this line removes it
if !occursin("NA", v) && v != "" && !occursin("none", v) && !occursin("None", v)
if !occursin("N/A", v) && v != "" && !occursin("none", v) && !occursin("None", v)
result *= "$k: $v, "
end
end
@@ -580,7 +586,7 @@ function extractWineAttributes_2(a::T1, input::T2)::String where {T1<:agent, T2<
sweetness, acidity, tannin, intensity
You must follow the following guidelines:
1) If specific information required in the preference form is not available in the query or there isn't any, mark with 'NA' to indicate this.
1) If specific information required in the preference form is not available in the query or there isn't any, mark with 'N/A' to indicate this.
Additionally, words like 'any' or 'unlimited' mean no information is available.
2) Use the conversion table to convert the descriptive word level of sweetness, intensity, tannin, and acidity into a corresponding integer.
3) Do not generate other comments.
@@ -605,8 +611,8 @@ function extractWineAttributes_2(a::T1, input::T2)::String where {T1<:agent, T2<
Here are some examples:
User's query: I want a wine with a medium-bodied, low acidity, medium tannin.
Sweetness_keyword: NA
Sweetness: NA
Sweetness_keyword: N/A
Sweetness: N/A
Acidity_keyword: low acidity
Acidity: 1-2
Tannin_keyword: medium tannin
@@ -615,20 +621,20 @@ function extractWineAttributes_2(a::T1, input::T2)::String where {T1<:agent, T2<
Intensity: 3-4
User's query: German red wine, under 100, pairs with spicy food
Sweetness_keyword: NA
Sweetness: NA
Acidity_keyword: NA
Acidity: NA
Tannin_keyword: NA
Tannin: NA
Intensity_keyword: NA
Intensity: NA
Sweetness_keyword: N/A
Sweetness: N/A
Acidity_keyword: N/A
Acidity: N/A
Tannin_keyword: N/A
Tannin: N/A
Intensity_keyword: N/A
Intensity: N/A
Let's begin!
"""
header = ["Sweetness_keyword:", "Sweetness:", "Acidity_keyword:", "Acidity:", "Tannin_keyword:", "Tannin:", "Intensity_keyword:", "Intensity:"]
dictkey = ["sweetness_keyword", "sweetness", "acidity_keyword", "acidity", "tannin_keyword", "tannin", "intensity_keyword", "intensity"]
errornote = ""
errornote = "N/A"
for attempt in 1:10
usermsg =
@@ -645,10 +651,11 @@ function extractWineAttributes_2(a::T1, input::T2)::String where {T1<:agent, T2<
]
# put in model format
prompt = GeneralUtils.formatLLMtext(_prompt, "granite3")
prompt = GeneralUtils.formatLLMtext(_prompt, a.llmFormatName)
response = a.func[:text2textInstructLLM](prompt)
response = GeneralUtils.deFormatLLMtext(response, "granite3")
response = GeneralUtils.deFormatLLMtext(response, a.llmFormatName)
think, response = GeneralUtils.extractthink(response)
# check whether response has all answer's key points
detected_kw = GeneralUtils.detect_keyword(header, response)
@@ -669,23 +676,23 @@ function extractWineAttributes_2(a::T1, input::T2)::String where {T1<:agent, T2<
for i in ["sweetness", "acidity", "tannin", "intensity"]
keyword = Symbol(i * "_keyword") # e.g. sweetness_keyword
value = responsedict[keyword]
if value != "NA" && !occursin(value, input)
if value != "N/A" && !occursin(value, input)
errornote = "In your previous attempt, keyword $keyword: $value does not appear in the input. You must use information from the input only"
println("\nERROR YiemAgent extractWineAttributes_2() Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
continue
end
# if value == "NA" then responsedict[i] = "NA"
# e.g. if sweetness_keyword == "NA" then sweetness = "NA"
if value == "NA"
responsedict[Symbol(i)] = "NA"
# if value == "N/A" then responsedict[i] = "N/A"
# e.g. if sweetness_keyword == "N/A" then sweetness = "N/A"
if value == "N/A"
responsedict[Symbol(i)] = "N/A"
end
end
# some time LLM not put integer range
for (k, v) in responsedict
if !occursin("keyword", string(k))
if v !== "NA" && (!occursin('-', v) || length(v) > 5)
if v !== "N/A" && (!occursin('-', v) || length(v) > 5)
errornote = "WARNING: The non-range value {$k: $v} is not allowed. It should be specified in a range format, i.e. min-max."
println("\nERROR YiemAgent extractWineAttributes_2() Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
continue
@@ -693,10 +700,10 @@ function extractWineAttributes_2(a::T1, input::T2)::String where {T1<:agent, T2<
end
end
# some time LLM says NA-2. Need to convert NA to 1
# some time LLM says N/A-2. Need to convert N/A to 1
for (k, v) in responsedict
if occursin("NA", v) && occursin("-", v)
new_v = replace(v, "NA"=>"1")
if occursin("N/A", v) && occursin("-", v)
new_v = replace(v, "N/A"=>"1")
responsedict[k] = new_v
end
end
@@ -704,7 +711,7 @@ function extractWineAttributes_2(a::T1, input::T2)::String where {T1<:agent, T2<
result = ""
for (k, v) in responsedict
# some time LLM generate text with "(some comment)". this line removes it
if !occursin("NA", v)
if !occursin("N/A", v)
result *= "$k: $v, "
end
end
@@ -756,7 +763,7 @@ function paraphrase(text2textInstructLLM::Function, text::String)
header = ["Paraphrase:"]
dictkey = ["paraphrase"]
errornote = ""
errornote = "N/A"
response = nothing # placeholder for show when error msg show up
@@ -773,11 +780,12 @@ function paraphrase(text2textInstructLLM::Function, text::String)
]
# put in model format
prompt = GeneralUtils.formatLLMtext(_prompt, "granite3")
prompt = GeneralUtils.formatLLMtext(_prompt, a.llmFormatName)
try
response = text2textInstructLLM(prompt)
response = GeneralUtils.deFormatLLMtext(response, "granite3")
response = GeneralUtils.deFormatLLMtext(response, a.llmFormatName)
think, response = GeneralUtils.extractthink(response)
# sometime the model response like this "here's how I would respond: ..."
if occursin("respond:", response)
errornote = "You don't need to intro your response"

View File

@@ -1,6 +1,6 @@
module type
export agent, sommelier, companion
export agent, sommelier, companion, virtualcustomer
using Dates, UUIDs, DataStructures, JSON3
using GeneralUtils
@@ -24,16 +24,12 @@ end
function companion(
func::NamedTuple # NamedTuple of functions
;
systemmsg::Union{String, Nothing}= nothing,
name::String= "Assistant",
id::String= GeneralUtils.uuid4snakecase(),
maxHistoryMsg::Integer= 20,
chathistory::Vector{Dict{Symbol, String}} = Vector{Dict{Symbol, String}}(),
llmFormatName::String= "granite3"
)
if systemmsg === nothing
systemmsg =
llmFormatName::String= "granite3",
systemmsg::String=
"""
Your name: $name
Your sex: Female
@@ -43,8 +39,8 @@ function companion(
- Your like to be short and concise.
Let's begin!
"""
end
""",
)
tools = Dict( # update input format
"CHATBOX"=> Dict(
@@ -222,6 +218,80 @@ function sommelier(
end
mutable struct virtualcustomer <: agent
# Simulated-customer agent; field layout mirrors the other `agent` subtypes
# (e.g. `companion`), and the positional order below must match the positional
# constructor call made by the `virtualcustomer(func; ...)` keyword constructor.
name::String # agent name (interpolated into the default system prompt)
id::String # agent id (random UUID string by default)
systemmsg::String # system message prepended at inference time
tools::Dict # tool registry keyed by tool name, values hold :description/:input/:output
maxHistoryMsg::Integer # e.g. the 21st and earlier messages will get summarized
chathistory::Vector{Dict{Symbol, Any}} # prior messages; values are Any so entries can carry e.g. timestamps
memory::Dict{Symbol, Any} # :shortmem / :events / :state / :recap — see constructor
func # NamedTuple of functions (untyped; presumably LLM callables — TODO confirm)
llmFormatName::String # prompt format name, e.g. "granite3"
end
"""
    virtualcustomer(func; name, id, maxHistoryMsg, chathistory, llmFormatName, systemmsg)

Keyword constructor for the `virtualcustomer` agent.

# Arguments
- `func`: NamedTuple of functions the agent calls (untyped, as in the struct).

# Keywords
- `name::String`: display name; interpolated into the default `systemmsg`.
- `id::String`: unique agent id (random UUID string by default).
- `maxHistoryMsg::Integer`: messages beyond this count get summarized.
- `chathistory`: prior messages; any `Vector{<:Dict{Symbol}}` is accepted and
  converted to the struct's `Vector{Dict{Symbol, Any}}` field.
- `llmFormatName::String`: prompt format name (e.g. "granite3").
- `systemmsg::String`: system prompt; defaults to a short persona built from `name`.

# Returns
A fully initialized `virtualcustomer`.
"""
function virtualcustomer(
    func, # NamedTuple of functions
    ;
    name::String= "Assistant",
    id::String= string(uuid4()),
    maxHistoryMsg::Integer= 20,
    # Widened from Vector{Dict{Symbol, String}}: the struct field is
    # Vector{Dict{Symbol, Any}} and the documented message shape carries
    # non-String values (e.g. :timestamp => Dates.now()), which the old
    # annotation rejected. Old call sites still match this wider type.
    chathistory::Vector{<:Dict{Symbol}} = Vector{Dict{Symbol, Any}}(),
    llmFormatName::String= "granite3",
    # NOTE(review): "Your like" below looks like a typo for "You like", but this
    # string is a runtime LLM prompt — confirm before changing its wording.
    systemmsg::String=
    """
    Your name: $name
    Your sex: Female
    Your role: You are a helpful assistant.
    You should follow the following guidelines:
    - Focus on the latest conversation.
    - Your like to be short and concise.
    Let's begin!
    """,
)
    # Tool registry: key is the tool name the agent's decision loop matches on.
    # NOTE(review): key is lowercase "chatbox" while `companion` uses "CHATBOX" —
    # confirm which casing the dispatch code expects.
    tools = Dict( # update input format
        "chatbox"=> Dict(
            :description => "<askbox tool description>Useful for when you need to ask the user for more context. Do not ask the user their own question.</askbox tool description>",
            :input => """<input>Input is a text in JSON format.</input><input example>{\"Q1\": \"How are you doing?\", \"Q2\": \"How may I help you?\"}</input example>""",
            :output => "" ,
        ),
    )
    # Memory layout. Chat prompt format ref:
    # https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGML/discussions/3
    # No "system" message is stored in chathistory because it is added at
    # inference time; entries look like
    #   Dict(:name=>"user", :text=> "Wassup!", :timestamp=> Dates.now())
    #   Dict(:name=>"assistant", :text=> "Hi I'm your assistant.", :timestamp=> Dates.now())
    memory = Dict{Symbol, Any}(
        :shortmem=> OrderedDict{Symbol, Any}(
        ),
        :events=> Vector{Dict{Symbol, Any}}(),
        :state=> Dict{Symbol, Any}(
        ),
        :recap=> OrderedDict{Symbol, Any}(),
    )
    # Positional order must match the `virtualcustomer` struct field order.
    newAgent = virtualcustomer(
        name,
        id,
        systemmsg,
        tools,
        maxHistoryMsg,
        chathistory,
        memory,
        func,
        llmFormatName
    )
    return newAgent
end

View File

@@ -1,7 +1,7 @@
module util
export clearhistory, addNewMessage, chatHistoryToText, eventdict, noises, createTimeline,
availableWineToText
availableWineToText, createEventsLog
using UUIDs, Dates, DataStructures, HTTP, JSON3
using GeneralUtils
@@ -313,35 +313,47 @@ function createTimeline(events::T1; eventindex::Union{UnitRange, Nothing}=nothin
end
# function createTimeline(events::T1; eventindex::Union{UnitRange, Nothing}=nothing
# ) where {T1<:AbstractVector}
# # Initialize empty timeline string
# timeline = ""
"""
    createEventsLog(events; eventindex=nothing)

Convert a vector of event dicts into a chat-style log.

Each event must provide `:subject`, `:actioninput`, and `:outcome`. Every log
entry is `Dict{Symbol, String}(:name => subject, :text => ...)` where `text` is
the raw `:actioninput` when `:outcome === nothing`, otherwise
`"subject: actioninput outcome"`.

# Arguments
- `events::AbstractVector`: event dicts (values assumed `String`-compatible for
  the fields used — TODO confirm against `eventdict`).

# Keywords
- `eventindex::Union{UnitRange, Nothing}`: original-position indices of
  `events`. The indices themselves are not used in the output, but (preserving
  the original `zip` behavior) a range shorter than `events` truncates the log.

# Returns
`Vector{Dict{Symbol, String}}` — one entry per processed event.
"""
function createEventsLog(events::T1; eventindex::Union{UnitRange, Nothing}=nothing
    ) where {T1<:AbstractVector}
    log = Dict{Symbol, String}[]
    # The original zipped `events` with an (unused) index collection, which
    # silently truncated iteration to the shorter of the two; keep that.
    nevents = eventindex === nothing ? length(events) : min(length(eventindex), length(events))
    for event in view(events, 1:nevents)
        subject = event[:subject]
        actioninput = event[:actioninput]
        # Include the outcome in the text only when one was recorded.
        text =
            if event[:outcome] === nothing
                actioninput
            else
                "$subject: $actioninput $(event[:outcome])"
            end
        push!(log, Dict{Symbol, String}(:name => subject, :text => text))
    end
    return log
end
# # Determine which indices to use - either provided range or full length
# ind =
# if eventindex !== nothing
# [eventindex...]
# else
# 1:length(events)
# end
# # Iterate through events and format each one
# for (i, event) in zip(ind, events)
# # If no outcome exists, format without outcome
# subject = titlecase(event[:subject])
# if event[:outcome] === nothing
# timeline *= "Event_$i) Who: $subject Action_name: $(event[:actionname]) Action_input: $(event[:actioninput])\n"
# # If outcome exists, include it in formatting
# else
# timeline *= "Event_$i) Who: $subject Action_name: $(event[:actionname]) Action_input: $(event[:actioninput]) Action output: $(event[:outcome])\n"
# end
# end
# # Return formatted timeline string
# return timeline
# end