diff --git a/src/interface.jl b/src/interface.jl
index 9df7fb2..622ef50 100644
--- a/src/interface.jl
+++ b/src/interface.jl
@@ -97,7 +97,7 @@ julia> output_thoughtDict = Dict(
# Signature
"""
-function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agent}
+function decisionMaker(a::T; recent::Integer=10)::Dict{Symbol,Any} where {T<:agent}
# lessonDict = copy(JSON3.read("lesson.json"))
@@ -124,23 +124,9 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen
# """
# end
- totalevents = length(a.memory[:events])
- ind =
- if totalevents > recent
- start = totalevents - recent
- start:totalevents
- else
- 1:totalevents
- end
-
- recentevents = ""
- for (i, event) in enumerate(a.memory[:events][ind])
- if event[:outcome] === nothing
- recentevents *= "$i) $(event[:subject])> $(event[:actioninput])\n"
- else
- recentevents *= "$i) $(event[:subject])> $(event[:actioninput]) $(event[:outcome])\n"
- end
- end
+ recent_ind = GeneralUtils.recentElementsIndex(length(a.memory[:events]), recent)
+ recentevents = a.memory[:events][recent_ind]
+ timeline = createTimeline(recentevents)
#[TESTING] recap as caching
# query similar result from vectorDB
@@ -165,7 +151,8 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen
end
recentrecap = GeneralUtils.dictToString_noKey(_recentrecap)
- similarDecision = a.func[:similarSommelierDecision](recentrecap)
+ # similarDecision = a.func[:similarSommelierDecision](recentrecap)
+ similarDecision = nothing #CHANGE
if similarDecision !== nothing
responsedict = similarDecision
@@ -176,7 +163,7 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen
Your name is $(a.name). You are a helpful English-speaking assistant, acting as a polite, website-based sommelier for $(a.retailername)'s wine store.
Your goal includes:
1) Establish a connection with the customer by greeting them warmly
- 2) Help them select the best wines only from your store's inventory that align with their preferences
+ 2) Guide them to select the best wines only from your store's inventory that align with their preferences
Your responsibility includes:
1) Make an informed decision about what you need to do to achieve the goal
@@ -197,7 +184,7 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen
- Approach each customer with open-ended questions to understand their preferences, budget, and occasion. This will help you guide the conversation naturally while gathering essential insights. Once you have this information, you can efficiently check your inventory for the best match.
- Do not ask the user about wine's flavor e.g. floral, citrusy, nutty or some thing similar as these terms cannot be used to search the database.
- Once the user has selected their wine, ask the user if they need any further assistance. Do not offer any additional services. If the user doesn't need any further assistance, say goodbye and invite them to come back next time.
- - Medium and full-bodied red wines should not be paired with spicy foods.
+ - Spicy foods should not be paired with medium and full-bodied red wines.
You should follow the following guidelines:
- When searching an inventory, search as broadly as possible based on the information you have gathered so far.
@@ -208,23 +195,20 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen
- Your store carries only wine.
- Vintage 0 means non-vintage.
- You should then respond to the user with interleaving Understanding, Reasoning, Plan, Action:
- 1) Understanding:
- - State your understanding about the current situation.
- 2) Reasoning:
- - State your step by step reasoning about the current situation.
- 3) Plan: Based on the current situation, state a complete plan to complete the task. Be specific.
- 4) Action_name (Must be aligned with your plan): The name of the action. Typically corresponds to the execution of the first step in your plan.
- Can be one of the following functions:
- - CHATBOX which you can use to talk with the user. The input is your intentions for the dialogue. Be specific.
- - CHECKINVENTORY which you can use to check info about wine you want in your inventory. The input is a search term in verbal English.
- Good query example: white wine, full-bodied, France, less than 2000 USD.
- - ENDCONVERSATION which you can use when the user has finished their conversation with you, so that you can properly end the conversation. Input is "NA".
- 5) Action_input: input of the action
+ You should then respond to the user with interleaving Thought, Plan, Action_name, Action_input:
+ 1) Thought: Articulate your current understanding and consider the present situation.
+ 2) Plan: Based on the current situation, state a complete action plan to complete the task. Be specific.
+ 3) Action_name: (Typically corresponds to the execution of the first step in your plan) Can be one of the following function names:
+ - CHATBOX which you can use to talk with the user. The input is your intentions for the dialogue. Be specific.
+ - CHECKINVENTORY which you can use to check info about wine you want in your inventory. The input is a search term in verbal English.
+        Bad query example 1: red wine that pairs well with spicy food.
+ Bad query example 2: white wine that goes well with party food.
+
+ - ENDCONVERSATION which you can use when the user has finished their conversation with you, so that you can properly end the conversation. Input is "NA".
+ 4) Action_input: input of the action
You should only respond in format as described below:
- Understanding: ...
- Reasoning: ...
+ Thought: ...
Plan: ...
Action_name: ...
Action_input: ...
@@ -232,8 +216,8 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen
Let's begin!
"""
- header = ["Understanding:", "Reasoning:", "Plan:", "Action_name:", "Action_input:"]
- dictkey = ["understanding", "reasoning", "plan", "action_name", "action_input"]
+ header = ["Thought:", "Plan:", "Action_name:", "Action_input:"]
+ dictkey = ["thought", "plan", "action_name", "action_input"]
chathistory = chatHistoryToText(a.chathistory)
@@ -248,10 +232,9 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen
for winename in winenames
if !occursin(winename, chathistory)
- println("\nYiem decisionMaker() found wines from DB ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
+ println("\nYiem decisionMaker() found wines from DB ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
d = Dict(
- :understanding=> "I understand that the customer is looking for a wine that matches their intention and budget.",
- :reasoning=> "I checked the inventory and found wines that match the customer's criteria. I will present the wines to the customer.",
+ :thought=> "I understand that the customer is looking for a wine that matches their intention and budget. I checked the inventory and found wines that match the customer's criteria. I will present the wines to the customer.",
:plan=> "1) Provide detailed introductions of the wines you just found to the customer.
2) Explain how the wine could match the customer's intention and what its effects might mean for the customer's experience.
3) If multiple wines are available, highlight their differences and provide a comprehensive comparison of how each option aligns with the customer's intention and what the potential effects of each option could mean for the customer's experience.
@@ -286,7 +269,7 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen
usermsg =
"""
$context
- Your recent events: $recentevents
+ Your recent events: $timeline
Your Q&A: $QandA)
$errornote
"""
@@ -315,17 +298,17 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen
end
if count > 1
errornote = "You must use only one function"
- println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
+ println("Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
continue
end
# check whether response has all header
detected_kw = GeneralUtils.detect_keyword(header, response)
- if sum(values(detected_kw)) < length(header)
- errornote = "\nSQL evaluator() response does not have all header"
+ if 0 ∈ values(detected_kw)
+ errornote = "\nYiemAgent decisionMaker() response does not have all header"
continue
elseif sum(values(detected_kw)) > length(header)
- errornote = "\nSQL evaluator() response has duplicated header"
+ errornote = "\nYiemAgent decisionMaker() response has duplicated header"
continue
end
@@ -334,16 +317,15 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen
if responsedict[:action_name] ∉ ["CHATBOX", "CHECKINVENTORY", "ENDCONVERSATION"]
errornote = "You must use the given functions"
- println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
+ println("Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
continue
end
checkFlag = false
- for i ∈ [:understanding, :plan, :action_name]
+ for i ∈ Symbol.(dictkey)
if length(responsedict[i]) == 0
- error("$i is empty ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
errornote = "$i is empty"
- println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
+ println("Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
checkFlag = true
break
end
@@ -352,18 +334,18 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen
# check if there are more than 1 key per categories
checkFlag = false
- for i ∈ [:understanding, :plan, :action_name, :action_input]
+ for i ∈ Symbol.(dictkey)
matchkeys = GeneralUtils.findMatchingDictKey(responsedict, i)
if length(matchkeys) > 1
errornote = "DecisionMaker has more than one key per categories"
- println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
+ println("Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
checkFlag = true
break
end
end
checkFlag == true ? continue : nothing
- println("\nYiem decisionMaker() ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
+ println("\nYiem decisionMaker() ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
pprintln(Dict(responsedict))
# check whether an agent recommend wines before checking inventory or recommend wines
@@ -390,7 +372,7 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen
isWineInEvent == false
errornote = "Note: Before recommending a wine, ensure it's in your inventory. Check your stock first."
- println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
+ println("Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
continue
end
end
@@ -543,7 +525,7 @@ end
# showerror(io, e)
# errorMsg = String(take!(io))
# st = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace()))
-# println("\nAttempt $attempt. Error occurred: $errorMsg\n$st ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
+# println("\nAttempt $attempt. Error occurred: $errorMsg\n$st ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
# end
# end
# error("evaluator failed to generate an evaluation")
@@ -673,7 +655,7 @@ end
# showerror(io, e)
# errorMsg = String(take!(io))
# st = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace()))
-# println("\nAttempt $attempt. Error occurred: $errorMsg\n$st ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
+# println("\nAttempt $attempt. Error occurred: $errorMsg\n$st ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
# end
# end
# error("reflector failed to generate a thought")
@@ -864,10 +846,36 @@ function think(a::T)::NamedTuple{(:actionname, :result),Tuple{String,String}} wh
subject="assistant",
thought=thoughtDict,
actionname=actionname,
- actioninput=chatresponse,
+ actioninput=actioninput,
)
)
result = chatresponse
+ # if actionname ∈ ["CHATBOX", "ENDCONVERSATION"]
+ # # chatresponse = generatechat(a, thoughtDict)
+ # push!(a.memory[:events],
+ # eventdict(;
+ # event_description="the assistant talks to the user.",
+ # timestamp=Dates.now(),
+ # subject="assistant",
+ # thought=thoughtDict,
+ # actionname=actionname,
+ # actioninput=actioninput,
+ # )
+ # )
+ # result = actioninput
+ # elseif actionname ∈ ["PRESENTBOX"]
+ # chatresponse = generatechat(a, thoughtDict)
+ # push!(a.memory[:events],
+ # eventdict(;
+ # event_description="the assistant talks to the user.",
+ # timestamp=Dates.now(),
+ # subject="assistant",
+ # thought=thoughtDict,
+ # actionname=actionname,
+ # actioninput=chatresponse,
+ # )
+ # )
+ # result = chatresponse
elseif actionname == "CHECKINVENTORY"
if rawresponse !== nothing
@@ -895,13 +903,171 @@ function think(a::T)::NamedTuple{(:actionname, :result),Tuple{String,String}} wh
)
)
else
- error("condition is not defined ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
+ error("condition is not defined ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
end
return (actionname=actionname, result=result)
end
+#[WORKING]
+function presentbox(a::sommelier, thoughtDict)
+ systemmsg =
+ """
+
+ Your name is $(a.name). You are a helpful English-speaking assistant, acting as a polite, website-based sommelier for $(a.retailername)'s wine store.
+
+
+ You have checked the inventory and found wines that match the customer's criteria.
+
+
+ Present the wines to the customer in a way that keep the conversation smooth and engaging.
+
+
+ Your ongoing conversation with the user: ...
+ Inventory check result: ...
+ Your thoughts: Your current thoughts in your mind
+
+
+ - Do not offer additional services you didn't think of.
+ - Focus on plan.
+
+
+ - Focus on the latest conversation.
+ - If the user interrupts, prioritize the user
+ - Be honest
+ - Medium and full-bodied red wines should not be paired with spicy foods.
+
+
+ Chat: ...
+
+
+ Your ongoing conversation with the user: "user> hello, I need a new car\n"
+ Additional info: "Car previously found in your inventory: 1) Toyota Camry 2020 2) Honda Civic 2021 3) Ford Mustang 2022"
+ Your thoughts: "I should recommend the car we have to the user."
+ Chat: "We have a variety of cars available, including the Toyota Camry 2020, the Honda Civic 2021, and the Ford Mustang 2022. Which one would you like to see?"
+
+
+ Let's begin!
+ """
+
+ header = ["Chat:"]
+ dictkey = ["chat"]
+
+ # a.memory[:shortmem][:available_wine] is a vector of dictionary
+ context =
+ if length(a.memory[:shortmem][:available_wine]) != 0
+ "Wines previously found in your inventory: $(availableWineToText(a.memory[:shortmem][:available_wine]))"
+ else
+ "N/A"
+ end
+
+ chathistory = chatHistoryToText(a.chathistory)
+ errornote = ""
+ response = nothing # placeholder for show when error msg show up
+
+ yourthought = "$(thoughtDict[:thought]) $(thoughtDict[:plan])"
+ yourthought1 = nothing
+
+ for attempt in 1:10
+
+ if attempt > 1 # use to prevent LLM generate the same respond over and over
+ yourthought1 = paraphrase(a.func[:text2textInstructLLM], yourthought)
+ else
+ yourthought1 = yourthought
+ end
+
+ usermsg = """
+
+ $chathistory
+
+
+ $context
+
+
+ $yourthought1
+
+ $errornote
+ """
+
+ _prompt =
+ [
+ Dict(:name => "system", :text => systemmsg),
+ Dict(:name => "user", :text => usermsg)
+ ]
+
+ # put in model format
+ prompt = GeneralUtils.formatLLMtext(_prompt; formatname="qwen")
+
+ response = a.func[:text2textInstructLLM](prompt)
+ # sometime the model response like this "here's how I would respond: ..."
+ if occursin("respond:", response)
+ errornote = "You don't need to intro your response"
+        error("presentbox() response contains 'respond:' ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
+ end
+ response = GeneralUtils.remove_french_accents(response)
+ response = replace(response, '*'=>"")
+ response = replace(response, '$' => "USD")
+ response = replace(response, '`' => "")
+ response = replace(response, "<|eot_id|>"=>"")
+ response = GeneralUtils.remove_french_accents(response)
+
+ # check whether response has all header
+ detected_kw = GeneralUtils.detect_keyword(header, response)
+ if 0 ∈ values(detected_kw)
+        errornote = "\nYiemAgent presentbox() response does not have all header"
+ continue
+ elseif sum(values(detected_kw)) > length(header)
+        errornote = "\nYiemAgent presentbox() response has duplicated header"
+ continue
+ end
+
+ responsedict = GeneralUtils.textToDict(response, header;
+ dictKey=dictkey, symbolkey=true)
+
+ # check if Context: is in chat
+ if occursin("Context:", responsedict[:chat])
+ error("Context: is in text. This is not allowed")
+ end
+
+    println("\npresentbox() ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
+ pprintln(Dict(responsedict))
+
+ # check whether an agent recommend wines before checking inventory or recommend wines
+ # outside its inventory
+ # ask LLM whether there are any winery mentioned in the response
+ mentioned_winery = detectWineryName(a, responsedict[:chat])
+ if mentioned_winery != "None"
+ mentioned_winery = String.(strip.(split(mentioned_winery, ",")))
+
+ # check whether the wine is in event
+ isWineInEvent = false
+ for winename in mentioned_winery
+ for event in a.memory[:events]
+ if event[:outcome] !== nothing && occursin(winename, event[:outcome])
+ isWineInEvent = true
+ break
+ end
+ end
+ end
+
+ # if wine is mentioned but not in timeline or shortmem,
+ # then the agent is not supposed to recommend the wine
+ if isWineInEvent == false
+
+            errornote = "Previously, you recommended wines that are not in your inventory, which is not allowed."
+            error("Previously, you recommended wines that are not in your inventory, which is not allowed.")
+ end
+ end
+
+ result = responsedict[:chat]
+
+ return result
+ end
+    error("presentbox failed to generate a response")
+end
+
+
"""
@@ -925,50 +1091,41 @@ julia>
function generatechat(a::sommelier, thoughtDict)
systemmsg =
"""
+ Your role:
Your name is $(a.name). You are a helpful English-speaking assistant, acting as a polite, website-based sommelier for $(a.retailername)'s wine store.
- You are currently talking with the user.
- Your goal includes:
- 1) Help the user select the best wines from your inventory that align with the user's preferences.
-
- Your responsibility includes:
- 1) Given the situation, convey your thoughts to the user.
-
- Your responsibility does NOT includes:
- 1) Requesting the user to place an order, make a purchase, or confirm the order. These are the job of our sales team at the store.
- 2) Processing sales orders or engaging in any other sales-related activities. These are the job of our sales team at the store.
- 3) Answering questions or offering additional services beyond those related to your store's wine recommendations such as discounts, quantity, rewards programs, promotions, delivery options, shipping, boxes, gift wrapping, packaging, personalized messages or something similar. These are the job of our sales team at the store.
-
- At each round of conversation, you will be given the current situation:
- Your ongoing conversation with the user: ...
- Context: ...
+ Situation:
+ You have some thinking in mind while you are talking with the user.
+ Your mission:
+ Concentrate on your thoughts and articulate them clearly. Keep the conversation engaging.
+ Your responsibility does NOT includes:
+ - Requesting the user to place an order, make a purchase, or confirm the order. These are the job of our sales team at the store.
+ - Processing sales orders or engaging in any other sales-related activities. These are the job of our sales team at the store.
+ - Answering questions or offering additional services beyond those related to your store's wine recommendations such as discounts, quantity, rewards programs, promotions, delivery options, shipping, boxes, gift wrapping, packaging, personalized messages or something similar. These are the job of our sales team at the store.
+ At each round of conversation, you will be given the following:
+ Additional info: ...
Your thoughts: Your current thoughts in your mind
-
- You MUST follow the following guidelines:
- - Do not offer additional services you didn't thought.
- - Focus on plan.
-
- You should follow the following guidelines:
- - Focus on the latest conversation.
+ Your ongoing conversation with the user: ...
+ You must follow the following guidelines:
+        - Do not offer additional services you didn't think of
+ You should follow the following guidelines:
+ - Focus on the latest conversation
- If the user interrupts, prioritize the user
- Be honest
- - Medium and full-bodied red wines should not be paired with spicy foods.
-
- You should then respond to the user with:
- 1) Chat: Given the situation, How would you respond to the user to express your thoughts honestly and keep the conversation going smoothly?
-
- You should only respond in format as described below:
- Chat: ...
-
- Here are some examples:
+ You should then respond to the user with:
+ Dialogue: what you want to say to the user
+ You should only respond in format as described below:
+ Dialogue: ...
+ Here are some examples:
Your ongoing conversation with the user: "user> hello, I need a new car\n"
- Context: "Car previously found in your inventory: 1) Toyota Camry 2020 2) Honda Civic 2021 3) Ford Mustang 2022"
- Chat: "Oh, we have a variety of cars available, including the Toyota Camry 2020, the Honda Civic 2021, and the Ford Mustang 2022. Which one would you like to see?"
+ Additional info: "Car previously found in your inventory: 1) Toyota Camry 2020 2) Honda Civic 2021 3) Ford Mustang 2022"
+ Your thoughts: "I should recommend the car we have to the user."
+ Dialogue: "We have a variety of cars available, including the Toyota Camry 2020, the Honda Civic 2021, and the Ford Mustang 2022. Which one would you like to see?"
- Let's begin!
+ Let's begin!
"""
-
- header = ["Chat:"]
- dictkey = ["chat"]
+ #[WORKING] remove "chat"
+ header = ["Dialogue:"]
+ dictkey = ["dialogue"]
# a.memory[:shortmem][:available_wine] is a vector of dictionary
context =
@@ -982,7 +1139,7 @@ function generatechat(a::sommelier, thoughtDict)
errornote = ""
response = nothing # placeholder for show when error msg show up
- yourthought = "$(thoughtDict[:understanding]) $(thoughtDict[:reasoning]) $(thoughtDict[:plan])"
+ yourthought = "$(thoughtDict[:thought]) $(thoughtDict[:plan])"
yourthought1 = nothing
for attempt in 1:10
@@ -993,12 +1150,13 @@ function generatechat(a::sommelier, thoughtDict)
yourthought1 = yourthought
end
- usermsg = """
- Your ongoing conversation with the user: $chathistory
- $context
- Your thoughts: $yourthought1
- $errornote
- """
+ usermsg =
+ """
+ $errornote
+ Additional info: $context
+ Your thoughts: $yourthought1
+ Your ongoing conversation with the user: $chathistory
+ """
_prompt =
[
@@ -1009,89 +1167,237 @@ function generatechat(a::sommelier, thoughtDict)
# put in model format
prompt = GeneralUtils.formatLLMtext(_prompt; formatname="qwen")
- try
- response = a.func[:text2textInstructLLM](prompt)
- # sometime the model response like this "here's how I would respond: ..."
- if occursin("respond:", response)
- errornote = "You don't need to intro your response"
- error("generatechat() response contain : ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
- end
- response = GeneralUtils.remove_french_accents(response)
- response = replace(response, '*'=>"")
- response = replace(response, '$' => "USD")
- response = replace(response, '`' => "")
- response = replace(response, "<|eot_id|>"=>"")
- response = GeneralUtils.remove_french_accents(response)
+ response = a.func[:text2textInstructLLM](prompt)
+ # sometime the model response like this "here's how I would respond: ..."
+ if occursin("respond:", response)
+ errornote = "You don't need to intro your response"
+            error("generatechat() response contains 'respond:' ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
+ end
+ response = GeneralUtils.remove_french_accents(response)
+ response = replace(response, '*'=>"")
+ response = replace(response, '$' => "USD")
+ response = replace(response, '`' => "")
+ response = replace(response, "<|eot_id|>"=>"")
+ response = GeneralUtils.remove_french_accents(response)
- # check whether response has all header
- detected_kw = GeneralUtils.detect_keyword(header, response)
- if sum(values(detected_kw)) < length(header)
- errornote = "\nYiemAgent generatechat() response does not have all header"
- continue
- elseif sum(values(detected_kw)) > length(header)
- errornote = "\nnYiemAgent generatechat() response has duplicated header"
- continue
- end
+ # check whether response has all header
+ detected_kw = GeneralUtils.detect_keyword(header, response)
+ kwvalue = [i for i in values(detected_kw)]
+ zeroind = findall(x -> x == 0, kwvalue)
+ missingkeys = [header[i] for i in zeroind]
+ if 0 ∈ values(detected_kw)
+ errornote = "$missingkeys are missing from your previous response"
+ println("\nYiemAgent generatechat() $errornote:\n $response ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
+ continue
+ elseif sum(values(detected_kw)) > length(header)
+ errornote = "Your response has duplicated points"
+ println("\n$errornote: $response ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
+ continue
+ end
- responsedict = GeneralUtils.textToDict(response, header;
- dictKey=dictkey, symbolkey=true)
+ responsedict = GeneralUtils.textToDict(response, header;
+ dictKey=dictkey, symbolkey=true)
- # # check if there are more than 1 key per categories
- # for i ∈ Symbol.(dictkey)
- # matchkeys = GeneralUtils.findMatchingDictKey(responsedict, i)
- # if length(matchkeys) > 1
- # error("generatechat has more than one key per categories")
- # end
- # end
+ # check if Context: is in chat
+ if occursin("Context:", responsedict[:dialogue])
+ println("\nYiemAgent generatechat() context is in response. This is not allowed", @__FILE__, ":", @__LINE__, " $(Dates.now())")
+ continue
+ end
- # check if Context: is in chat
- if occursin("Context:", responsedict[:chat])
- error("Context: is in text. This is not allowed")
- end
+ println("\ngeneratechat() ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
+ pprintln(Dict(responsedict))
- println("\ngeneratechat() ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
- pprintln(Dict(responsedict))
+ # check whether an agent recommend wines before checking inventory or recommend wines
+ # outside its inventory
+ # ask LLM whether there are any winery mentioned in the response
+ mentioned_winery = detectWineryName(a, response)
+ if mentioned_winery != "None"
+ mentioned_winery = String.(strip.(split(mentioned_winery, ",")))
- # check whether an agent recommend wines before checking inventory or recommend wines
- # outside its inventory
- # ask LLM whether there are any winery mentioned in the response
- mentioned_winery = detectWineryName(a, responsedict[:chat])
- if mentioned_winery != "None"
- mentioned_winery = String.(strip.(split(mentioned_winery, ",")))
-
- # check whether the wine is in event
- isWineInEvent = false
- for winename in mentioned_winery
- for event in a.memory[:events]
- if event[:outcome] !== nothing && occursin(winename, event[:outcome])
- isWineInEvent = true
- break
- end
+ # check whether the wine is in event
+ isWineInEvent = false
+ for winename in mentioned_winery
+ for event in a.memory[:events]
+ if event[:outcome] !== nothing && occursin(winename, event[:outcome])
+ isWineInEvent = true
+ break
end
end
-
- # if wine is mentioned but not in timeline or shortmem,
- # then the agent is not supposed to recommend the wine
- if isWineInEvent == false
-
- errornote = "Previously, You recommend wines that is not in your inventory which is not allowed."
- error("Previously, You recommend wines that is not in your inventory which is not allowed.")
- end
end
- result = responsedict[:chat]
-
- return result
- catch e
- io = IOBuffer()
- showerror(io, e)
- errorMsg = String(take!(io))
- st = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace()))
- println("\nAttempt $attempt. Error occurred: $errorMsg\n$st ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
+ # then the agent is not supposed to recommend the wine
+ if isWineInEvent == false
+ errornote = "You recommended wines that are not in your inventory before. Please only recommend wines that you have previously found in your inventory."
+ println("\nERROR YiemAgent generatechat() $errornote $response ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
+ continue
+ end
end
+ result = responsedict[:dialogue]
+
+ return result
end
error("generatechat failed to generate a response")
end
+# function generatechat(a::sommelier, thoughtDict)
+# systemmsg =
+# """
+#
+# Your name is $(a.name). You are a helpful English-speaking assistant, acting as a polite, website-based sommelier for $(a.retailername)'s wine store.
+#
+#
+# You have some thinking in mind while you are talking with the user.
+#
+#
+# Concentrate on your thoughts and articulate them clearly. Keep the conversation remains engaging.
+#
+#
+# - Requesting the user to place an order, make a purchase, or confirm the order. These are the job of our sales team at the store.
+# - Processing sales orders or engaging in any other sales-related activities. These are the job of our sales team at the store.
+# - Answering questions or offering additional services beyond those related to your store's wine recommendations such as discounts, quantity, rewards programs, promotions, delivery options, shipping, boxes, gift wrapping, packaging, personalized messages or something similar. These are the job of our sales team at the store.
+#
+#
+# Your ongoing conversation with the user: ...
+# Additional info: ...
+# Your thoughts: Your current thoughts in your mind
+#
+#
+# - Do not offer additional services you didn't think.
+# - Focus on plan.
+#
+#
+# - Focus on the latest conversation.
+# - If the user interrupts, prioritize the user
+# - Be honest
+# - Medium and full-bodied red wines should not be paired with spicy foods.
+#
+#
+# Chat: ...
+#
+#
+# Your ongoing conversation with the user: "user> hello, I need a new car\n"
+# Additional info: "Car previously found in your inventory: 1) Toyota Camry 2020 2) Honda Civic 2021 3) Ford Mustang 2022"
+# Your thoughts: "I should recommend the car we have to the user."
+# Chat: "We have a variety of cars available, including the Toyota Camry 2020, the Honda Civic 2021, and the Ford Mustang 2022. Which one would you like to see?"
+#
+
+# Let's begin!
+# """
+
+# header = ["Chat:"]
+# dictkey = ["chat"]
+
+# # a.memory[:shortmem][:available_wine] is a vector of dictionary
+# context =
+# if length(a.memory[:shortmem][:available_wine]) != 0
+# "Wines previously found in your inventory: $(availableWineToText(a.memory[:shortmem][:available_wine]))"
+# else
+# "N/A"
+# end
+
+# chathistory = chatHistoryToText(a.chathistory)
+# errornote = ""
+# response = nothing # placeholder for show when error msg show up
+
+# yourthought = "$(thoughtDict[:thought]) $(thoughtDict[:plan])"
+# yourthought1 = nothing
+
+# for attempt in 1:10
+
+# if attempt > 1 # use to prevent LLM generate the same respond over and over
+# yourthought1 = paraphrase(a.func[:text2textInstructLLM], yourthought)
+# else
+# yourthought1 = yourthought
+# end
+
+# usermsg = """
+#
+# $chathistory
+#
+#
+# $context
+#
+#
+# $yourthought1
+#
+# $errornote
+# """
+
+# _prompt =
+# [
+# Dict(:name => "system", :text => systemmsg),
+# Dict(:name => "user", :text => usermsg)
+# ]
+
+# # put in model format
+# prompt = GeneralUtils.formatLLMtext(_prompt; formatname="qwen")
+
+# response = a.func[:text2textInstructLLM](prompt)
+# # sometime the model response like this "here's how I would respond: ..."
+# if occursin("respond:", response)
+# errornote = "You don't need to intro your response"
+# error("generatechat() response contain : ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
+# end
+# response = GeneralUtils.remove_french_accents(response)
+# response = replace(response, '*'=>"")
+# response = replace(response, '$' => "USD")
+# response = replace(response, '`' => "")
+# response = replace(response, "<|eot_id|>"=>"")
+# response = GeneralUtils.remove_french_accents(response)
+
+# # check whether response has all header
+# detected_kw = GeneralUtils.detect_keyword(header, response)
+# if 0 ∈ values(detected_kw)
+# errornote = "\nYiemAgent generatechat() response does not have all header"
+# continue
+# elseif sum(values(detected_kw)) > length(header)
+# errornote = "\nnYiemAgent generatechat() response has duplicated header"
+# continue
+# end
+
+# responsedict = GeneralUtils.textToDict(response, header;
+# dictKey=dictkey, symbolkey=true)
+
+# # check if Context: is in chat
+# if occursin("Context:", responsedict[:chat])
+# error("Context: is in text. This is not allowed")
+# end
+
+# println("\ngeneratechat() ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
+# pprintln(Dict(responsedict))
+
+# # check whether an agent recommend wines before checking inventory or recommend wines
+# # outside its inventory
+# # ask LLM whether there are any winery mentioned in the response
+# mentioned_winery = detectWineryName(a, responsedict[:chat])
+# if mentioned_winery != "None"
+# mentioned_winery = String.(strip.(split(mentioned_winery, ",")))
+
+# # check whether the wine is in event
+# isWineInEvent = false
+# for winename in mentioned_winery
+# for event in a.memory[:events]
+# if event[:outcome] !== nothing && occursin(winename, event[:outcome])
+# isWineInEvent = true
+# break
+# end
+# end
+# end
+
+# # if wine is mentioned but not in timeline or shortmem,
+# # then the agent is not supposed to recommend the wine
+# if isWineInEvent == false
+
+# errornote = "Previously, You recommend wines that is not in your inventory which is not allowed."
+# error("Previously, You recommend wines that is not in your inventory which is not allowed.")
+# end
+# end
+
+# result = responsedict[:chat]
+
+# return result
+# end
+# error("generatechat failed to generate a response")
+# end
function generatechat(a::companion)
@@ -1135,7 +1441,6 @@ function generatechat(a::companion)
# put in model format
prompt = GeneralUtils.formatLLMtext(_prompt; formatname="qwen")
-
response = a.text2textInstructLLM(prompt)
return response
@@ -1144,107 +1449,110 @@ function generatechat(a::companion)
end
-function generatequestion(a, text2textInstructLLM::Function; recent=nothing)::String
+function generatequestion(a, text2textInstructLLM::Function;
+ recent::Integer=5)::String
systemmsg =
"""
- Your name is $(a.name). You are a helpful English-speaking, website-based sommelier for $(a.retailername)'s online store.
- Your goal includes:
- 1) Help the user select the best wines from your inventory that align with the user's preferences
- 2) Thanks the user when they don't need any further assistance and invite them to comeback next time
+ Your role:
+ Your name is $(a.name). You are a helpful English-speaking, website-based sommelier for $(a.retailername)'s online store currently talking with the user.
+ Your goal includes:
+ 1) Help the user select the best wines from your inventory that align with the user's preferences
+        2) Thank the user when they don't need any further assistance and invite them to come back next time
- Your responsibility includes:
- 1) Ask yourself what to do about the current situation
+ Your responsibility includes:
+ 1) Ask yourself:
+ - what do you know
+ - what you do not know
+ - what could you do
- Your responsibility does NOT includes:
- 1) Requesting the user to place an order, make a purchase, or confirm the order. These are the job of our sales team at the store.
- 2) Processing sales orders or engaging in any other sales-related activities. These are the job of our sales team at the store.
- 3) Answering questions or offering additional services beyond those related to your store's wine recommendations such as discounts, quantity, rewards programs, promotions, delivery options, shipping, boxes, gift wrapping, packaging, personalized messages or something similar. These are the job of our sales team at the store.
+ Your responsibility does NOT includes:
+ 1) Requesting the user to place an order, make a purchase, or confirm the order. These are the job of our sales team at the store.
+ 2) Processing sales orders or engaging in any other sales-related activities. These are the job of our sales team at the store.
+ 3) Answering questions or offering additional services beyond those related to your store's wine recommendations such as discounts, quantity, rewards programs, promotions, delivery options, shipping, boxes, gift wrapping, packaging, personalized messages or something similar. These are the job of our sales team at the store.
- At each round of conversation, you will be given the current situation:
- Recap: recap of what has happened so far
- Your recent events: latest 5 events of the situation
+ At each round of conversation, you will be given the current situation:
+ Recap: recap of what has happened so far
+ Additional info: ...
+ Your recent events: latest 5 events of the situation
- You must follow the following guidelines:
- - Your question should be specific, self-contained and not require any additional context.
- - Once the user has chose their wine, ask the user if they need any further assistance. Do not offer any additional services. If the user doesn't need any further assistance, say goodbye and invite them to come back next time.
+ You must follow the following guidelines:
+ - Your question should be specific, self-contained and not require any additional context.
+        - Once the user has chosen their wine, ask the user if they need any further assistance. Do not offer any additional services. If the user doesn't need any further assistance, say goodbye and invite them to come back next time.
- You should follow the following guidelines:
- - Focus on the latest conversation
- - If the user interrupts, prioritize the user
- - If you don't already know, find out the user's budget
- - If you don't already know, find out the type of wine the user is looking for, such as red, white, sparkling, rose, dessert, fortified
- - If you don't already know, find out the occasion for which the user is buying wine
- - If you don't already know, find out the characteristics of wine the user is looking for, such as tannin, sweetness, intensity, acidity
- - If you don't already know, find out what food will be served with wine
- - If you haven't already, introduce the wines you found in the database to the user first
- - Generally speaking, your inventory has some wines from France, the United States, Australia, Spain, and Italy, but you won't know exactly until you check your inventory.
- - All wines in your inventory are always in stock.
- - Engage in conversation to indirectly investigate the customer's intention, budget and preferences before checking your inventory.
- - Do not ask the user about wine's flavor e.g. floral, citrusy, nutty or some thing similar as these terms cannot be used to search the database.
- - Once the user has selected their wine, ask the user if they need any further assistance. Do not offer any additional services. If the user doesn't need any further assistance, say goodbye and invite them to come back next time.
- - Medium and full-bodied red wines should not be paired with spicy foods.
- - If a customer requests information about discounts, quantity, rewards programs, promotions, delivery options, boxes, gift wrapping, packaging, or personalized messages, please inform them that they can contact our sales team at the store.
+ You should follow the following guidelines:
+ - Focus on the latest conversation
+ - If the user interrupts, prioritize the user
+ - If you don't already know, find out the user's budget
+ - If you don't already know, find out the type of wine the user is looking for, such as red, white, sparkling, rose, dessert, fortified
+ - If you don't already know, find out the occasion for which the user is buying wine
+ - If you don't already know, find out the characteristics of wine the user is looking for, such as tannin, sweetness, intensity, acidity
+ - If you don't already know, find out what food will be served with wine
+ - If you haven't already, introduce the wines you found in the database to the user first
+ - Generally speaking, your inventory has some wines from France, the United States, Australia, Spain, and Italy, but you won't know exactly until you check your inventory.
+ - All wines in your inventory are always in stock.
+ - Engage in conversation to indirectly investigate the customer's intention, budget and preferences before checking your inventory.
+        - Do not ask the user about wine's flavor e.g. floral, citrusy, nutty or something similar as these terms cannot be used to search the database.
+ - Once the user has selected their wine, ask the user if they need any further assistance. Do not offer any additional services. If the user doesn't need any further assistance, say goodbye and invite them to come back next time.
+ - Medium and full-bodied red wines are bad with spicy foods.
+ - If a customer requests information about discounts, quantity, rewards programs, promotions, delivery options, boxes, gift wrapping, packaging, or personalized messages, please inform them that they can contact our sales team at the store.
- You should then respond to the user with:
- 1) Understanding:
- - State your understanding about the current situation
- 2) Q: Given the situation, "ask yourself" at least five, but no more than ten, questions
- 3) A: Given the situation, "answer to yourself" the best you can
- - Do not generate any extra text after you finish answering all questions
+ You should then respond to the user with:
+ 1) Thought: State your thought about the current situation
+ 2) Q: Given the situation, "ask yourself" at least five, but no more than twenty, questions
+ 3) A: Given the situation, "answer to yourself" the best you can. Do not generate any extra text after you finish answering all questions
- You must only respond in format as described below:
- Understanding: ...
- Q1: ...
- A1: ...
- Q2: ...
- A2: ...
- Q3: ...
- A3: ...
- ...
+ You must only respond in format as described below:
+ Thought: ...
+ Q1: ...
+ A1: ...
+ Q2: ...
+ A2: ...
+ ...
- Here are some examples:
- Q: The user is buying for her husband, should I dig in to get more information?
- A: Yes, I should. So that I have better idea about the user's preferences.
-
- Q: Why the user saying this?
- A: According to the situation, ...
-
- Q: The user is asking for a cappuccino. Do I have it at my cafe?
- A: No I don't.
-
- Q: Since I don't have a cappuccino but I have a Late, should I ask if they are okay with that?
- A: Yes, I should.
-
- Q: Are they allergic to milk?
- A: According to the situation, since they mentioned a cappuccino before, it seems they are not allergic to milk.
-
- Q: Have I checked the inventory yet?
- A: According to the situation, no. I need more information.
-
- Q: Should I check the inventory now?
- A: According to the situation, ...
-
- Q: What do I have in the inventory?
- A: According to the situation, ...
-
- Q: Which items are within the user price range? And which items are out of the user price rance?
- A: According to the situation, ...
-
- Q: Do I have them in stock?
- A: According to the situation, ...
-
- Q: Did I introduce them to the user already?
- A: According to the situation, No.
-
- Q: Am I certain about the information I'm going to share with the user, or should I verify the information first?
- A: According to the situation, ...
-
- Let's begin!
+ Here are some examples:
+ Q: The user is buying for her husband, should I dig in to get more information?
+ A: Yes, I should. So that I have better idea about the user's preferences.
+        Q: What is the user looking for?
+        A: The user is asking for an MPV car with 7 seats
+ Q: Why the user saying this?
+ A: The user does not want an SUV because it does not have sliding doors
+ Q: The user is asking for a cappuccino. Do I have it at my cafe?
+ A: No I don't have.
+        Q: Since I don't have a cappuccino but I have a Latte, should I ask if they are okay with that?
+ A: Yes, I should.
+ Q: Are they allergic to milk?
+ A: Since they mentioned a cappuccino before, it seems they are not allergic to milk.
+ Q: Have I checked the inventory yet?
+ A: No. I need more information from the user including ...
+ Q: What else do I need to know?
+ A: ...
+ Q: Should I check the inventory now?
+ A: ...
+        Q: What does the user intend to do with the car?
+ A: I don't know yet. I will need to ask the user.
+ Q: What do I have in the inventory?
+ A: ...
+        Q: Which items are within the user price range? And which items are out of the user price range?
+ A: ...
+ Q: Do I have them in stock?
+ A: ...
+ Q: Did I introduce them to the user already?
+ A: Not yet.
+ Q: Am I certain about the information I'm going to share with the user, or should I verify the information first?
+ A: ...
+ Q: What should I do?
+ A: ...
+ Q: What shouldn't I do?
+ A: ...
+        Q: What kind of car is suitable for an off-road trip?
+ A: A four-wheel drive SUV is a good choice for off-road trips.
+
+ Let's begin!
"""
- header = ["Understanding:", "Q1:"]
- dictkey = ["understanding", "q1"]
+ header = ["Thought:", "Q1:"]
+ dictkey = ["thought", "q1"]
context =
if length(a.memory[:shortmem][:available_wine]) != 0
@@ -1253,23 +1561,9 @@ function generatequestion(a, text2textInstructLLM::Function; recent=nothing)::St
"N/A"
end
- totalevents = length(a.memory[:events])
- ind =
- if totalevents > recent
- start = totalevents - recent
- start:totalevents
- else
- 1:totalevents
- end
-
- timeline = ""
- for (i, event) in enumerate(a.memory[:events][ind])
- if event[:outcome] === nothing
- timeline *= "$i) $(event[:subject])> $(event[:actioninput])\n"
- else
- timeline *= "$i) $(event[:subject])> $(event[:actioninput]) $(event[:outcome])\n"
- end
- end
+ recent_ind = GeneralUtils.recentElementsIndex(length(a.memory[:events]), recent)
+ recentevents = a.memory[:events][recent_ind]
+ timeline = createTimeline(recentevents)
errornote = ""
response = nothing # store for show when error msg show up
@@ -1291,11 +1585,15 @@ function generatequestion(a, text2textInstructLLM::Function; recent=nothing)::St
end
for attempt in 1:10
+ if attempt > 1
+ println("\nYiemAgent generatequestion() attempt $attempt/10 ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
+ end
+
usermsg =
"""
Recap: $recap)
+ Additional info: $context
Your recent events: $timeline
- Context: $context
$errornote
"""
@@ -1308,78 +1606,67 @@ function generatequestion(a, text2textInstructLLM::Function; recent=nothing)::St
# put in model format
prompt = GeneralUtils.formatLLMtext(_prompt; formatname="qwen")
- try
- response = text2textInstructLLM(prompt)
- # make sure generatequestion() don't have wine name that is not from retailer inventory
- # check whether an agent recommend wines before checking inventory or recommend wines
- # outside its inventory
- # ask LLM whether there are any winery mentioned in the response
- mentioned_winery = detectWineryName(a, response)
- if mentioned_winery != "None"
- mentioned_winery = String.(strip.(split(mentioned_winery, ",")))
+ response = text2textInstructLLM(prompt, modelsize="medium")
+ # make sure generatequestion() don't have wine name that is not from retailer inventory
+ # check whether an agent recommend wines before checking inventory or recommend wines
+ # outside its inventory
+ # ask LLM whether there are any winery mentioned in the response
+ mentioned_winery = detectWineryName(a, response)
+ if mentioned_winery != "None"
+ mentioned_winery = String.(strip.(split(mentioned_winery, ",")))
- # check whether the wine is in event
- isWineInEvent = false
- for winename in mentioned_winery
- for event in a.memory[:events]
- if event[:outcome] !== nothing && occursin(winename, event[:outcome])
- isWineInEvent = true
- break
- end
+ # check whether the wine is in event
+ isWineInEvent = false
+ for winename in mentioned_winery
+ for event in a.memory[:events]
+ if event[:outcome] !== nothing && occursin(winename, event[:outcome])
+ isWineInEvent = true
+ break
end
end
-
- # if wine is mentioned but not in timeline or shortmem,
- # then the agent is not supposed to recommend the wine
- if isWineInEvent == false
- errornote = "Previously, You mentioned wines that is not in your inventory which is not allowed."
- error("Previously, You mentioned wines that is not in your inventory which is not allowed.")
- end
end
- # sometime LLM generate more than 1 Understanding:
- understanding_number = count("Understanding:", response)
- if understanding_number > 1
- x = split(response, "Understanding:")[2]
- response = "Understanding:" * x
- end
-
- q_number = count("Q", response)
-
- # check for valid response
- q_atleast = length(a.memory[:events]) <= 2 ? 1 : 3
- if q_number < q_atleast
- error("too few questions only $q_number questions are generated ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
- # check whether "A1" is in the response, if not error.
- elseif !occursin("A1:", response)
- error("no answer found in the response ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
- end
-
- # check whether response has all header
- detected_kw = GeneralUtils.detect_keyword(header, response)
- if sum(values(detected_kw)) < length(header)
- errornote = "\nSQL evaluator() response does not have all header"
- continue
- elseif sum(values(detected_kw)) > length(header)
- errornote = "\nSQL evaluator() response has duplicated header"
+ # if wine is mentioned but not in timeline or shortmem,
+ # then the agent is not supposed to recommend the wine
+ if isWineInEvent == false
+            errornote = "Previously, you mentioned wines that are not in your inventory, which is not allowed."
continue
end
-
- responsedict = GeneralUtils.textToDict(response, header;
- dictKey=dictkey, symbolkey=true)
- response = "Q1: " * responsedict[:q1]
- println("\ngeneratequestion ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
- pprintln(response)
- return response
- catch e
- io = IOBuffer()
- showerror(io, e)
- errorMsg = String(take!(io))
- st = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace()))
- println("\nAttempt $attempt. Error occurred: $errorMsg\n$st ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
end
+
+ q_number = count("Q", response)
+
+ # check for valid response
+ if q_number < 2
+            errornote = "too few questions: only $q_number questions were generated previously."
+ println("too few questions only $q_number questions are generated ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
+ continue
+ # check whether "A1" is in the response, if not error.
+ elseif !occursin("A1:", response)
+ errornote = "previous response does not have A1"
+ println("\nprevious response does not have A1 ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
+ continue
+ end
+
+ # check whether response has all header
+ detected_kw = GeneralUtils.detect_keyword(header, response)
+ if 0 ∈ values(detected_kw)
+ errornote = "\nYiemAgent generatequestion() response does not have all header"
+ continue
+ elseif sum(values(detected_kw)) > length(header)
+ errornote = "\nYiemAgent generatequestion() response has duplicated header"
+ continue
+ end
+
+ responsedict = GeneralUtils.textToDict(response, header;
+ dictKey=dictkey, symbolkey=true)
+ response = "Q1: " * responsedict[:q1]
+ println("\nYiemAgent generatequestion() ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
+ pprintln(response)
+
+ return response
end
- error("generatequestion failed to generate a response ", response)
+ error("YiemAgent generatequestion() failed to generate a response ", response)
end
@@ -1399,8 +1686,8 @@ function generateSituationReport(a, text2textInstructLLM::Function; skiprecent::
You should follow the following guidelines:
- Use the word "user" and "assistant" instead of their name in the report
- You should then respond to the user with:
- event: a detailed summary for each event without exaggerated details.
+ You should then respond to the user with the following:
+ Event: a detailed summary for each event without exaggerated details.
You must only respond in format as described below:
Event_1: ...
@@ -1420,17 +1707,12 @@ function generateSituationReport(a, text2textInstructLLM::Function; skiprecent::
header = ["Event_$i:" for i in eachindex(a.memory[:events])]
dictkey = lowercase.(["Event_$i" for i in eachindex(a.memory[:events])])
- if length(a.memory[:events]) <= skiprecent
- return nothing
- end
-
- events = a.memory[:events][1:end-skiprecent]
-
- timeline = createTimeline(a.memory[:events]; skiprecent=skiprecent)
+ ind = GeneralUtils.nonRecentElementsIndex(length(a.memory[:events]), skiprecent)
+ events = a.memory[:events][ind]
+ timeline = createTimeline(events)
errornote = ""
response = nothing # store for show when error msg show up
-
for attempt in 1:10
usermsg = """
Total events: $(length(events))
@@ -1451,18 +1733,23 @@ function generateSituationReport(a, text2textInstructLLM::Function; skiprecent::
# check whether response has all header
detected_kw = GeneralUtils.detect_keyword(header, response)
- if sum(values(detected_kw)) < length(header)
- errornote = "\nYiemAgent generateSituationReport() response does not have all header"
+ kwvalue = [i for i in values(detected_kw)]
+ zeroind = findall(x -> x == 0, kwvalue)
+ missingkeys = [header[i] for i in zeroind]
+ if 0 ∈ values(detected_kw)
+ errornote = "$missingkeys are missing in your previous attempt"
+ println("\nYiemAgent generateSituationReport() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
continue
elseif sum(values(detected_kw)) > length(header)
- errornote = "\nYiemAgent generateSituationReport() response has duplicated header"
+ errornote = "Your previous response has duplicated events"
+ println("\nYiemAgent generateSituationReport() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
continue
end
responsedict = GeneralUtils.textToDict(response, header;
dictKey=dictkey, symbolkey=true)
- println("\ngenerateSituationReport() ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
+ println("\ngenerateSituationReport() ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
pprintln(response)
return responsedict
@@ -1516,16 +1803,16 @@ function detectWineryName(a, text)
try
response = a.func[:text2textInstructLLM](prompt)
- println("\ndetectWineryName() ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
+ println("\ndetectWineryName() ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
pprintln(response)
# check whether response has all header
detected_kw = GeneralUtils.detect_keyword(header, response)
- if sum(values(detected_kw)) < length(header)
- errornote = "\nSQL evaluator() response does not have all header"
+ if 0 ∈ values(detected_kw)
+ errornote = "\nYiemAgent detectWineryName() response does not have all header"
continue
elseif sum(values(detected_kw)) > length(header)
- errornote = "\nSQL evaluator() response has duplicated header"
+ errornote = "\nYiemAgent detectWineryName() response has duplicated header"
continue
end
@@ -1540,7 +1827,7 @@ function detectWineryName(a, text)
showerror(io, e)
errorMsg = String(take!(io))
st = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace()))
- println("\n Attempt $attempt. Error occurred: $errorMsg\n$st ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
+ println("\n Attempt $attempt. Error occurred: $errorMsg\n$st ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
end
end
error("detectWineryName failed to generate a response")
diff --git a/src/llmfunction.jl b/src/llmfunction.jl
index 021924d..4f13881 100644
--- a/src/llmfunction.jl
+++ b/src/llmfunction.jl
@@ -291,20 +291,20 @@ julia> result = checkinventory(agent, input)
function checkinventory(a::T1, input::T2
) where {T1<:agent, T2<:AbstractString}
- println("\ncheckinventory order: $input ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
+ println("\ncheckinventory order: $input ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
wineattributes_1 = extractWineAttributes_1(a, input)
wineattributes_2 = extractWineAttributes_2(a, input)
_inventoryquery = "retailer name: $(a.retailername), $wineattributes_1, $wineattributes_2"
inventoryquery = "Retrieves winery, wine_name, vintage, region, country, wine_type, grape, serving_temperature, sweetness, intensity, tannin, acidity, tasting_notes, price and currency of wines that match the following criteria - {$_inventoryquery}"
- println("\ncheckinventory input: $inventoryquery ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
+ println("\ncheckinventory input: $inventoryquery ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
# add suppport for similarSQLVectorDB
textresult, rawresponse = SQLLLM.query(inventoryquery, a.func[:executeSQL],
a.func[:text2textInstructLLM],
insertSQLVectorDB=a.func[:insertSQLVectorDB],
similarSQLVectorDB=a.func[:similarSQLVectorDB])
- println("\ncheckinventory result ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
+ println("\ncheckinventory result ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
println(textresult)
return (result=textresult, rawresponse=rawresponse, success=true, errormsg=nothing)
@@ -345,7 +345,7 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2<
- Do not generate other comments.
You should then respond to the user with:
- Comprehension: state your understanding of the current situation
+ Thought: state your understanding of the current situation
Wine_name: name of the wine
Winery: name of the winery
Vintage: the year of the wine
@@ -359,7 +359,7 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2<
Food_to_be_paired_with_wine: food that the user will be served with the wine such as poultry, fish, steak, etc
You should only respond in format as described below:
- Comprehension: ...
+ Thought: ...
Wine_name: ...
Winery: ...
Vintage: ...
@@ -376,17 +376,19 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2<
User's query: red, Chenin Blanc, Riesling, 20 USD
{"reasoning": ..., "winery": "NA", "wine_name": "NA", "vintage": "NA", "region": "NA", "country": "NA", "wine_type": "red, white", "grape_varietal": "Chenin Blanc, Riesling", "tasting_notes": "NA", "wine_price": "0-20", "occasion": "NA", "food_to_be_paired_with_wine": "NA"}
- User's query: Domaine du Collier Saumur Blanc 2019, France, white, Chenin Blanc
- {"reasoning": ..., "winery": "Domaine du Collier", "wine_name": "Saumur Blanc", "vintage": "2019", "region": "Saumur", "country": "France", "wine_type": "white", "grape_varietal": "Chenin Blanc", "tasting_notes": "NA", "wine_price": "NA", "occasion": "NA", "food_to_be_paired_with_wine": "NA"}
+ User's query: Domaine du Collier Saumur Blanc 2019, France, white, Merlot
+ {"reasoning": ..., "winery": "Domaine du Collier", "wine_name": "Saumur Blanc", "vintage": "2019", "region": "Saumur", "country": "France", "wine_type": "white", "grape_varietal": "Merlot", "tasting_notes": "NA", "wine_price": "NA", "occasion": "NA", "food_to_be_paired_with_wine": "NA"}
Let's begin!
"""
- header = ["Comprehension:", "Wine_name:", "Winery:", "Vintage:", "Region:", "Country:", "Wine_type:", "Grape_varietal:", "Tasting_notes:", "Wine_price:", "Occasion:", "Food_to_be_paired_with_wine:"]
- dictkey = ["comprehension", "wine_name", "winery", "vintage", "region", "country", "wine_type", "grape_varietal", "tasting_notes", "wine_price", "occasion", "food_to_be_paired_with_wine"]
+ header = ["Thought:", "Wine_name:", "Winery:", "Vintage:", "Region:", "Country:", "Wine_type:", "Grape_varietal:", "Tasting_notes:", "Wine_price:", "Occasion:", "Food_to_be_paired_with_wine:"]
+ dictkey = ["thought", "wine_name", "winery", "vintage", "region", "country", "wine_type", "grape_varietal", "tasting_notes", "wine_price", "occasion", "food_to_be_paired_with_wine"]
errornote = ""
- for attempt in 1:5
+ for attempt in 1:10
+ #[WORKING] I should add generatequestion()
+
usermsg =
"""
User's query: $input
@@ -409,7 +411,7 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2<
for word in header
if !occursin(word, response)
errornote = "$word attribute is missing in previous attempts"
- println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
+ println("Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
checkFlag = true
break
end
@@ -418,7 +420,7 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2<
# check whether response has all header
detected_kw = GeneralUtils.detect_keyword(header, response)
- if sum(values(detected_kw)) < length(header)
+ if 0 ∈ values(detected_kw)
errornote = "\nYiemAgent extractWineAttributes_1() response does not have all header"
continue
elseif sum(values(detected_kw)) > length(header)
@@ -428,7 +430,7 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2<
responsedict = GeneralUtils.textToDict(response, header;
dictKey=dictkey, symbolkey=true)
- delete!(responsedict, :comprehension)
+ delete!(responsedict, :thought)
delete!(responsedict, :tasting_notes)
delete!(responsedict, :occasion)
delete!(responsedict, :food_to_be_paired_with_wine)
@@ -440,14 +442,14 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2<
checkFlag = false
for i in dictkey
j = Symbol(i)
- if j ∉ [:comprehension, :tasting_notes, :occasion, :food_to_be_paired_with_wine]
+ if j ∉ [:thought, :tasting_notes, :occasion, :food_to_be_paired_with_wine]
# in case j is wine_price it needs to be checked differently because its value is ranged
if j == :wine_price
if responsedict[:wine_price] != "NA"
# check whether wine_price is in ranged number
if !occursin('-', responsedict[:wine_price])
errornote = "wine_price must be a range number"
- println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
+ println("Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
checkFlag = true
break
end
@@ -462,7 +464,7 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2<
# price range like 100-100 is not good
if minprice == maxprice
errornote = "wine_price with minimum equals to maximum is not valid"
- println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
+ println("Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
checkFlag = true
break
end
@@ -481,7 +483,7 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2<
for x in content #check whether price are mentioned in the input
if !occursin("NA", responsedict[j]) && !occursin(x, input)
errornote = "$x is not mentioned in the user query, you must only use the info from the query."
- println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
+ println("Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
checkFlag == true
break
end
@@ -640,7 +642,7 @@ function extractWineAttributes_2(a::T1, input::T2)::String where {T1<:agent, T2<
# check whether response has all header
detected_kw = GeneralUtils.detect_keyword(header, response)
- if sum(values(detected_kw)) < length(header)
+ if 0 ∈ values(detected_kw)
errornote = "\nYiemAgent extractWineAttributes_2() response does not have all header"
continue
elseif sum(values(detected_kw)) > length(header)
@@ -657,7 +659,7 @@ function extractWineAttributes_2(a::T1, input::T2)::String where {T1<:agent, T2<
value = responsedict[keyword]
if value != "NA" && !occursin(value, input)
errornote = "WARNING. Keyword $keyword: $value does not appear in the input. You must use information from the input only"
- println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
+ println("Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
continue
end
@@ -673,7 +675,7 @@ function extractWineAttributes_2(a::T1, input::T2)::String where {T1<:agent, T2<
if !occursin("keyword", string(k))
if v !== "NA" && (!occursin('-', v) || length(v) > 5)
errornote = "WARNING: The non-range value {$k: $v} is not allowed. It should be specified in a range format, i.e. min-max."
- println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
+ println("Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
continue
end
end
@@ -766,7 +768,7 @@ function paraphrase(text2textInstructLLM::Function, text::String)
# sometime the model response like this "here's how I would respond: ..."
if occursin("respond:", response)
errornote = "You don't need to intro your response"
- error("\nparaphrase() response contain : ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
+ error("\nparaphrase() response contain : ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
end
response = GeneralUtils.remove_french_accents(response)
response = replace(response, '*'=>"")
@@ -776,7 +778,7 @@ function paraphrase(text2textInstructLLM::Function, text::String)
# check whether response has all header
detected_kw = GeneralUtils.detect_keyword(header, response)
- if sum(values(detected_kw)) < length(header)
+ if 0 ∈ values(detected_kw)
errornote = "\nYiemAgent paraphrase() response does not have all header"
continue
elseif sum(values(detected_kw)) > length(header)
@@ -789,7 +791,7 @@ function paraphrase(text2textInstructLLM::Function, text::String)
for i ∈ [:paraphrase]
if length(JSON3.write(responsedict[i])) == 0
- error("$i is empty ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
+ error("$i is empty ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
end
end
@@ -801,7 +803,7 @@ function paraphrase(text2textInstructLLM::Function, text::String)
end
end
- println("\nparaphrase() ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
+ println("\nparaphrase() ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
pprintln(Dict(responsedict))
result = responsedict[:paraphrase]
@@ -812,7 +814,7 @@ function paraphrase(text2textInstructLLM::Function, text::String)
showerror(io, e)
errorMsg = String(take!(io))
st = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace()))
- println("\nAttempt $attempt. Error occurred: $errorMsg\n$st ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
+ println("\nAttempt $attempt. Error occurred: $errorMsg\n$st ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
end
end
error("paraphrase() failed to generate a response")
@@ -978,7 +980,7 @@ end
# ]
# # put in model format
-# prompt = GeneralUtils.formatLLMtext(_prompt; formatname="llama3instruct")
+# prompt = GeneralUtils.formatLLMtext(_prompt; formatname="qwen")
# prompt *=
# """
# <|start_header_id|>assistant<|end_header_id|>
@@ -1010,7 +1012,7 @@ end
# state[:isterminal] = true
# state[:reward] = 1
# end
-# println("--> 5 Evaluator ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
+# println("--> 5 Evaluator ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
# pprintln(Dict(responsedict))
# return responsedict[:score]
# catch e
diff --git a/src/util.jl b/src/util.jl
index 37b92ac..bbc92e1 100644
--- a/src/util.jl
+++ b/src/util.jl
@@ -122,47 +122,53 @@ This function takes in a vector of dictionaries and outputs a single string wher
# Arguments
- `vecd::Vector`
- a vector of dictionaries
+ A vector of dictionaries containing chat messages
- `withkey::Bool`
- whether to include the key in the output text. Default is true
+ Whether to include the name as a prefix in the output text. Default is true
+ - `range::Union{Nothing,UnitRange,Int}`
+ Optional range of messages to include. If nothing, includes all messages
-# Return
- a string with the formatted dictionaries
+# Returns
+ A formatted string where each line contains either:
+ - If withkey=true: "name> message\n"
+ - If withkey=false: "message\n"
# Example
-```jldoctest
+```julia
julia> using Revise
julia> using GeneralUtils
julia> vecd = [Dict(:name => "John", :text => "Hello"), Dict(:name => "Jane", :text => "Goodbye")]
julia> GeneralUtils.vectorOfDictToText(vecd, withkey=true)
"John> Hello\nJane> Goodbye\n"
```
-# Signature
"""
-function chatHistoryToText(vecd::Vector; withkey=true)::String
+function chatHistoryToText(vecd::Vector; withkey=true, range=nothing)::String
# Initialize an empty string to hold the final text
text = ""
+ # Get the elements within the specified range, or all elements if no range provided
+ elements = isnothing(range) ? vecd : vecd[range]
+
# Determine whether to include the key in the output text or not
if withkey
- # Loop through each dictionary in the input vector
- for d in vecd
- # Extract the 'name' and 'text' keys from the dictionary
- name = d[:name]
- _text = d[:text]
-
- # Append the formatted string to the text variable
- text *= "$name> $_text \n"
+ # Loop through each dictionary in the input vector
+ for d in elements
+ # Extract the 'name' and 'text' keys from the dictionary
+ name = d[:name]
+ _text = d[:text]
+
+ # Append the formatted string to the text variable
+            text *= "$name> $_text \n"
end
else
- # Loop through each dictionary in the input vector
- for d in vecd
- # Iterate over all key-value pairs in the dictionary
- for (k, v) in d
- # Append the formatted string to the text variable
- text *= "$v \n"
- end
- end
+ # Loop through each dictionary in the input vector
+ for d in elements
+ # Iterate over all key-value pairs in the dictionary
+ for (k, v) in d
+ # Append the formatted string to the text variable
+ text *= "$v \n"
+ end
+ end
end
# Return the final text
@@ -191,6 +197,35 @@ end
+""" Create a dictionary representing an event with optional details.
+
+# Arguments
+ - `event_description::Union{String, Nothing}`
+ A description of the event
+ - `timestamp::Union{DateTime, Nothing}`
+ The time when the event occurred
+ - `subject::Union{String, Nothing}`
+ The subject or entity associated with the event
+ - `thought::Union{AbstractDict, Nothing}`
+ Any associated thoughts or metadata
+ - `actionname::Union{String, Nothing}`
+ The name of the action performed (e.g., "CHAT", "CHECKINVENTORY")
+ - `actioninput::Union{String, Nothing}`
+ Input or parameters for the action
+ - `location::Union{String, Nothing}`
+ Where the event took place
+ - `equipment_used::Union{String, Nothing}`
+ Equipment involved in the event
+ - `material_used::Union{String, Nothing}`
+ Materials used during the event
+ - `outcome::Union{String, Nothing}`
+ The result or consequence of the event after action execution
+ - `note::Union{String, Nothing}`
+ Additional notes or comments
+
+# Returns
+ A dictionary with event details as symbol-keyed key-value pairs
+"""
function eventdict(;
event_description::Union{String, Nothing}=nothing,
timestamp::Union{DateTime, Nothing}=nothing,
@@ -220,9 +255,33 @@ function eventdict(;
end
-function createTimeline(memory::T1; skiprecent::Integer=0) where {T1<:AbstractVector}
- events = memory[1:end-skiprecent]
+""" Create a formatted timeline string from a sequence of events.
+# Arguments
+ - `events::T1`
+ Vector of event dictionaries containing subject, actioninput and optional outcome fields
+ Each event dictionary should have the following keys:
+ - :subject - The subject or entity performing the action
+ - :actioninput - The action or input performed by the subject
+ - :outcome - (Optional) The result or outcome of the action
+
+# Returns
+ - `timeline::String`
+ A formatted string representing the events with their subjects, actions, and optional outcomes
+ Format: "{subject}> {actioninput} {outcome}\n" for each event
+
+# Example
+
+events = [
+ Dict(:subject => "User", :actioninput => "Hello", :outcome => nothing),
+ Dict(:subject => "Assistant", :actioninput => "Hi there!", :outcome => "with a smile")
+]
+timeline = createTimeline(events)
+# User> Hello
+# Assistant> Hi there! with a smile
+
+"""
+function createTimeline(events::T1) where {T1<:AbstractVector}
timeline = ""
for (i, event) in enumerate(events)
if event[:outcome] === nothing
@@ -236,8 +295,6 @@ function createTimeline(memory::T1; skiprecent::Integer=0) where {T1<:AbstractVe
end
-
-
# """ Convert a single chat dictionary into LLM model instruct format.
# # Llama 3 instruct format example
diff --git a/test/Manifest.toml b/test/Manifest.toml
new file mode 100644
index 0000000..83f035b
--- /dev/null
+++ b/test/Manifest.toml
@@ -0,0 +1,41 @@
+# This file is machine-generated - editing it directly is not advised
+
+julia_version = "1.11.4"
+manifest_format = "2.0"
+project_hash = "71d91126b5a1fb1020e1098d9d492de2a4438fd2"
+
+[[deps.Base64]]
+uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f"
+version = "1.11.0"
+
+[[deps.InteractiveUtils]]
+deps = ["Markdown"]
+uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240"
+version = "1.11.0"
+
+[[deps.Logging]]
+uuid = "56ddb016-857b-54e1-b83d-db4d58db5568"
+version = "1.11.0"
+
+[[deps.Markdown]]
+deps = ["Base64"]
+uuid = "d6f4376e-aef5-505a-96c1-9c027394607a"
+version = "1.11.0"
+
+[[deps.Random]]
+deps = ["SHA"]
+uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
+version = "1.11.0"
+
+[[deps.SHA]]
+uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce"
+version = "0.7.0"
+
+[[deps.Serialization]]
+uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b"
+version = "1.11.0"
+
+[[deps.Test]]
+deps = ["InteractiveUtils", "Logging", "Random", "Serialization"]
+uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
+version = "1.11.0"
diff --git a/test/Project.toml b/test/Project.toml
new file mode 100644
index 0000000..0c36332
--- /dev/null
+++ b/test/Project.toml
@@ -0,0 +1,2 @@
+[deps]
+Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
diff --git a/test/test_1.jl b/test/test_1.jl
index e23b3fd..057a017 100644
--- a/test/test_1.jl
+++ b/test/test_1.jl
@@ -36,13 +36,13 @@ function executeSQLVectorDB(sql)
return result
end
-function text2textInstructLLM(prompt::String; maxattempt=2)
+function text2textInstructLLM(prompt::String; maxattempt::Integer=2, modelsize::String="medium")
msgMeta = GeneralUtils.generate_msgMeta(
config[:externalservice][:loadbalancer][:mqtttopic];
msgPurpose="inference",
senderName="yiemagent",
senderId=sessionId,
- receiverName="text2textinstruct_small",
+ receiverName="text2textinstruct_$modelsize",
mqttBrokerAddress=config[:mqttServerInfo][:broker],
mqttBrokerPort=config[:mqttServerInfo][:port],
)
@@ -94,7 +94,11 @@ function getEmbedding(text::T) where {T<:AbstractString}
:text => [text] # must be a vector of string
)
)
- response = GeneralUtils.sendReceiveMqttMsg(outgoingMsg; timeout=120)
+
+    # BUG: sendReceiveMqttMsg sometimes returns nothing (ollama backend); maxattempt=2 added as a workaround — TODO confirm root cause
+ response = GeneralUtils.sendReceiveMqttMsg(outgoingMsg; timeout=120, maxattempt=2)
+
+
embedding = response[:response][:embeddings]
return embedding
end
@@ -234,7 +238,7 @@ a = YiemAgent.sommelier(
)
while true
- print("your respond: ")
+    print("\nyour response: ")
user_answer = readline()
response = YiemAgent.conversation(a, Dict(:text=> user_answer))
println("\n$response")
@@ -244,14 +248,13 @@ end
# response = YiemAgent.conversation(a, Dict(:text=> "I want to get a French red wine under 100."))
-
-
-
-
-
-
-
-
+"""
+hello I want to get a bottle of red wine for my boss. I have a budget around 50 dollars. Show me some options.
+
+I have no idea about his wine taste but he likes spicy food.
+
+
+"""