From 7d5bc14a0923afddb33ed1a5cac381434203f011 Mon Sep 17 00:00:00 2001 From: tonaerospace Date: Fri, 21 Mar 2025 10:13:53 +0700 Subject: [PATCH 1/6] mark new version --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 601d42d..2c976a7 100644 --- a/Project.toml +++ b/Project.toml @@ -1,7 +1,7 @@ name = "YiemAgent" uuid = "e012c34b-7f78-48e0-971c-7abb83b6f0a2" authors = ["narawat lamaiin "] -version = "0.1.3" +version = "0.1.4" [deps] DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0" From 5a890860a63ee2c36d9a9600fbc21e485a6d6ede Mon Sep 17 00:00:00 2001 From: narawat lamaiin Date: Sat, 22 Mar 2025 09:42:51 +0700 Subject: [PATCH 2/6] update --- src/interface.jl | 56 ++++++++++++++++++++++++++++++++++++++++++------ test/config.json | 2 +- test/test_1.jl | 6 +++--- 3 files changed, 54 insertions(+), 10 deletions(-) diff --git a/src/interface.jl b/src/interface.jl index e44aafd..39da605 100644 --- a/src/interface.jl +++ b/src/interface.jl @@ -231,6 +231,9 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen Let's begin! 
""" + + header = ["Understanding:", "Reasoning:", "Plan:", "Action_name:", "Action_input:"] + dictkey = ["understanding", "reasoning", "plan", "action_name", "action_input"] chathistory = chatHistoryToText(a.chathistory) @@ -316,8 +319,16 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen continue end - header = ["Understanding:", "Reasoning:", "Plan:", "Action_name:", "Action_input:"] - dictkey = ["understanding", "reasoning", "plan", "action_name", "action_input"] + # check whether response has all header + detected_kw = GeneralUtils.detect_keyword(header, response) + if sum(values(detected_kw)) < length(header) + errornote = "\nSQL evaluator() response does not have all header" + continue + elseif sum(values(detected_kw)) > length(header) + errornote = "\nSQL evaluator() response has duplicated header" + continue + end + responsedict = GeneralUtils.textToDict(response, header; dictKey=dictkey, symbolkey=true) @@ -1229,6 +1240,9 @@ function generatequestion(a, text2textInstructLLM::Function; recent=nothing)::St Let's begin! 
""" + + header = ["Understanding:", "Q1:"] + dictkey = ["understanding", "q1"] context = if length(a.memory[:shortmem][:available_wine]) != 0 @@ -1339,8 +1353,16 @@ function generatequestion(a, text2textInstructLLM::Function; recent=nothing)::St error("no answer found in the response ", Dates.now(), " ", @__FILE__, " ", @__LINE__) end - header = ["Understanding:", "Q1:"] - dictkey = ["understanding", "q1"] + # check whether response has all header + detected_kw = GeneralUtils.detect_keyword(header, response) + if sum(values(detected_kw)) < length(header) + errornote = "\nSQL evaluator() response does not have all header" + continue + elseif sum(values(detected_kw)) > length(header) + errornote = "\nSQL evaluator() response has duplicated header" + continue + end + responsedict = GeneralUtils.textToDict(response, header; dictKey=dictkey, symbolkey=true) response = "Q1: " * responsedict[:q1] @@ -1424,6 +1446,17 @@ function generateSituationReport(a, text2textInstructLLM::Function; skiprecent:: prompt = GeneralUtils.formatLLMtext(_prompt; formatname="qwen") response = text2textInstructLLM(prompt) + + # check whether response has all header + detected_kw = GeneralUtils.detect_keyword(header, response) + if sum(values(detected_kw)) < length(header) + errornote = "\nYiemAgent generateSituationReport() response does not have all header" + continue + elseif sum(values(detected_kw)) > length(header) + errornote = "\nYiemAgent generateSituationReport() response has duplicated header" + continue + end + responsedict = GeneralUtils.textToDict(response, header; dictKey=dictkey, symbolkey=true) @@ -1459,6 +1492,9 @@ function detectWineryName(a, text) Let's begin! 
""" + + header = ["Winery_names:"] + dictkey = ["winery_names"] response = nothing # placeholder for show when error msg show up @@ -1481,8 +1517,16 @@ function detectWineryName(a, text) println("\n~~~ detectWineryName() ", Dates.now(), " ", @__FILE__, " ", @__LINE__) pprintln(response) - header = ["Winery_names:"] - dictkey = ["winery_names"] + # check whether response has all header + detected_kw = GeneralUtils.detect_keyword(header, response) + if sum(values(detected_kw)) < length(header) + errornote = "\nSQL evaluator() response does not have all header" + continue + elseif sum(values(detected_kw)) > length(header) + errornote = "\nSQL evaluator() response has duplicated header" + continue + end + responsedict = GeneralUtils.textToDict(response, header; dictKey=dictkey, symbolkey=true) diff --git a/test/config.json b/test/config.json index 74c7a75..13ca00f 100644 --- a/test/config.json +++ b/test/config.json @@ -27,7 +27,7 @@ "description": "agent role" }, "organization": { - "value": "yiem_hq", + "value": "yiem_branch_1", "description": "organization name" }, "externalservice": { diff --git a/test/test_1.jl b/test/test_1.jl index aaef67f..5699e2e 100644 --- a/test/test_1.jl +++ b/test/test_1.jl @@ -36,7 +36,7 @@ function executeSQLVectorDB(sql) return result end -function text2textInstructLLM(prompt::String; maxattempt=3) +function text2textInstructLLM(prompt::String; maxattempt=2) msgMeta = GeneralUtils.generate_msgMeta( config[:externalservice][:loadbalancer][:mqtttopic]; msgPurpose="inference", @@ -60,7 +60,7 @@ function text2textInstructLLM(prompt::String; maxattempt=3) response = nothing for attempts in 1:maxattempt - _response = GeneralUtils.sendReceiveMqttMsg(outgoingMsg; timeout=300, maxattempt=maxattempt) + _response = GeneralUtils.sendReceiveMqttMsg(outgoingMsg; timeout=180, maxattempt=maxattempt) payload = _response[:response] if _response[:success] && payload[:text] !== nothing response = _response[:response][:text] @@ -234,7 +234,7 @@ a = 
YiemAgent.sommelier( ) while true - println("your respond: ") + print("your respond: ") user_answer = readline() response = YiemAgent.conversation(a, Dict(:text=> user_answer)) println("\n$response") From 883f581b2a425c430bd6d0cc782a466a9be8b1d3 Mon Sep 17 00:00:00 2001 From: narawat lamaiin Date: Sat, 22 Mar 2025 15:34:00 +0700 Subject: [PATCH 3/6] update --- src/interface.jl | 24 +++++++++++++----------- src/llmfunction.jl | 34 +++++++++++++++++++++------------- test/test_1.jl | 2 +- 3 files changed, 35 insertions(+), 25 deletions(-) diff --git a/src/interface.jl b/src/interface.jl index 39da605..9df7fb2 100644 --- a/src/interface.jl +++ b/src/interface.jl @@ -248,7 +248,7 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen for winename in winenames if !occursin(winename, chathistory) - println("\n~~~ Yiem decisionMaker() found wines from DB ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("\nYiem decisionMaker() found wines from DB ", Dates.now(), " ", @__FILE__, " ", @__LINE__) d = Dict( :understanding=> "I understand that the customer is looking for a wine that matches their intention and budget.", :reasoning=> "I checked the inventory and found wines that match the customer's criteria. I will present the wines to the customer.", @@ -363,7 +363,7 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen end checkFlag == true ? continue : nothing - println("\n~~~ Yiem decisionMaker() ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("\nYiem decisionMaker() ", Dates.now(), " ", @__FILE__, " ", @__LINE__) pprintln(Dict(responsedict)) # check whether an agent recommend wines before checking inventory or recommend wines @@ -959,8 +959,10 @@ function generatechat(a::sommelier, thoughtDict) You should only respond in format as described below: Chat: ... - Here are some examples of response format: - Chat: "I see. Let me think about it. I'll get back to you with my recommendation." 
+ Here are some examples: + Your ongoing conversation with the user: "user> hello, I need a new car\n" + Context: "Car previously found in your inventory: 1) Toyota Camry 2020 2) Honda Civic 2021 3) Ford Mustang 2022" + Chat: "Oh, we have a variety of cars available, including the Toyota Camry 2020, the Honda Civic 2021, and the Ford Mustang 2022. Which one would you like to see?" Let's begin! """ @@ -993,7 +995,7 @@ function generatechat(a::sommelier, thoughtDict) usermsg = """ Your ongoing conversation with the user: $chathistory - Contex: $context + $context Your thoughts: $yourthought1 $errornote """ @@ -1024,10 +1026,10 @@ function generatechat(a::sommelier, thoughtDict) # check whether response has all header detected_kw = GeneralUtils.detect_keyword(header, response) if sum(values(detected_kw)) < length(header) - errornote = "\nSQL decisionMaker() response does not have all header" + errornote = "\nYiemAgent generatechat() response does not have all header" continue elseif sum(values(detected_kw)) > length(header) - errornote = "\nSQL decisionMaker() response has duplicated header" + errornote = "\nnYiemAgent generatechat() response has duplicated header" continue end @@ -1047,7 +1049,7 @@ function generatechat(a::sommelier, thoughtDict) error("Context: is in text. 
This is not allowed") end - println("\n~~~ generatechat() ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("\ngeneratechat() ", Dates.now(), " ", @__FILE__, " ", @__LINE__) pprintln(Dict(responsedict)) # check whether an agent recommend wines before checking inventory or recommend wines @@ -1366,7 +1368,7 @@ function generatequestion(a, text2textInstructLLM::Function; recent=nothing)::St responsedict = GeneralUtils.textToDict(response, header; dictKey=dictkey, symbolkey=true) response = "Q1: " * responsedict[:q1] - println("\n~~~ generatequestion ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("\ngeneratequestion ", Dates.now(), " ", @__FILE__, " ", @__LINE__) pprintln(response) return response catch e @@ -1460,7 +1462,7 @@ function generateSituationReport(a, text2textInstructLLM::Function; skiprecent:: responsedict = GeneralUtils.textToDict(response, header; dictKey=dictkey, symbolkey=true) - println("\n~~~ generateSituationReport() ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("\ngenerateSituationReport() ", Dates.now(), " ", @__FILE__, " ", @__LINE__) pprintln(response) return responsedict @@ -1514,7 +1516,7 @@ function detectWineryName(a, text) try response = a.func[:text2textInstructLLM](prompt) - println("\n~~~ detectWineryName() ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("\ndetectWineryName() ", Dates.now(), " ", @__FILE__, " ", @__LINE__) pprintln(response) # check whether response has all header diff --git a/src/llmfunction.jl b/src/llmfunction.jl index 62c15ec..021924d 100644 --- a/src/llmfunction.jl +++ b/src/llmfunction.jl @@ -291,20 +291,20 @@ julia> result = checkinventory(agent, input) function checkinventory(a::T1, input::T2 ) where {T1<:agent, T2<:AbstractString} - println("\n~~~ checkinventory order: $input ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("\ncheckinventory order: $input ", Dates.now(), " ", @__FILE__, " ", @__LINE__) wineattributes_1 = extractWineAttributes_1(a, input) 
wineattributes_2 = extractWineAttributes_2(a, input) _inventoryquery = "retailer name: $(a.retailername), $wineattributes_1, $wineattributes_2" inventoryquery = "Retrieves winery, wine_name, vintage, region, country, wine_type, grape, serving_temperature, sweetness, intensity, tannin, acidity, tasting_notes, price and currency of wines that match the following criteria - {$_inventoryquery}" - println("~~~ checkinventory input: $inventoryquery ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("\ncheckinventory input: $inventoryquery ", Dates.now(), " ", @__FILE__, " ", @__LINE__) # add suppport for similarSQLVectorDB textresult, rawresponse = SQLLLM.query(inventoryquery, a.func[:executeSQL], a.func[:text2textInstructLLM], insertSQLVectorDB=a.func[:insertSQLVectorDB], similarSQLVectorDB=a.func[:similarSQLVectorDB]) - println("\n~~~ checkinventory result ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("\ncheckinventory result ", Dates.now(), " ", @__FILE__, " ", @__LINE__) println(textresult) return (result=textresult, rawresponse=rawresponse, success=true, errormsg=nothing) @@ -731,7 +731,7 @@ function paraphrase(text2textInstructLLM::Function, text::String) - N/A You should then respond to the user with: - 1) Paraphrase: Paraphrased text + Paraphrase: Paraphrased text You should only respond in format as described below: Paraphrase: ... @@ -739,6 +739,9 @@ function paraphrase(text2textInstructLLM::Function, text::String) Let's begin! 
""" + header = ["Paraphrase:"] + dictkey = ["paraphrase"] + errornote = "" response = nothing # placeholder for show when error msg show up @@ -756,17 +759,14 @@ function paraphrase(text2textInstructLLM::Function, text::String) ] # put in model format - prompt = GeneralUtils.formatLLMtext(_prompt; formatname="llama3instruct") - prompt *= """ - <|start_header_id|>assistant<|end_header_id|> - """ + prompt = GeneralUtils.formatLLMtext(_prompt; formatname="qwen") try response = text2textInstructLLM(prompt) # sometime the model response like this "here's how I would respond: ..." if occursin("respond:", response) errornote = "You don't need to intro your response" - error("\n~~~ paraphrase() response contain : ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + error("\nparaphrase() response contain : ", Dates.now(), " ", @__FILE__, " ", @__LINE__) end response = GeneralUtils.remove_french_accents(response) response = replace(response, '*'=>"") @@ -774,8 +774,16 @@ function paraphrase(text2textInstructLLM::Function, text::String) response = replace(response, '`' => "") response = GeneralUtils.remove_french_accents(response) - header = ["Paraphrase:"] - dictkey = ["paraphrase"] + # check whether response has all header + detected_kw = GeneralUtils.detect_keyword(header, response) + if sum(values(detected_kw)) < length(header) + errornote = "\nYiemAgent paraphrase() response does not have all header" + continue + elseif sum(values(detected_kw)) > length(header) + errornote = "\nnYiemAgent paraphrase() response has duplicated header" + continue + end + responsedict = GeneralUtils.textToDict(response, header; dictKey=dictkey, symbolkey=true) @@ -793,7 +801,7 @@ function paraphrase(text2textInstructLLM::Function, text::String) end end - println("\n~~~ paraphrase() ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("\nparaphrase() ", Dates.now(), " ", @__FILE__, " ", @__LINE__) pprintln(Dict(responsedict)) result = responsedict[:paraphrase] @@ -807,7 +815,7 @@ function 
paraphrase(text2textInstructLLM::Function, text::String) println("\nAttempt $attempt. Error occurred: $errorMsg\n$st ", Dates.now(), " ", @__FILE__, " ", @__LINE__) end end - error("generatechat failed to generate a response") + error("paraphrase() failed to generate a response") end diff --git a/test/test_1.jl b/test/test_1.jl index 5699e2e..e23b3fd 100644 --- a/test/test_1.jl +++ b/test/test_1.jl @@ -161,7 +161,7 @@ function insertSQLVectorDB(query::T1, SQL::T2; maxdistance::Integer=3) where {T1 end -function similarSommelierDecision(recentevents::T1; maxdistance::Integer=5 +function similarSommelierDecision(recentevents::T1; maxdistance::Integer=3 )::Union{AbstractDict, Nothing} where {T1<:AbstractString} tablename = "sommelier_decision_repository" # find similar From b8fd772a28b8b0e96b4e1c7ffc95dd7f59f0b576 Mon Sep 17 00:00:00 2001 From: narawat lamaiin Date: Mon, 31 Mar 2025 21:30:14 +0700 Subject: [PATCH 4/6] update --- src/interface.jl | 1007 ++++++++++++++++++++++++++++---------------- src/llmfunction.jl | 56 +-- src/util.jl | 111 +++-- test/Manifest.toml | 41 ++ test/Project.toml | 2 + test/test_1.jl | 27 +- 6 files changed, 818 insertions(+), 426 deletions(-) create mode 100644 test/Manifest.toml create mode 100644 test/Project.toml diff --git a/src/interface.jl b/src/interface.jl index 9df7fb2..622ef50 100644 --- a/src/interface.jl +++ b/src/interface.jl @@ -97,7 +97,7 @@ julia> output_thoughtDict = Dict( # Signature """ -function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agent} +function decisionMaker(a::T; recent::Integer=10)::Dict{Symbol,Any} where {T<:agent} # lessonDict = copy(JSON3.read("lesson.json")) @@ -124,23 +124,9 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen # """ # end - totalevents = length(a.memory[:events]) - ind = - if totalevents > recent - start = totalevents - recent - start:totalevents - else - 1:totalevents - end - - recentevents = "" - for (i, event) in 
enumerate(a.memory[:events][ind]) - if event[:outcome] === nothing - recentevents *= "$i) $(event[:subject])> $(event[:actioninput])\n" - else - recentevents *= "$i) $(event[:subject])> $(event[:actioninput]) $(event[:outcome])\n" - end - end + recent_ind = GeneralUtils.recentElementsIndex(length(a.memory[:events]), recent) + recentevents = a.memory[:events][recent_ind] + timeline = createTimeline(recentevents) #[TESTING] recap as caching # query similar result from vectorDB @@ -165,7 +151,8 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen end recentrecap = GeneralUtils.dictToString_noKey(_recentrecap) - similarDecision = a.func[:similarSommelierDecision](recentrecap) + # similarDecision = a.func[:similarSommelierDecision](recentrecap) + similarDecision = nothing #CHANGE if similarDecision !== nothing responsedict = similarDecision @@ -176,7 +163,7 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen Your name is $(a.name). You are a helpful English-speaking assistant, acting as a polite, website-based sommelier for $(a.retailername)'s wine store. Your goal includes: 1) Establish a connection with the customer by greeting them warmly - 2) Help them select the best wines only from your store's inventory that align with their preferences + 2) Guide them to select the best wines only from your store's inventory that align with their preferences Your responsibility includes: 1) Make an informed decision about what you need to do to achieve the goal @@ -197,7 +184,7 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen - Approach each customer with open-ended questions to understand their preferences, budget, and occasion. This will help you guide the conversation naturally while gathering essential insights. Once you have this information, you can efficiently check your inventory for the best match. - Do not ask the user about wine's flavor e.g. 
floral, citrusy, nutty or some thing similar as these terms cannot be used to search the database. - Once the user has selected their wine, ask the user if they need any further assistance. Do not offer any additional services. If the user doesn't need any further assistance, say goodbye and invite them to come back next time. - - Medium and full-bodied red wines should not be paired with spicy foods. + - Spicy foods should not be paired with medium and full-bodied red wines. You should follow the following guidelines: - When searching an inventory, search as broadly as possible based on the information you have gathered so far. @@ -208,23 +195,20 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen - Your store carries only wine. - Vintage 0 means non-vintage. - You should then respond to the user with interleaving Understanding, Reasoning, Plan, Action: - 1) Understanding: - - State your understanding about the current situation. - 2) Reasoning: - - State your step by step reasoning about the current situation. - 3) Plan: Based on the current situation, state a complete plan to complete the task. Be specific. - 4) Action_name (Must be aligned with your plan): The name of the action. Typically corresponds to the execution of the first step in your plan. - Can be one of the following functions: - - CHATBOX which you can use to talk with the user. The input is your intentions for the dialogue. Be specific. - - CHECKINVENTORY which you can use to check info about wine you want in your inventory. The input is a search term in verbal English. - Good query example: white wine, full-bodied, France, less than 2000 USD. - - ENDCONVERSATION which you can use when the user has finished their conversation with you, so that you can properly end the conversation. Input is "NA". 
- 5) Action_input: input of the action + You should then respond to the user with interleaving Thought, Plan, Action_name, Action_input: + 1) Thought: Articulate your current understanding and consider the present situation. + 2) Plan: Based on the current situation, state a complete action plan to complete the task. Be specific. + 3) Action_name: (Typically corresponds to the execution of the first step in your plan) Can be one of the following function names: + - CHATBOX which you can use to talk with the user. The input is your intentions for the dialogue. Be specific. + - CHECKINVENTORY which you can use to check info about wine you want in your inventory. The input is a search term in verbal English. + Bad query example 1: red wine that pair well with spicy food. + Bad query example 2: white wine that goes well with party food. + + - ENDCONVERSATION which you can use when the user has finished their conversation with you, so that you can properly end the conversation. Input is "NA". + 4) Action_input: input of the action You should only respond in format as described below: - Understanding: ... - Reasoning: ... + Thought: ... Plan: ... Action_name: ... Action_input: ... @@ -232,8 +216,8 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen Let's begin! 
""" - header = ["Understanding:", "Reasoning:", "Plan:", "Action_name:", "Action_input:"] - dictkey = ["understanding", "reasoning", "plan", "action_name", "action_input"] + header = ["Thought:", "Plan:", "Action_name:", "Action_input:"] + dictkey = ["thought", "plan", "action_name", "action_input"] chathistory = chatHistoryToText(a.chathistory) @@ -248,10 +232,9 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen for winename in winenames if !occursin(winename, chathistory) - println("\nYiem decisionMaker() found wines from DB ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("\nYiem decisionMaker() found wines from DB ", @__FILE__, ":", @__LINE__, " $(Dates.now())") d = Dict( - :understanding=> "I understand that the customer is looking for a wine that matches their intention and budget.", - :reasoning=> "I checked the inventory and found wines that match the customer's criteria. I will present the wines to the customer.", + :thought=> "I understand that the customer is looking for a wine that matches their intention and budget. I checked the inventory and found wines that match the customer's criteria. I will present the wines to the customer.", :plan=> "1) Provide detailed introductions of the wines you just found to the customer. 2) Explain how the wine could match the customer's intention and what its effects might mean for the customer's experience. 3) If multiple wines are available, highlight their differences and provide a comprehensive comparison of how each option aligns with the customer's intention and what the potential effects of each option could mean for the customer's experience. 
@@ -286,7 +269,7 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen usermsg = """ $context - Your recent events: $recentevents + Your recent events: $timeline Your Q&A: $QandA) $errornote """ @@ -315,17 +298,17 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen end if count > 1 errornote = "You must use only one function" - println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") continue end # check whether response has all header detected_kw = GeneralUtils.detect_keyword(header, response) - if sum(values(detected_kw)) < length(header) - errornote = "\nSQL evaluator() response does not have all header" + if 0 ∈ values(detected_kw) + errornote = "\nYiemAgent decisionMaker() response does not have all header" continue elseif sum(values(detected_kw)) > length(header) - errornote = "\nSQL evaluator() response has duplicated header" + errornote = "\nYiemAgent decisionMaker() response has duplicated header" continue end @@ -334,16 +317,15 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen if responsedict[:action_name] ∉ ["CHATBOX", "CHECKINVENTORY", "ENDCONVERSATION"] errornote = "You must use the given functions" - println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") continue end checkFlag = false - for i ∈ [:understanding, :plan, :action_name] + for i ∈ Symbol.(dictkey) if length(responsedict[i]) == 0 - error("$i is empty ", Dates.now(), " ", @__FILE__, " ", @__LINE__) errornote = "$i is empty" - println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") checkFlag = true break end @@ -352,18 +334,18 @@ function 
decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen # check if there are more than 1 key per categories checkFlag = false - for i ∈ [:understanding, :plan, :action_name, :action_input] + for i ∈ Symbol.(dictkey) matchkeys = GeneralUtils.findMatchingDictKey(responsedict, i) if length(matchkeys) > 1 errornote = "DecisionMaker has more than one key per categories" - println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") checkFlag = true break end end checkFlag == true ? continue : nothing - println("\nYiem decisionMaker() ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("\nYiem decisionMaker() ", @__FILE__, ":", @__LINE__, " $(Dates.now())") pprintln(Dict(responsedict)) # check whether an agent recommend wines before checking inventory or recommend wines @@ -390,7 +372,7 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen isWineInEvent == false errornote = "Note: Before recommending a wine, ensure it's in your inventory. Check your stock first." - println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") continue end end @@ -543,7 +525,7 @@ end # showerror(io, e) # errorMsg = String(take!(io)) # st = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace())) -# println("\nAttempt $attempt. Error occurred: $errorMsg\n$st ", Dates.now(), " ", @__FILE__, " ", @__LINE__) +# println("\nAttempt $attempt. Error occurred: $errorMsg\n$st ", @__FILE__, ":", @__LINE__, " $(Dates.now())") # end # end # error("evaluator failed to generate an evaluation") @@ -673,7 +655,7 @@ end # showerror(io, e) # errorMsg = String(take!(io)) # st = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace())) -# println("\nAttempt $attempt. 
Error occurred: $errorMsg\n$st ", Dates.now(), " ", @__FILE__, " ", @__LINE__) +# println("\nAttempt $attempt. Error occurred: $errorMsg\n$st ", @__FILE__, ":", @__LINE__, " $(Dates.now())") # end # end # error("reflector failed to generate a thought") @@ -864,10 +846,36 @@ function think(a::T)::NamedTuple{(:actionname, :result),Tuple{String,String}} wh subject="assistant", thought=thoughtDict, actionname=actionname, - actioninput=chatresponse, + actioninput=actioninput, ) ) result = chatresponse + # if actionname ∈ ["CHATBOX", "ENDCONVERSATION"] + # # chatresponse = generatechat(a, thoughtDict) + # push!(a.memory[:events], + # eventdict(; + # event_description="the assistant talks to the user.", + # timestamp=Dates.now(), + # subject="assistant", + # thought=thoughtDict, + # actionname=actionname, + # actioninput=actioninput, + # ) + # ) + # result = actioninput + # elseif actionname ∈ ["PRESENTBOX"] + # chatresponse = generatechat(a, thoughtDict) + # push!(a.memory[:events], + # eventdict(; + # event_description="the assistant talks to the user.", + # timestamp=Dates.now(), + # subject="assistant", + # thought=thoughtDict, + # actionname=actionname, + # actioninput=chatresponse, + # ) + # ) + # result = chatresponse elseif actionname == "CHECKINVENTORY" if rawresponse !== nothing @@ -895,13 +903,171 @@ function think(a::T)::NamedTuple{(:actionname, :result),Tuple{String,String}} wh ) ) else - error("condition is not defined ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + error("condition is not defined ", @__FILE__, ":", @__LINE__, " $(Dates.now())") end return (actionname=actionname, result=result) end +#[WORKING] +function presentbox(a::sommelier, thoughtDict) + systemmsg = + """ + + Your name is $(a.name). You are a helpful English-speaking assistant, acting as a polite, website-based sommelier for $(a.retailername)'s wine store. + + + You have checked the inventory and found wines that match the customer's criteria. 
+ + + Present the wines to the customer in a way that keep the conversation smooth and engaging. + + + Your ongoing conversation with the user: ... + Inventory check result: ... + Your thoughts: Your current thoughts in your mind + + + - Do not offer additional services you didn't think. + - Focus on plan. + + + - Focus on the latest conversation. + - If the user interrupts, prioritize the user + - Be honest + - Medium and full-bodied red wines should not be paired with spicy foods. + + + Chat: ... + + + Your ongoing conversation with the user: "user> hello, I need a new car\n" + Additional info: "Car previously found in your inventory: 1) Toyota Camry 2020 2) Honda Civic 2021 3) Ford Mustang 2022" + Your thoughts: "I should recommend the car we have to the user." + Chat: "We have a variety of cars available, including the Toyota Camry 2020, the Honda Civic 2021, and the Ford Mustang 2022. Which one would you like to see?" + + + Let's begin! + """ + + header = ["Chat:"] + dictkey = ["chat"] + + # a.memory[:shortmem][:available_wine] is a vector of dictionary + context = + if length(a.memory[:shortmem][:available_wine]) != 0 + "Wines previously found in your inventory: $(availableWineToText(a.memory[:shortmem][:available_wine]))" + else + "N/A" + end + + chathistory = chatHistoryToText(a.chathistory) + errornote = "" + response = nothing # placeholder for show when error msg show up + + yourthought = "$(thoughtDict[:thought]) $(thoughtDict[:plan])" + yourthought1 = nothing + + for attempt in 1:10 + + if attempt > 1 # use to prevent LLM generate the same respond over and over + yourthought1 = paraphrase(a.func[:text2textInstructLLM], yourthought) + else + yourthought1 = yourthought + end + + usermsg = """ + + $chathistory + + + $context + + + $yourthought1 + + $errornote + """ + + _prompt = + [ + Dict(:name => "system", :text => systemmsg), + Dict(:name => "user", :text => usermsg) + ] + + # put in model format + prompt = GeneralUtils.formatLLMtext(_prompt; 
formatname="qwen") + + response = a.func[:text2textInstructLLM](prompt) + # sometime the model response like this "here's how I would respond: ..." + if occursin("respond:", response) + errornote = "You don't need to intro your response" + error("generatechat() response contain : ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + end + response = GeneralUtils.remove_french_accents(response) + response = replace(response, '*'=>"") + response = replace(response, '$' => "USD") + response = replace(response, '`' => "") + response = replace(response, "<|eot_id|>"=>"") + response = GeneralUtils.remove_french_accents(response) + + # check whether response has all header + detected_kw = GeneralUtils.detect_keyword(header, response) + if 0 ∈ values(detected_kw) + errornote = "\nYiemAgent generatechat() response does not have all header" + continue + elseif sum(values(detected_kw)) > length(header) + errornote = "\nnYiemAgent generatechat() response has duplicated header" + continue + end + + responsedict = GeneralUtils.textToDict(response, header; + dictKey=dictkey, symbolkey=true) + + # check if Context: is in chat + if occursin("Context:", responsedict[:chat]) + error("Context: is in text. 
This is not allowed") + end + + println("\ngeneratechat() ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + pprintln(Dict(responsedict)) + + # check whether an agent recommend wines before checking inventory or recommend wines + # outside its inventory + # ask LLM whether there are any winery mentioned in the response + mentioned_winery = detectWineryName(a, responsedict[:chat]) + if mentioned_winery != "None" + mentioned_winery = String.(strip.(split(mentioned_winery, ","))) + + # check whether the wine is in event + isWineInEvent = false + for winename in mentioned_winery + for event in a.memory[:events] + if event[:outcome] !== nothing && occursin(winename, event[:outcome]) + isWineInEvent = true + break + end + end + end + + # if wine is mentioned but not in timeline or shortmem, + # then the agent is not supposed to recommend the wine + if isWineInEvent == false + + errornote = "Previously, You recommend wines that is not in your inventory which is not allowed." + error("Previously, You recommend wines that is not in your inventory which is not allowed.") + end + end + + result = responsedict[:chat] + + return result + end + error("generatechat failed to generate a response") +end + + """ @@ -925,50 +1091,41 @@ julia> function generatechat(a::sommelier, thoughtDict) systemmsg = """ + Your role: Your name is $(a.name). You are a helpful English-speaking assistant, acting as a polite, website-based sommelier for $(a.retailername)'s wine store. - You are currently talking with the user. - Your goal includes: - 1) Help the user select the best wines from your inventory that align with the user's preferences. - - Your responsibility includes: - 1) Given the situation, convey your thoughts to the user. - - Your responsibility does NOT includes: - 1) Requesting the user to place an order, make a purchase, or confirm the order. These are the job of our sales team at the store. - 2) Processing sales orders or engaging in any other sales-related activities. 
These are the job of our sales team at the store. - 3) Answering questions or offering additional services beyond those related to your store's wine recommendations such as discounts, quantity, rewards programs, promotions, delivery options, shipping, boxes, gift wrapping, packaging, personalized messages or something similar. These are the job of our sales team at the store. - - At each round of conversation, you will be given the current situation: - Your ongoing conversation with the user: ... - Context: ... + Situation: + You have some thinking in mind while you are talking with the user. + Your mission: + Concentrate on your thoughts and articulate them clearly. Keep the conversation engaging. + Your responsibility does NOT includes: + - Requesting the user to place an order, make a purchase, or confirm the order. These are the job of our sales team at the store. + - Processing sales orders or engaging in any other sales-related activities. These are the job of our sales team at the store. + - Answering questions or offering additional services beyond those related to your store's wine recommendations such as discounts, quantity, rewards programs, promotions, delivery options, shipping, boxes, gift wrapping, packaging, personalized messages or something similar. These are the job of our sales team at the store. + At each round of conversation, you will be given the following: + Additional info: ... Your thoughts: Your current thoughts in your mind - - You MUST follow the following guidelines: - - Do not offer additional services you didn't thought. - - Focus on plan. - - You should follow the following guidelines: - - Focus on the latest conversation. + Your ongoing conversation with the user: ... 
+ You must follow the following guidelines: + - Do not offer additional services you didn't think + You should follow the following guidelines: + - Focus on the latest conversation - If the user interrupts, prioritize the user - Be honest - - Medium and full-bodied red wines should not be paired with spicy foods. - - You should then respond to the user with: - 1) Chat: Given the situation, How would you respond to the user to express your thoughts honestly and keep the conversation going smoothly? - - You should only respond in format as described below: - Chat: ... - - Here are some examples: + You should then respond to the user with: + Dialogue: what you want to say to the user + You should only respond in format as described below: + Dialogue: ... + Here are some examples: Your ongoing conversation with the user: "user> hello, I need a new car\n" - Context: "Car previously found in your inventory: 1) Toyota Camry 2020 2) Honda Civic 2021 3) Ford Mustang 2022" - Chat: "Oh, we have a variety of cars available, including the Toyota Camry 2020, the Honda Civic 2021, and the Ford Mustang 2022. Which one would you like to see?" + Additional info: "Car previously found in your inventory: 1) Toyota Camry 2020 2) Honda Civic 2021 3) Ford Mustang 2022" + Your thoughts: "I should recommend the car we have to the user." + Dialogue: "We have a variety of cars available, including the Toyota Camry 2020, the Honda Civic 2021, and the Ford Mustang 2022. Which one would you like to see?" - Let's begin! + Let's begin! 
""" - - header = ["Chat:"] - dictkey = ["chat"] + #[WORKING] remove "chat" + header = ["Dialogue:"] + dictkey = ["dialogue"] # a.memory[:shortmem][:available_wine] is a vector of dictionary context = @@ -982,7 +1139,7 @@ function generatechat(a::sommelier, thoughtDict) errornote = "" response = nothing # placeholder for show when error msg show up - yourthought = "$(thoughtDict[:understanding]) $(thoughtDict[:reasoning]) $(thoughtDict[:plan])" + yourthought = "$(thoughtDict[:thought]) $(thoughtDict[:plan])" yourthought1 = nothing for attempt in 1:10 @@ -993,12 +1150,13 @@ function generatechat(a::sommelier, thoughtDict) yourthought1 = yourthought end - usermsg = """ - Your ongoing conversation with the user: $chathistory - $context - Your thoughts: $yourthought1 - $errornote - """ + usermsg = + """ + $errornote + Additional info: $context + Your thoughts: $yourthought1 + Your ongoing conversation with the user: $chathistory + """ _prompt = [ @@ -1009,89 +1167,237 @@ function generatechat(a::sommelier, thoughtDict) # put in model format prompt = GeneralUtils.formatLLMtext(_prompt; formatname="qwen") - try - response = a.func[:text2textInstructLLM](prompt) - # sometime the model response like this "here's how I would respond: ..." - if occursin("respond:", response) - errornote = "You don't need to intro your response" - error("generatechat() response contain : ", Dates.now(), " ", @__FILE__, " ", @__LINE__) - end - response = GeneralUtils.remove_french_accents(response) - response = replace(response, '*'=>"") - response = replace(response, '$' => "USD") - response = replace(response, '`' => "") - response = replace(response, "<|eot_id|>"=>"") - response = GeneralUtils.remove_french_accents(response) + response = a.func[:text2textInstructLLM](prompt) + # sometime the model response like this "here's how I would respond: ..." 
+ if occursin("respond:", response) + errornote = "You don't need to intro your response" + error("generatechat() response contain : ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + end + response = GeneralUtils.remove_french_accents(response) + response = replace(response, '*'=>"") + response = replace(response, '$' => "USD") + response = replace(response, '`' => "") + response = replace(response, "<|eot_id|>"=>"") + response = GeneralUtils.remove_french_accents(response) - # check whether response has all header - detected_kw = GeneralUtils.detect_keyword(header, response) - if sum(values(detected_kw)) < length(header) - errornote = "\nYiemAgent generatechat() response does not have all header" - continue - elseif sum(values(detected_kw)) > length(header) - errornote = "\nnYiemAgent generatechat() response has duplicated header" - continue - end + # check whether response has all header + detected_kw = GeneralUtils.detect_keyword(header, response) + kwvalue = [i for i in values(detected_kw)] + zeroind = findall(x -> x == 0, kwvalue) + missingkeys = [header[i] for i in zeroind] + if 0 ∈ values(detected_kw) + errornote = "$missingkeys are missing from your previous response" + println("\nYiemAgent generatechat() $errornote:\n $response ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + continue + elseif sum(values(detected_kw)) > length(header) + errornote = "Your response has duplicated points" + println("\n$errornote: $response ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + continue + end - responsedict = GeneralUtils.textToDict(response, header; - dictKey=dictkey, symbolkey=true) + responsedict = GeneralUtils.textToDict(response, header; + dictKey=dictkey, symbolkey=true) - # # check if there are more than 1 key per categories - # for i ∈ Symbol.(dictkey) - # matchkeys = GeneralUtils.findMatchingDictKey(responsedict, i) - # if length(matchkeys) > 1 - # error("generatechat has more than one key per categories") - # end - # end + # check if Context: is in 
chat + if occursin("Context:", responsedict[:dialogue]) + println("\nYiemAgent generatechat() context is in response. This is not allowed", @__FILE__, ":", @__LINE__, " $(Dates.now())") + continue + end - # check if Context: is in chat - if occursin("Context:", responsedict[:chat]) - error("Context: is in text. This is not allowed") - end + println("\ngeneratechat() ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + pprintln(Dict(responsedict)) - println("\ngeneratechat() ", Dates.now(), " ", @__FILE__, " ", @__LINE__) - pprintln(Dict(responsedict)) + # check whether an agent recommend wines before checking inventory or recommend wines + # outside its inventory + # ask LLM whether there are any winery mentioned in the response + mentioned_winery = detectWineryName(a, response) + if mentioned_winery != "None" + mentioned_winery = String.(strip.(split(mentioned_winery, ","))) - # check whether an agent recommend wines before checking inventory or recommend wines - # outside its inventory - # ask LLM whether there are any winery mentioned in the response - mentioned_winery = detectWineryName(a, responsedict[:chat]) - if mentioned_winery != "None" - mentioned_winery = String.(strip.(split(mentioned_winery, ","))) - - # check whether the wine is in event - isWineInEvent = false - for winename in mentioned_winery - for event in a.memory[:events] - if event[:outcome] !== nothing && occursin(winename, event[:outcome]) - isWineInEvent = true - break - end + # check whether the wine is in event + isWineInEvent = false + for winename in mentioned_winery + for event in a.memory[:events] + if event[:outcome] !== nothing && occursin(winename, event[:outcome]) + isWineInEvent = true + break end end - - # if wine is mentioned but not in timeline or shortmem, - # then the agent is not supposed to recommend the wine - if isWineInEvent == false - - errornote = "Previously, You recommend wines that is not in your inventory which is not allowed." 
- error("Previously, You recommend wines that is not in your inventory which is not allowed.") - end end - result = responsedict[:chat] - - return result - catch e - io = IOBuffer() - showerror(io, e) - errorMsg = String(take!(io)) - st = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace())) - println("\nAttempt $attempt. Error occurred: $errorMsg\n$st ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + # then the agent is not supposed to recommend the wine + if isWineInEvent == false + errornote = "You recommended wines that are not in your inventory before. Please only recommend wines that you have previously found in your inventory." + println("\nERROR YiemAgent generatechat() $errornote $response ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + continue + end end + result = responsedict[:dialogue] + + return result end error("generatechat failed to generate a response") end +# function generatechat(a::sommelier, thoughtDict) +# systemmsg = +# """ +# +# Your name is $(a.name). You are a helpful English-speaking assistant, acting as a polite, website-based sommelier for $(a.retailername)'s wine store. +# +# +# You have some thinking in mind while you are talking with the user. +# +# +# Concentrate on your thoughts and articulate them clearly. Keep the conversation remains engaging. +# +# +# - Requesting the user to place an order, make a purchase, or confirm the order. These are the job of our sales team at the store. +# - Processing sales orders or engaging in any other sales-related activities. These are the job of our sales team at the store. +# - Answering questions or offering additional services beyond those related to your store's wine recommendations such as discounts, quantity, rewards programs, promotions, delivery options, shipping, boxes, gift wrapping, packaging, personalized messages or something similar. These are the job of our sales team at the store. +# +# +# Your ongoing conversation with the user: ... 
+# Additional info: ... +# Your thoughts: Your current thoughts in your mind +# +# +# - Do not offer additional services you didn't think. +# - Focus on plan. +# +# +# - Focus on the latest conversation. +# - If the user interrupts, prioritize the user +# - Be honest +# - Medium and full-bodied red wines should not be paired with spicy foods. +# +# +# Chat: ... +# +# +# Your ongoing conversation with the user: "user> hello, I need a new car\n" +# Additional info: "Car previously found in your inventory: 1) Toyota Camry 2020 2) Honda Civic 2021 3) Ford Mustang 2022" +# Your thoughts: "I should recommend the car we have to the user." +# Chat: "We have a variety of cars available, including the Toyota Camry 2020, the Honda Civic 2021, and the Ford Mustang 2022. Which one would you like to see?" +# + +# Let's begin! +# """ + +# header = ["Chat:"] +# dictkey = ["chat"] + +# # a.memory[:shortmem][:available_wine] is a vector of dictionary +# context = +# if length(a.memory[:shortmem][:available_wine]) != 0 +# "Wines previously found in your inventory: $(availableWineToText(a.memory[:shortmem][:available_wine]))" +# else +# "N/A" +# end + +# chathistory = chatHistoryToText(a.chathistory) +# errornote = "" +# response = nothing # placeholder for show when error msg show up + +# yourthought = "$(thoughtDict[:thought]) $(thoughtDict[:plan])" +# yourthought1 = nothing + +# for attempt in 1:10 + +# if attempt > 1 # use to prevent LLM generate the same respond over and over +# yourthought1 = paraphrase(a.func[:text2textInstructLLM], yourthought) +# else +# yourthought1 = yourthought +# end + +# usermsg = """ +# +# $chathistory +# +# +# $context +# +# +# $yourthought1 +# +# $errornote +# """ + +# _prompt = +# [ +# Dict(:name => "system", :text => systemmsg), +# Dict(:name => "user", :text => usermsg) +# ] + +# # put in model format +# prompt = GeneralUtils.formatLLMtext(_prompt; formatname="qwen") + +# response = a.func[:text2textInstructLLM](prompt) +# # sometime the model 
response like this "here's how I would respond: ..." +# if occursin("respond:", response) +# errornote = "You don't need to intro your response" +# error("generatechat() response contain : ", @__FILE__, ":", @__LINE__, " $(Dates.now())") +# end +# response = GeneralUtils.remove_french_accents(response) +# response = replace(response, '*'=>"") +# response = replace(response, '$' => "USD") +# response = replace(response, '`' => "") +# response = replace(response, "<|eot_id|>"=>"") +# response = GeneralUtils.remove_french_accents(response) + +# # check whether response has all header +# detected_kw = GeneralUtils.detect_keyword(header, response) +# if 0 ∈ values(detected_kw) +# errornote = "\nYiemAgent generatechat() response does not have all header" +# continue +# elseif sum(values(detected_kw)) > length(header) +# errornote = "\nnYiemAgent generatechat() response has duplicated header" +# continue +# end + +# responsedict = GeneralUtils.textToDict(response, header; +# dictKey=dictkey, symbolkey=true) + +# # check if Context: is in chat +# if occursin("Context:", responsedict[:chat]) +# error("Context: is in text. 
This is not allowed") +# end + +# println("\ngeneratechat() ", @__FILE__, ":", @__LINE__, " $(Dates.now())") +# pprintln(Dict(responsedict)) + +# # check whether an agent recommend wines before checking inventory or recommend wines +# # outside its inventory +# # ask LLM whether there are any winery mentioned in the response +# mentioned_winery = detectWineryName(a, responsedict[:chat]) +# if mentioned_winery != "None" +# mentioned_winery = String.(strip.(split(mentioned_winery, ","))) + +# # check whether the wine is in event +# isWineInEvent = false +# for winename in mentioned_winery +# for event in a.memory[:events] +# if event[:outcome] !== nothing && occursin(winename, event[:outcome]) +# isWineInEvent = true +# break +# end +# end +# end + +# # if wine is mentioned but not in timeline or shortmem, +# # then the agent is not supposed to recommend the wine +# if isWineInEvent == false + +# errornote = "Previously, You recommend wines that is not in your inventory which is not allowed." +# error("Previously, You recommend wines that is not in your inventory which is not allowed.") +# end +# end + +# result = responsedict[:chat] + +# return result +# end +# error("generatechat failed to generate a response") +# end function generatechat(a::companion) @@ -1135,7 +1441,6 @@ function generatechat(a::companion) # put in model format prompt = GeneralUtils.formatLLMtext(_prompt; formatname="qwen") - response = a.text2textInstructLLM(prompt) return response @@ -1144,107 +1449,110 @@ function generatechat(a::companion) end -function generatequestion(a, text2textInstructLLM::Function; recent=nothing)::String +function generatequestion(a, text2textInstructLLM::Function; + recent::Integer=5)::String systemmsg = """ - Your name is $(a.name). You are a helpful English-speaking, website-based sommelier for $(a.retailername)'s online store. 
-    Your goal includes:
-    1) Help the user select the best wines from your inventory that align with the user's preferences
-    2) Thanks the user when they don't need any further assistance and invite them to comeback next time
+    Your role:
+    Your name is $(a.name). You are a helpful English-speaking, website-based sommelier for $(a.retailername)'s online store currently talking with the user.
+    Your goal includes:
+    1) Help the user select the best wines from your inventory that align with the user's preferences
+    2) Thank the user when they don't need any further assistance and invite them to come back next time

-    Your responsibility includes:
-    1) Ask yourself what to do about the current situation
+    Your responsibility includes:
+    1) Ask yourself:
+        - what do you know
+        - what you do not know
+        - what could you do

-    Your responsibility does NOT includes:
-    1) Requesting the user to place an order, make a purchase, or confirm the order. These are the job of our sales team at the store.
-    2) Processing sales orders or engaging in any other sales-related activities. These are the job of our sales team at the store.
+    Your responsibility does NOT include:
+    1) Requesting the user to place an order, make a purchase, or confirm the order. These are the job of our sales team at the store.
+    2) Processing sales orders or engaging in any other sales-related activities. These are the job of our sales team at the store. 
+    3) Answering questions or offering additional services beyond those related to your store's wine recommendations such as discounts, quantity, rewards programs, promotions, delivery options, shipping, boxes, gift wrapping, packaging, personalized messages or something similar. These are the job of our sales team at the store.

-    At each round of conversation, you will be given the current situation:
-    Recap: recap of what has happened so far
-    Your recent events: latest 5 events of the situation
+    At each round of conversation, you will be given the current situation:
+    Recap: recap of what has happened so far
+    Additional info: ...
+    Your recent events: latest 5 events of the situation

-    You must follow the following guidelines:
-    - Your question should be specific, self-contained and not require any additional context.
-    - Once the user has chose their wine, ask the user if they need any further assistance. Do not offer any additional services. If the user doesn't need any further assistance, say goodbye and invite them to come back next time.
+    You must follow the following guidelines:
+    - Your question should be specific, self-contained and not require any additional context.
+    - Once the user has chosen their wine, ask the user if they need any further assistance. Do not offer any additional services. If the user doesn't need any further assistance, say goodbye and invite them to come back next time. 
- You should follow the following guidelines: - - Focus on the latest conversation - - If the user interrupts, prioritize the user - - If you don't already know, find out the user's budget - - If you don't already know, find out the type of wine the user is looking for, such as red, white, sparkling, rose, dessert, fortified - - If you don't already know, find out the occasion for which the user is buying wine - - If you don't already know, find out the characteristics of wine the user is looking for, such as tannin, sweetness, intensity, acidity - - If you don't already know, find out what food will be served with wine - - If you haven't already, introduce the wines you found in the database to the user first - - Generally speaking, your inventory has some wines from France, the United States, Australia, Spain, and Italy, but you won't know exactly until you check your inventory. - - All wines in your inventory are always in stock. - - Engage in conversation to indirectly investigate the customer's intention, budget and preferences before checking your inventory. - - Do not ask the user about wine's flavor e.g. floral, citrusy, nutty or some thing similar as these terms cannot be used to search the database. - - Once the user has selected their wine, ask the user if they need any further assistance. Do not offer any additional services. If the user doesn't need any further assistance, say goodbye and invite them to come back next time. - - Medium and full-bodied red wines should not be paired with spicy foods. - - If a customer requests information about discounts, quantity, rewards programs, promotions, delivery options, boxes, gift wrapping, packaging, or personalized messages, please inform them that they can contact our sales team at the store. 
+ You should follow the following guidelines: + - Focus on the latest conversation + - If the user interrupts, prioritize the user + - If you don't already know, find out the user's budget + - If you don't already know, find out the type of wine the user is looking for, such as red, white, sparkling, rose, dessert, fortified + - If you don't already know, find out the occasion for which the user is buying wine + - If you don't already know, find out the characteristics of wine the user is looking for, such as tannin, sweetness, intensity, acidity + - If you don't already know, find out what food will be served with wine + - If you haven't already, introduce the wines you found in the database to the user first + - Generally speaking, your inventory has some wines from France, the United States, Australia, Spain, and Italy, but you won't know exactly until you check your inventory. + - All wines in your inventory are always in stock. + - Engage in conversation to indirectly investigate the customer's intention, budget and preferences before checking your inventory. + - Do not ask the user about wine's flavor e.g. floral, citrusy, nutty or some thing similar as these terms cannot be used to search the database. + - Once the user has selected their wine, ask the user if they need any further assistance. Do not offer any additional services. If the user doesn't need any further assistance, say goodbye and invite them to come back next time. + - Medium and full-bodied red wines are bad with spicy foods. + - If a customer requests information about discounts, quantity, rewards programs, promotions, delivery options, boxes, gift wrapping, packaging, or personalized messages, please inform them that they can contact our sales team at the store. 
- You should then respond to the user with: - 1) Understanding: - - State your understanding about the current situation - 2) Q: Given the situation, "ask yourself" at least five, but no more than ten, questions - 3) A: Given the situation, "answer to yourself" the best you can - - Do not generate any extra text after you finish answering all questions + You should then respond to the user with: + 1) Thought: State your thought about the current situation + 2) Q: Given the situation, "ask yourself" at least five, but no more than twenty, questions + 3) A: Given the situation, "answer to yourself" the best you can. Do not generate any extra text after you finish answering all questions - You must only respond in format as described below: - Understanding: ... - Q1: ... - A1: ... - Q2: ... - A2: ... - Q3: ... - A3: ... - ... + You must only respond in format as described below: + Thought: ... + Q1: ... + A1: ... + Q2: ... + A2: ... + ... - Here are some examples: - Q: The user is buying for her husband, should I dig in to get more information? - A: Yes, I should. So that I have better idea about the user's preferences. - - Q: Why the user saying this? - A: According to the situation, ... - - Q: The user is asking for a cappuccino. Do I have it at my cafe? - A: No I don't. - - Q: Since I don't have a cappuccino but I have a Late, should I ask if they are okay with that? - A: Yes, I should. - - Q: Are they allergic to milk? - A: According to the situation, since they mentioned a cappuccino before, it seems they are not allergic to milk. - - Q: Have I checked the inventory yet? - A: According to the situation, no. I need more information. - - Q: Should I check the inventory now? - A: According to the situation, ... - - Q: What do I have in the inventory? - A: According to the situation, ... - - Q: Which items are within the user price range? And which items are out of the user price rance? - A: According to the situation, ... - - Q: Do I have them in stock? 
- A: According to the situation, ... - - Q: Did I introduce them to the user already? - A: According to the situation, No. - - Q: Am I certain about the information I'm going to share with the user, or should I verify the information first? - A: According to the situation, ... - - Let's begin! + Here are some examples: + Q: The user is buying for her husband, should I dig in to get more information? + A: Yes, I should. So that I have better idea about the user's preferences. + Q: What the user is looking for? + A: The user is asking for a MPV car with 7-seat + Q: Why the user saying this? + A: The user does not want an SUV because it does not have sliding doors + Q: The user is asking for a cappuccino. Do I have it at my cafe? + A: No I don't have. + Q: Since I don't have a cappuccino but I have a Late, should I ask if they are okay with that? + A: Yes, I should. + Q: Are they allergic to milk? + A: Since they mentioned a cappuccino before, it seems they are not allergic to milk. + Q: Have I checked the inventory yet? + A: No. I need more information from the user including ... + Q: What else do I need to know? + A: ... + Q: Should I check the inventory now? + A: ... + Q: What the user intend to do with the car? + A: I don't know yet. I will need to ask the user. + Q: What do I have in the inventory? + A: ... + Q: Which items are within the user price range? And which items are out of the user price rance? + A: ... + Q: Do I have them in stock? + A: ... + Q: Did I introduce them to the user already? + A: Not yet. + Q: Am I certain about the information I'm going to share with the user, or should I verify the information first? + A: ... + Q: What should I do? + A: ... + Q: What shouldn't I do? + A: ... + Q: what kind of car suitable for off-road trip? + A: A four-wheel drive SUV is a good choice for off-road trips. + + Let's begin! 
""" - header = ["Understanding:", "Q1:"] - dictkey = ["understanding", "q1"] + header = ["Thought:", "Q1:"] + dictkey = ["thought", "q1"] context = if length(a.memory[:shortmem][:available_wine]) != 0 @@ -1253,23 +1561,9 @@ function generatequestion(a, text2textInstructLLM::Function; recent=nothing)::St "N/A" end - totalevents = length(a.memory[:events]) - ind = - if totalevents > recent - start = totalevents - recent - start:totalevents - else - 1:totalevents - end - - timeline = "" - for (i, event) in enumerate(a.memory[:events][ind]) - if event[:outcome] === nothing - timeline *= "$i) $(event[:subject])> $(event[:actioninput])\n" - else - timeline *= "$i) $(event[:subject])> $(event[:actioninput]) $(event[:outcome])\n" - end - end + recent_ind = GeneralUtils.recentElementsIndex(length(a.memory[:events]), recent) + recentevents = a.memory[:events][recent_ind] + timeline = createTimeline(recentevents) errornote = "" response = nothing # store for show when error msg show up @@ -1291,11 +1585,15 @@ function generatequestion(a, text2textInstructLLM::Function; recent=nothing)::St end for attempt in 1:10 + if attempt > 1 + println("\nYiemAgent generatequestion() attempt $attempt/10 ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + end + usermsg = """ Recap: $recap) + Additional info: $context Your recent events: $timeline - Context: $context $errornote """ @@ -1308,78 +1606,67 @@ function generatequestion(a, text2textInstructLLM::Function; recent=nothing)::St # put in model format prompt = GeneralUtils.formatLLMtext(_prompt; formatname="qwen") - try - response = text2textInstructLLM(prompt) - # make sure generatequestion() don't have wine name that is not from retailer inventory - # check whether an agent recommend wines before checking inventory or recommend wines - # outside its inventory - # ask LLM whether there are any winery mentioned in the response - mentioned_winery = detectWineryName(a, response) - if mentioned_winery != "None" - mentioned_winery = 
String.(strip.(split(mentioned_winery, ","))) + response = text2textInstructLLM(prompt, modelsize="medium") + # make sure generatequestion() don't have wine name that is not from retailer inventory + # check whether an agent recommend wines before checking inventory or recommend wines + # outside its inventory + # ask LLM whether there are any winery mentioned in the response + mentioned_winery = detectWineryName(a, response) + if mentioned_winery != "None" + mentioned_winery = String.(strip.(split(mentioned_winery, ","))) - # check whether the wine is in event - isWineInEvent = false - for winename in mentioned_winery - for event in a.memory[:events] - if event[:outcome] !== nothing && occursin(winename, event[:outcome]) - isWineInEvent = true - break - end + # check whether the wine is in event + isWineInEvent = false + for winename in mentioned_winery + for event in a.memory[:events] + if event[:outcome] !== nothing && occursin(winename, event[:outcome]) + isWineInEvent = true + break end end - - # if wine is mentioned but not in timeline or shortmem, - # then the agent is not supposed to recommend the wine - if isWineInEvent == false - errornote = "Previously, You mentioned wines that is not in your inventory which is not allowed." - error("Previously, You mentioned wines that is not in your inventory which is not allowed.") - end end - # sometime LLM generate more than 1 Understanding: - understanding_number = count("Understanding:", response) - if understanding_number > 1 - x = split(response, "Understanding:")[2] - response = "Understanding:" * x - end - - q_number = count("Q", response) - - # check for valid response - q_atleast = length(a.memory[:events]) <= 2 ? 1 : 3 - if q_number < q_atleast - error("too few questions only $q_number questions are generated ", Dates.now(), " ", @__FILE__, " ", @__LINE__) - # check whether "A1" is in the response, if not error. 
- elseif !occursin("A1:", response) - error("no answer found in the response ", Dates.now(), " ", @__FILE__, " ", @__LINE__) - end - - # check whether response has all header - detected_kw = GeneralUtils.detect_keyword(header, response) - if sum(values(detected_kw)) < length(header) - errornote = "\nSQL evaluator() response does not have all header" - continue - elseif sum(values(detected_kw)) > length(header) - errornote = "\nSQL evaluator() response has duplicated header" + # if wine is mentioned but not in timeline or shortmem, + # then the agent is not supposed to recommend the wine + if isWineInEvent == false + errornote = "Previously, You mentioned wines that is not in your inventory which is not allowed." continue end - - responsedict = GeneralUtils.textToDict(response, header; - dictKey=dictkey, symbolkey=true) - response = "Q1: " * responsedict[:q1] - println("\ngeneratequestion ", Dates.now(), " ", @__FILE__, " ", @__LINE__) - pprintln(response) - return response - catch e - io = IOBuffer() - showerror(io, e) - errorMsg = String(take!(io)) - st = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace())) - println("\nAttempt $attempt. Error occurred: $errorMsg\n$st ", Dates.now(), " ", @__FILE__, " ", @__LINE__) end + + q_number = count("Q", response) + + # check for valid response + if q_number < 2 + errornote = "too few questions only $q_number questions are generated previously." + println("too few questions only $q_number questions are generated ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + continue + # check whether "A1" is in the response, if not error. 
+ elseif !occursin("A1:", response) + errornote = "previous response does not have A1" + println("\nprevious response does not have A1 ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + continue + end + + # check whether response has all header + detected_kw = GeneralUtils.detect_keyword(header, response) + if 0 ∈ values(detected_kw) + errornote = "\nYiemAgent generatequestion() response does not have all header" + continue + elseif sum(values(detected_kw)) > length(header) + errornote = "\nYiemAgent generatequestion() response has duplicated header" + continue + end + + responsedict = GeneralUtils.textToDict(response, header; + dictKey=dictkey, symbolkey=true) + response = "Q1: " * responsedict[:q1] + println("\nYiemAgent generatequestion() ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + pprintln(response) + + return response end - error("generatequestion failed to generate a response ", response) + error("YiemAgent generatequestion() failed to generate a response ", response) end @@ -1399,8 +1686,8 @@ function generateSituationReport(a, text2textInstructLLM::Function; skiprecent:: You should follow the following guidelines: - Use the word "user" and "assistant" instead of their name in the report - You should then respond to the user with: - event: a detailed summary for each event without exaggerated details. + You should then respond to the user with the following: + Event: a detailed summary for each event without exaggerated details. You must only respond in format as described below: Event_1: ... 
@@ -1420,17 +1707,12 @@ function generateSituationReport(a, text2textInstructLLM::Function; skiprecent:: header = ["Event_$i:" for i in eachindex(a.memory[:events])] dictkey = lowercase.(["Event_$i" for i in eachindex(a.memory[:events])]) - if length(a.memory[:events]) <= skiprecent - return nothing - end - - events = a.memory[:events][1:end-skiprecent] - - timeline = createTimeline(a.memory[:events]; skiprecent=skiprecent) + ind = GeneralUtils.nonRecentElementsIndex(length(a.memory[:events]), skiprecent) + events = a.memory[:events][ind] + timeline = createTimeline(events) errornote = "" response = nothing # store for show when error msg show up - for attempt in 1:10 usermsg = """ Total events: $(length(events)) @@ -1451,18 +1733,23 @@ function generateSituationReport(a, text2textInstructLLM::Function; skiprecent:: # check whether response has all header detected_kw = GeneralUtils.detect_keyword(header, response) - if sum(values(detected_kw)) < length(header) - errornote = "\nYiemAgent generateSituationReport() response does not have all header" + kwvalue = [i for i in values(detected_kw)] + zeroind = findall(x -> x == 0, kwvalue) + missingkeys = [header[i] for i in zeroind] + if 0 ∈ values(detected_kw) + errornote = "$missingkeys are missing in your previous attempt" + println("\nYiemAgent generateSituationReport() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") continue elseif sum(values(detected_kw)) > length(header) - errornote = "\nYiemAgent generateSituationReport() response has duplicated header" + errornote = "Your previous response has duplicated events" + println("\nYiemAgent generateSituationReport() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") continue end responsedict = GeneralUtils.textToDict(response, header; dictKey=dictkey, symbolkey=true) - println("\ngenerateSituationReport() ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("\ngenerateSituationReport() ", @__FILE__, ":", @__LINE__, " $(Dates.now())") 
pprintln(response) return responsedict @@ -1516,16 +1803,16 @@ function detectWineryName(a, text) try response = a.func[:text2textInstructLLM](prompt) - println("\ndetectWineryName() ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("\ndetectWineryName() ", @__FILE__, ":", @__LINE__, " $(Dates.now())") pprintln(response) # check whether response has all header detected_kw = GeneralUtils.detect_keyword(header, response) - if sum(values(detected_kw)) < length(header) - errornote = "\nSQL evaluator() response does not have all header" + if 0 ∈ values(detected_kw) + errornote = "\nYiemAgent detectWineryName() response does not have all header" continue elseif sum(values(detected_kw)) > length(header) - errornote = "\nSQL evaluator() response has duplicated header" + errornote = "\nYiemAgent detectWineryName() response has duplicated header" continue end @@ -1540,7 +1827,7 @@ function detectWineryName(a, text) showerror(io, e) errorMsg = String(take!(io)) st = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace())) - println("\n Attempt $attempt. Error occurred: $errorMsg\n$st ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("\n Attempt $attempt. 
Error occurred: $errorMsg\n$st ", @__FILE__, ":", @__LINE__, " $(Dates.now())") end end error("detectWineryName failed to generate a response") diff --git a/src/llmfunction.jl b/src/llmfunction.jl index 021924d..4f13881 100644 --- a/src/llmfunction.jl +++ b/src/llmfunction.jl @@ -291,20 +291,20 @@ julia> result = checkinventory(agent, input) function checkinventory(a::T1, input::T2 ) where {T1<:agent, T2<:AbstractString} - println("\ncheckinventory order: $input ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("\ncheckinventory order: $input ", @__FILE__, ":", @__LINE__, " $(Dates.now())") wineattributes_1 = extractWineAttributes_1(a, input) wineattributes_2 = extractWineAttributes_2(a, input) _inventoryquery = "retailer name: $(a.retailername), $wineattributes_1, $wineattributes_2" inventoryquery = "Retrieves winery, wine_name, vintage, region, country, wine_type, grape, serving_temperature, sweetness, intensity, tannin, acidity, tasting_notes, price and currency of wines that match the following criteria - {$_inventoryquery}" - println("\ncheckinventory input: $inventoryquery ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("\ncheckinventory input: $inventoryquery ", @__FILE__, ":", @__LINE__, " $(Dates.now())") # add suppport for similarSQLVectorDB textresult, rawresponse = SQLLLM.query(inventoryquery, a.func[:executeSQL], a.func[:text2textInstructLLM], insertSQLVectorDB=a.func[:insertSQLVectorDB], similarSQLVectorDB=a.func[:similarSQLVectorDB]) - println("\ncheckinventory result ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("\ncheckinventory result ", @__FILE__, ":", @__LINE__, " $(Dates.now())") println(textresult) return (result=textresult, rawresponse=rawresponse, success=true, errormsg=nothing) @@ -345,7 +345,7 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2< - Do not generate other comments. 
You should then respond to the user with: - Comprehension: state your understanding of the current situation + Thought: state your understanding of the current situation Wine_name: name of the wine Winery: name of the winery Vintage: the year of the wine @@ -359,7 +359,7 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2< Food_to_be_paired_with_wine: food that the user will be served with the wine such as poultry, fish, steak, etc You should only respond in format as described below: - Comprehension: ... + Thought: ... Wine_name: ... Winery: ... Vintage: ... @@ -376,17 +376,19 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2< User's query: red, Chenin Blanc, Riesling, 20 USD {"reasoning": ..., "winery": "NA", "wine_name": "NA", "vintage": "NA", "region": "NA", "country": "NA", "wine_type": "red, white", "grape_varietal": "Chenin Blanc, Riesling", "tasting_notes": "NA", "wine_price": "0-20", "occasion": "NA", "food_to_be_paired_with_wine": "NA"} - User's query: Domaine du Collier Saumur Blanc 2019, France, white, Chenin Blanc - {"reasoning": ..., "winery": "Domaine du Collier", "wine_name": "Saumur Blanc", "vintage": "2019", "region": "Saumur", "country": "France", "wine_type": "white", "grape_varietal": "Chenin Blanc", "tasting_notes": "NA", "wine_price": "NA", "occasion": "NA", "food_to_be_paired_with_wine": "NA"} + User's query: Domaine du Collier Saumur Blanc 2019, France, white, Merlot + {"reasoning": ..., "winery": "Domaine du Collier", "wine_name": "Saumur Blanc", "vintage": "2019", "region": "Saumur", "country": "France", "wine_type": "white", "grape_varietal": "Merlot", "tasting_notes": "NA", "wine_price": "NA", "occasion": "NA", "food_to_be_paired_with_wine": "NA"} Let's begin! 
""" - header = ["Comprehension:", "Wine_name:", "Winery:", "Vintage:", "Region:", "Country:", "Wine_type:", "Grape_varietal:", "Tasting_notes:", "Wine_price:", "Occasion:", "Food_to_be_paired_with_wine:"] - dictkey = ["comprehension", "wine_name", "winery", "vintage", "region", "country", "wine_type", "grape_varietal", "tasting_notes", "wine_price", "occasion", "food_to_be_paired_with_wine"] + header = ["Thought:", "Wine_name:", "Winery:", "Vintage:", "Region:", "Country:", "Wine_type:", "Grape_varietal:", "Tasting_notes:", "Wine_price:", "Occasion:", "Food_to_be_paired_with_wine:"] + dictkey = ["thought", "wine_name", "winery", "vintage", "region", "country", "wine_type", "grape_varietal", "tasting_notes", "wine_price", "occasion", "food_to_be_paired_with_wine"] errornote = "" - for attempt in 1:5 + for attempt in 1:10 + #[WORKING] I should add generatequestion() + usermsg = """ User's query: $input @@ -409,7 +411,7 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2< for word in header if !occursin(word, response) errornote = "$word attribute is missing in previous attempts" - println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") checkFlag = true break end @@ -418,7 +420,7 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2< # check whether response has all header detected_kw = GeneralUtils.detect_keyword(header, response) - if sum(values(detected_kw)) < length(header) + if 0 ∈ values(detected_kw) errornote = "\nYiemAgent extractWineAttributes_1() response does not have all header" continue elseif sum(values(detected_kw)) > length(header) @@ -428,7 +430,7 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2< responsedict = GeneralUtils.textToDict(response, header; dictKey=dictkey, symbolkey=true) - delete!(responsedict, :comprehension) + 
delete!(responsedict, :thought) delete!(responsedict, :tasting_notes) delete!(responsedict, :occasion) delete!(responsedict, :food_to_be_paired_with_wine) @@ -440,14 +442,14 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2< checkFlag = false for i in dictkey j = Symbol(i) - if j ∉ [:comprehension, :tasting_notes, :occasion, :food_to_be_paired_with_wine] + if j ∉ [:thought, :tasting_notes, :occasion, :food_to_be_paired_with_wine] # in case j is wine_price it needs to be checked differently because its value is ranged if j == :wine_price if responsedict[:wine_price] != "NA" # check whether wine_price is in ranged number if !occursin('-', responsedict[:wine_price]) errornote = "wine_price must be a range number" - println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") checkFlag = true break end @@ -462,7 +464,7 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2< # price range like 100-100 is not good if minprice == maxprice errornote = "wine_price with minimum equals to maximum is not valid" - println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") checkFlag = true break end @@ -481,7 +483,7 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2< for x in content #check whether price are mentioned in the input if !occursin("NA", responsedict[j]) && !occursin(x, input) errornote = "$x is not mentioned in the user query, you must only use the info from the query." 
- println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") checkFlag == true break end @@ -640,7 +642,7 @@ function extractWineAttributes_2(a::T1, input::T2)::String where {T1<:agent, T2< # check whether response has all header detected_kw = GeneralUtils.detect_keyword(header, response) - if sum(values(detected_kw)) < length(header) + if 0 ∈ values(detected_kw) errornote = "\nYiemAgent extractWineAttributes_2() response does not have all header" continue elseif sum(values(detected_kw)) > length(header) @@ -657,7 +659,7 @@ function extractWineAttributes_2(a::T1, input::T2)::String where {T1<:agent, T2< value = responsedict[keyword] if value != "NA" && !occursin(value, input) errornote = "WARNING. Keyword $keyword: $value does not appear in the input. You must use information from the input only" - println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") continue end @@ -673,7 +675,7 @@ function extractWineAttributes_2(a::T1, input::T2)::String where {T1<:agent, T2< if !occursin("keyword", string(k)) if v !== "NA" && (!occursin('-', v) || length(v) > 5) errornote = "WARNING: The non-range value {$k: $v} is not allowed. It should be specified in a range format, i.e. min-max." - println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") continue end end @@ -766,7 +768,7 @@ function paraphrase(text2textInstructLLM::Function, text::String) # sometime the model response like this "here's how I would respond: ..." 
if occursin("respond:", response) errornote = "You don't need to intro your response" - error("\nparaphrase() response contain : ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + error("\nparaphrase() response contain : ", @__FILE__, ":", @__LINE__, " $(Dates.now())") end response = GeneralUtils.remove_french_accents(response) response = replace(response, '*'=>"") @@ -776,7 +778,7 @@ function paraphrase(text2textInstructLLM::Function, text::String) # check whether response has all header detected_kw = GeneralUtils.detect_keyword(header, response) - if sum(values(detected_kw)) < length(header) + if 0 ∈ values(detected_kw) errornote = "\nYiemAgent paraphrase() response does not have all header" continue elseif sum(values(detected_kw)) > length(header) @@ -789,7 +791,7 @@ function paraphrase(text2textInstructLLM::Function, text::String) for i ∈ [:paraphrase] if length(JSON3.write(responsedict[i])) == 0 - error("$i is empty ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + error("$i is empty ", @__FILE__, ":", @__LINE__, " $(Dates.now())") end end @@ -801,7 +803,7 @@ function paraphrase(text2textInstructLLM::Function, text::String) end end - println("\nparaphrase() ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("\nparaphrase() ", @__FILE__, ":", @__LINE__, " $(Dates.now())") pprintln(Dict(responsedict)) result = responsedict[:paraphrase] @@ -812,7 +814,7 @@ function paraphrase(text2textInstructLLM::Function, text::String) showerror(io, e) errorMsg = String(take!(io)) st = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace())) - println("\nAttempt $attempt. Error occurred: $errorMsg\n$st ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("\nAttempt $attempt. 
Error occurred: $errorMsg\n$st ", @__FILE__, ":", @__LINE__, " $(Dates.now())") end end error("paraphrase() failed to generate a response") @@ -978,7 +980,7 @@ end # ] # # put in model format -# prompt = GeneralUtils.formatLLMtext(_prompt; formatname="llama3instruct") +# prompt = GeneralUtils.formatLLMtext(_prompt; formatname="qwen") # prompt *= # """ # <|start_header_id|>assistant<|end_header_id|> @@ -1010,7 +1012,7 @@ end # state[:isterminal] = true # state[:reward] = 1 # end -# println("--> 5 Evaluator ", Dates.now(), " ", @__FILE__, " ", @__LINE__) +# println("--> 5 Evaluator ", @__FILE__, ":", @__LINE__, " $(Dates.now())") # pprintln(Dict(responsedict)) # return responsedict[:score] # catch e diff --git a/src/util.jl b/src/util.jl index 37b92ac..bbc92e1 100644 --- a/src/util.jl +++ b/src/util.jl @@ -122,47 +122,53 @@ This function takes in a vector of dictionaries and outputs a single string wher # Arguments - `vecd::Vector` - a vector of dictionaries + A vector of dictionaries containing chat messages - `withkey::Bool` - whether to include the key in the output text. Default is true + Whether to include the name as a prefix in the output text. Default is true + - `range::Union{Nothing,UnitRange,Int}` + Optional range of messages to include. 
If nothing, includes all messages -# Return - a string with the formatted dictionaries +# Returns + A formatted string where each line contains either: + - If withkey=true: "name> message\n" + - If withkey=false: "message\n" # Example -```jldoctest + julia> using Revise julia> using GeneralUtils julia> vecd = [Dict(:name => "John", :text => "Hello"), Dict(:name => "Jane", :text => "Goodbye")] julia> GeneralUtils.vectorOfDictToText(vecd, withkey=true) "John> Hello\nJane> Goodbye\n" ``` -# Signature """ -function chatHistoryToText(vecd::Vector; withkey=true)::String +function chatHistoryToText(vecd::Vector; withkey=true, range=nothing)::String # Initialize an empty string to hold the final text text = "" + # Get the elements within the specified range, or all elements if no range provided + elements = isnothing(range) ? vecd : vecd[range] + # Determine whether to include the key in the output text or not if withkey - # Loop through each dictionary in the input vector - for d in vecd - # Extract the 'name' and 'text' keys from the dictionary - name = d[:name] - _text = d[:text] - - # Append the formatted string to the text variable - text *= "$name> $_text \n" + # Loop through each dictionary in the input vector + for d in elements + # Extract the 'name' and 'text' keys from the dictionary + name = d[:name] + _text = d[:text] + + # Append the formatted string to the text variable + text *= "$name:> $_text \n" end else - # Loop through each dictionary in the input vector - for d in vecd - # Iterate over all key-value pairs in the dictionary - for (k, v) in d - # Append the formatted string to the text variable - text *= "$v \n" - end - end + # Loop through each dictionary in the input vector + for d in elements + # Iterate over all key-value pairs in the dictionary + for (k, v) in d + # Append the formatted string to the text variable + text *= "$v \n" + end + end end # Return the final text @@ -191,6 +197,35 @@ end +""" Create a dictionary representing an event with 
optional details. + +# Arguments + - `event_description::Union{String, Nothing}` + A description of the event + - `timestamp::Union{DateTime, Nothing}` + The time when the event occurred + - `subject::Union{String, Nothing}` + The subject or entity associated with the event + - `thought::Union{AbstractDict, Nothing}` + Any associated thoughts or metadata + - `actionname::Union{String, Nothing}` + The name of the action performed (e.g., "CHAT", "CHECKINVENTORY") + - `actioninput::Union{String, Nothing}` + Input or parameters for the action + - `location::Union{String, Nothing}` + Where the event took place + - `equipment_used::Union{String, Nothing}` + Equipment involved in the event + - `material_used::Union{String, Nothing}` + Materials used during the event + - `outcome::Union{String, Nothing}` + The result or consequence of the event after action execution + - `note::Union{String, Nothing}` + Additional notes or comments + +# Returns + A dictionary with event details as symbol-keyed key-value pairs +""" function eventdict(; event_description::Union{String, Nothing}=nothing, timestamp::Union{DateTime, Nothing}=nothing, @@ -220,9 +255,33 @@ function eventdict(; end -function createTimeline(memory::T1; skiprecent::Integer=0) where {T1<:AbstractVector} - events = memory[1:end-skiprecent] +""" Create a formatted timeline string from a sequence of events. 
+# Arguments + - `events::T1` + Vector of event dictionaries containing subject, actioninput and optional outcome fields + Each event dictionary should have the following keys: + - :subject - The subject or entity performing the action + - :actioninput - The action or input performed by the subject + - :outcome - (Optional) The result or outcome of the action + +# Returns + - `timeline::String` + A formatted string representing the events with their subjects, actions, and optional outcomes + Format: "{subject}> {actioninput} {outcome}\n" for each event + +# Example + +events = [ + Dict(:subject => "User", :actioninput => "Hello", :outcome => nothing), + Dict(:subject => "Assistant", :actioninput => "Hi there!", :outcome => "with a smile") +] +timeline = createTimeline(events) +# User> Hello +# Assistant> Hi there! with a smile + +""" +function createTimeline(events::T1) where {T1<:AbstractVector} timeline = "" for (i, event) in enumerate(events) if event[:outcome] === nothing @@ -236,8 +295,6 @@ function createTimeline(memory::T1; skiprecent::Integer=0) where {T1<:AbstractVe end - - # """ Convert a single chat dictionary into LLM model instruct format. 
# # Llama 3 instruct format example diff --git a/test/Manifest.toml b/test/Manifest.toml new file mode 100644 index 0000000..83f035b --- /dev/null +++ b/test/Manifest.toml @@ -0,0 +1,41 @@ +# This file is machine-generated - editing it directly is not advised + +julia_version = "1.11.4" +manifest_format = "2.0" +project_hash = "71d91126b5a1fb1020e1098d9d492de2a4438fd2" + +[[deps.Base64]] +uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" +version = "1.11.0" + +[[deps.InteractiveUtils]] +deps = ["Markdown"] +uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" +version = "1.11.0" + +[[deps.Logging]] +uuid = "56ddb016-857b-54e1-b83d-db4d58db5568" +version = "1.11.0" + +[[deps.Markdown]] +deps = ["Base64"] +uuid = "d6f4376e-aef5-505a-96c1-9c027394607a" +version = "1.11.0" + +[[deps.Random]] +deps = ["SHA"] +uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" +version = "1.11.0" + +[[deps.SHA]] +uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce" +version = "0.7.0" + +[[deps.Serialization]] +uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b" +version = "1.11.0" + +[[deps.Test]] +deps = ["InteractiveUtils", "Logging", "Random", "Serialization"] +uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40" +version = "1.11.0" diff --git a/test/Project.toml b/test/Project.toml new file mode 100644 index 0000000..0c36332 --- /dev/null +++ b/test/Project.toml @@ -0,0 +1,2 @@ +[deps] +Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" diff --git a/test/test_1.jl b/test/test_1.jl index e23b3fd..057a017 100644 --- a/test/test_1.jl +++ b/test/test_1.jl @@ -36,13 +36,13 @@ function executeSQLVectorDB(sql) return result end -function text2textInstructLLM(prompt::String; maxattempt=2) +function text2textInstructLLM(prompt::String; maxattempt::Integer=2, modelsize::String="medium") msgMeta = GeneralUtils.generate_msgMeta( config[:externalservice][:loadbalancer][:mqtttopic]; msgPurpose="inference", senderName="yiemagent", senderId=sessionId, - receiverName="text2textinstruct_small", + receiverName="text2textinstruct_$modelsize", 
mqttBrokerAddress=config[:mqttServerInfo][:broker], mqttBrokerPort=config[:mqttServerInfo][:port], ) @@ -94,7 +94,11 @@ function getEmbedding(text::T) where {T<:AbstractString} :text => [text] # must be a vector of string ) ) - response = GeneralUtils.sendReceiveMqttMsg(outgoingMsg; timeout=120) + + #BUG it returns nothing from ollama + response = GeneralUtils.sendReceiveMqttMsg(outgoingMsg; timeout=120, maxattempt=2) + + embedding = response[:response][:embeddings] return embedding end @@ -234,7 +238,7 @@ a = YiemAgent.sommelier( ) while true - print("your respond: ") + print("\nyour respond: ") user_answer = readline() response = YiemAgent.conversation(a, Dict(:text=> user_answer)) println("\n$response") @@ -244,14 +248,13 @@ end # response = YiemAgent.conversation(a, Dict(:text=> "I want to get a French red wine under 100.")) - - - - - - - - +""" +hello I want to get a bottle of red wine for my boss. I have a budget around 50 dollars. Show me some options. + +I have no idea about his wine taste but he likes spicy food. + + +""" From c21f943b12292794e33209b74d8059181589d7b3 Mon Sep 17 00:00:00 2001 From: narawat lamaiin Date: Tue, 1 Apr 2025 21:17:15 +0700 Subject: [PATCH 5/6] update --- src/interface.jl | 57 +++++++++++++++++++++++++++++----------------- src/llmfunction.jl | 24 +++++++++++-------- src/util.jl | 18 +++++++++++---- test/test_1.jl | 5 +--- 4 files changed, 64 insertions(+), 40 deletions(-) diff --git a/src/interface.jl b/src/interface.jl index 622ef50..cff5752 100644 --- a/src/interface.jl +++ b/src/interface.jl @@ -200,9 +200,8 @@ function decisionMaker(a::T; recent::Integer=10)::Dict{Symbol,Any} where {T<:age 2) Plan: Based on the current situation, state a complete action plan to complete the task. Be specific. 3) Action_name: (Typically corresponds to the execution of the first step in your plan) Can be one of the following function names: - CHATBOX which you can use to talk with the user. The input is your intentions for the dialogue. 
Be specific. - - CHECKINVENTORY which you can use to check info about wine you want in your inventory. The input is a search term in verbal English. - Bad query example 1: red wine that pair well with spicy food. - Bad query example 2: white wine that goes well with party food. + - CHECKINVENTORY which you can use to check info about wine you want in your inventory. The input is a search term is verbal english and it should includes - winery, wine name, vintage, region, country, wine type, grape varietal, tasting notes, wine price, occasion, food to be paired with wine, intensity, tannin, sweetness, acidity. + Bad query example: red wine that pair well with spicy food. - ENDCONVERSATION which you can use when the user has finished their conversation with you, so that you can properly end the conversation. Input is "NA". 4) Action_input: input of the action @@ -264,6 +263,10 @@ function decisionMaker(a::T; recent::Integer=10)::Dict{Symbol,Any} where {T<:age response = nothing # placeholder for show when error msg show up for attempt in 1:10 + if attempt > 1 + println("\nYiemAgent decisionMaker() attempt $attempt/10 ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + end + QandA = generatequestion(a, a.func[:text2textInstructLLM]; recent=3) usermsg = @@ -298,17 +301,22 @@ function decisionMaker(a::T; recent::Integer=10)::Dict{Symbol,Any} where {T<:age end if count > 1 errornote = "You must use only one function" - println("Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + println("\nYiemAgent decisionMaker() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") continue end # check whether response has all header detected_kw = GeneralUtils.detect_keyword(header, response) + kwvalue = [i for i in values(detected_kw)] + zeroind = findall(x -> x == 0, kwvalue) + missingkeys = [header[i] for i in zeroind] if 0 ∈ values(detected_kw) - errornote = "\nYiemAgent decisionMaker() response does not have all header" + errornote = "$missingkeys 
are missing from your previous response" + println("\nYiemAgent decisionMaker() $errornote:\n $response ", @__FILE__, ":", @__LINE__, " $(Dates.now())") continue elseif sum(values(detected_kw)) > length(header) - errornote = "\nYiemAgent decisionMaker() response has duplicated header" + errornote = "Your response has duplicated points" + println("\nYiemAgent decisionMaker() $errornote: $response ", @__FILE__, ":", @__LINE__, " $(Dates.now())") continue end @@ -317,7 +325,7 @@ function decisionMaker(a::T; recent::Integer=10)::Dict{Symbol,Any} where {T<:age if responsedict[:action_name] ∉ ["CHATBOX", "CHECKINVENTORY", "ENDCONVERSATION"] errornote = "You must use the given functions" - println("Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + println("\nYiemAgent decisionMaker() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") continue end @@ -325,7 +333,7 @@ function decisionMaker(a::T; recent::Integer=10)::Dict{Symbol,Any} where {T<:age for i ∈ Symbol.(dictkey) if length(responsedict[i]) == 0 errornote = "$i is empty" - println("Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + println("\nYiemAgent decisionMaker() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") checkFlag = true break end @@ -338,7 +346,7 @@ function decisionMaker(a::T; recent::Integer=10)::Dict{Symbol,Any} where {T<:age matchkeys = GeneralUtils.findMatchingDictKey(responsedict, i) if length(matchkeys) > 1 errornote = "DecisionMaker has more than one key per categories" - println("Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + println("\nYiemAgent decisionMaker() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") checkFlag = true break end @@ -372,7 +380,7 @@ function decisionMaker(a::T; recent::Integer=10)::Dict{Symbol,Any} where {T<:age isWineInEvent == false errornote = "Note: Before recommending a wine, ensure it's in your inventory. Check your stock first." 
- println("Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + println("\nYiemAgent decisionMaker() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") continue end end @@ -1116,14 +1124,14 @@ function generatechat(a::sommelier, thoughtDict) You should only respond in format as described below: Dialogue: ... Here are some examples: - Your ongoing conversation with the user: "user> hello, I need a new car\n" Additional info: "Car previously found in your inventory: 1) Toyota Camry 2020 2) Honda Civic 2021 3) Ford Mustang 2022" - Your thoughts: "I should recommend the car we have to the user." + Your thoughts: "I should recommend the car we have found in our inventory to the user." + Your ongoing conversation with the user: "user> hello, I need a new car\n" Dialogue: "We have a variety of cars available, including the Toyota Camry 2020, the Honda Civic 2021, and the Ford Mustang 2022. Which one would you like to see?" Let's begin! """ - #[WORKING] remove "chat" + header = ["Dialogue:"] dictkey = ["dialogue"] @@ -1145,6 +1153,7 @@ function generatechat(a::sommelier, thoughtDict) for attempt in 1:10 if attempt > 1 # use to prevent LLM generate the same respond over and over + println("\nYiemAgent generatchat() attempt $attempt/10 ", @__FILE__, ":", @__LINE__, " $(Dates.now())") yourthought1 = paraphrase(a.func[:text2textInstructLLM], yourthought) else yourthought1 = yourthought @@ -1166,12 +1175,15 @@ function generatechat(a::sommelier, thoughtDict) # put in model format prompt = GeneralUtils.formatLLMtext(_prompt; formatname="qwen") - response = a.func[:text2textInstructLLM](prompt) + # sometime the model response like this "here's how I would respond: ..." 
if occursin("respond:", response) - errornote = "You don't need to intro your response" - error("generatechat() response contain : ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + errornote = "You don't need to put 'response:' in your response" + println("ERROR YiemAgent generatechat() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + elseif occursin("Your thoughts:", response) || occursin("your thoughts:", response) + errornote = "You don't need to put 'Your thoughts:' in your response" + println("ERROR YiemAgent generatechat() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") end response = GeneralUtils.remove_french_accents(response) response = replace(response, '*'=>"") @@ -1651,7 +1663,7 @@ function generatequestion(a, text2textInstructLLM::Function; # check whether response has all header detected_kw = GeneralUtils.detect_keyword(header, response) if 0 ∈ values(detected_kw) - errornote = "\nYiemAgent generatequestion() response does not have all header" + errornote = "\nresponse does not have all header" continue elseif sum(values(detected_kw)) > length(header) errornote = "\nYiemAgent generatequestion() response has duplicated header" @@ -1714,6 +1726,10 @@ function generateSituationReport(a, text2textInstructLLM::Function; skiprecent:: errornote = "" response = nothing # store for show when error msg show up for attempt in 1:10 + if attempt > 1 # use to prevent LLM generate the same respond over and over + println("\nYiemAgent generateSituationReport() attempt $attempt/10 ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + end + usermsg = """ Total events: $(length(events)) Events timeline: $timeline @@ -1738,11 +1754,11 @@ function generateSituationReport(a, text2textInstructLLM::Function; skiprecent:: missingkeys = [header[i] for i in zeroind] if 0 ∈ values(detected_kw) errornote = "$missingkeys are missing in your previous attempt" - println("\nYiemAgent generateSituationReport() $errornote ", @__FILE__, ":", @__LINE__, " 
$(Dates.now())") + println("\nERROR YiemAgent generateSituationReport() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") continue elseif sum(values(detected_kw)) > length(header) errornote = "Your previous response has duplicated events" - println("\nYiemAgent generateSituationReport() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + println("\nERROR YiemAgent generateSituationReport() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") continue end @@ -1757,8 +1773,8 @@ function generateSituationReport(a, text2textInstructLLM::Function; skiprecent:: error("generateSituationReport failed to generate a response ", response) end + function detectWineryName(a, text) - systemmsg = """ You are a sommelier of a wine store. @@ -1791,7 +1807,6 @@ function detectWineryName(a, text) usermsg = """ Text: $text """ - _prompt = [ Dict(:name => "system", :text => systemmsg), diff --git a/src/llmfunction.jl b/src/llmfunction.jl index 4f13881..c16bdaf 100644 --- a/src/llmfunction.jl +++ b/src/llmfunction.jl @@ -389,6 +389,10 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2< for attempt in 1:10 #[WORKING] I should add generatequestion() + if attempt > 1 + println("\nYiemAgent extractWineAttributes_1() attempt $attempt/10 ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + end + usermsg = """ User's query: $input @@ -449,7 +453,7 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2< # check whether wine_price is in ranged number if !occursin('-', responsedict[:wine_price]) errornote = "wine_price must be a range number" - println("Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + println("ERROR YiemAgent extractWineAttributes_1() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") checkFlag = true break end @@ -464,7 +468,7 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2< # price range like 100-100 is not good if 
minprice == maxprice errornote = "wine_price with minimum equals to maximum is not valid" - println("Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + println("ERROR YiemAgent extractWineAttributes_1() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") checkFlag = true break end @@ -480,14 +484,14 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2< content = [content] end - for x in content #check whether price are mentioned in the input - if !occursin("NA", responsedict[j]) && !occursin(x, input) - errornote = "$x is not mentioned in the user query, you must only use the info from the query." - println("Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") - checkFlag == true - break - end - end + # for x in content #check whether price are mentioned in the input + # if !occursin("NA", responsedict[j]) && !occursin(x, input) + # errornote = "$x is not mentioned in the user query, you must only use the info from the query." + # println("ERROR YiemAgent extractWineAttributes_1() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + # checkFlag == true + # break + # end + # end end end end diff --git a/src/util.jl b/src/util.jl index bbc92e1..ba3135a 100644 --- a/src/util.jl +++ b/src/util.jl @@ -268,7 +268,7 @@ end # Returns - `timeline::String` A formatted string representing the events with their subjects, actions, and optional outcomes - Format: "{subject}> {actioninput} {outcome}\n" for each event + Format: "{index}) {subject}> {actioninput} {outcome}\n" for each event # Example @@ -277,24 +277,32 @@ events = [ Dict(:subject => "Assistant", :actioninput => "Hi there!", :outcome => "with a smile") ] timeline = createTimeline(events) -# User> Hello -# Assistant> Hi there! with a smile +# 1) User> Hello +# 2) Assistant> Hi there! 
with a smile """ function createTimeline(events::T1) where {T1<:AbstractVector} + # Initialize empty timeline string timeline = "" + + # Iterate through events with index for (i, event) in enumerate(events) + # If no outcome exists, format without outcome if event[:outcome] === nothing - timeline *= "$i) $(event[:subject])> $(event[:actioninput])\n" + timeline *= "Event_$i) $(event[:subject])> $(event[:actioninput])\n" + # If outcome exists, include it in formatting else - timeline *= "$i) $(event[:subject])> $(event[:actioninput]) $(event[:outcome])\n" + timeline *= "Event_$i) $(event[:subject])> $(event[:actioninput]) $(event[:outcome])\n" end end + # Return formatted timeline string return timeline end + + # """ Convert a single chat dictionary into LLM model instruct format. # # Llama 3 instruct format example diff --git a/test/test_1.jl b/test/test_1.jl index 057a017..27ddfb5 100644 --- a/test/test_1.jl +++ b/test/test_1.jl @@ -95,10 +95,7 @@ function getEmbedding(text::T) where {T<:AbstractString} ) ) - #BUG it returns nothing from ollama - response = GeneralUtils.sendReceiveMqttMsg(outgoingMsg; timeout=120, maxattempt=2) - - + response = GeneralUtils.sendReceiveMqttMsg(outgoingMsg; timeout=120, maxattempt=3) embedding = response[:response][:embeddings] return embedding end From c0edf7dadf91e2de00f273113b5912b2c142a4ab Mon Sep 17 00:00:00 2001 From: narawat lamaiin Date: Fri, 4 Apr 2025 15:04:02 +0700 Subject: [PATCH 6/6] update --- src/interface.jl | 427 +++++++++++++++++++++++++---------------------- src/util.jl | 22 ++- test/test_1.jl | 2 +- 3 files changed, 247 insertions(+), 204 deletions(-) diff --git a/src/interface.jl b/src/interface.jl index cff5752..6cfb9c4 100644 --- a/src/interface.jl +++ b/src/interface.jl @@ -4,7 +4,7 @@ export addNewMessage, conversation, decisionMaker, reflector, generatechat, generalconversation, detectWineryName, generateSituationReport using JSON3, DataStructures, Dates, UUIDs, HTTP, Random, PrettyPrinting, Serialization, - 
DataFrames + DataFrames, CSV using GeneralUtils using ..type, ..util, ..llmfunction @@ -97,7 +97,8 @@ julia> output_thoughtDict = Dict( # Signature """ -function decisionMaker(a::T; recent::Integer=10)::Dict{Symbol,Any} where {T<:agent} +function decisionMaker(a::T; recent::Integer=10 + ) where {T<:agent} # lessonDict = copy(JSON3.read("lesson.json")) @@ -126,9 +127,9 @@ function decisionMaker(a::T; recent::Integer=10)::Dict{Symbol,Any} where {T<:age recent_ind = GeneralUtils.recentElementsIndex(length(a.memory[:events]), recent) recentevents = a.memory[:events][recent_ind] - timeline = createTimeline(recentevents) + timeline = createTimeline(recentevents; eventindex=recent_ind) - #[TESTING] recap as caching + # recap as caching # query similar result from vectorDB recapkeys = keys(a.memory[:recap]) _recapkeys_vec = [i for i in recapkeys] @@ -160,59 +161,60 @@ function decisionMaker(a::T; recent::Integer=10)::Dict{Symbol,Any} where {T<:age else systemmsg = """ - Your name is $(a.name). You are a helpful English-speaking assistant, acting as a polite, website-based sommelier for $(a.retailername)'s wine store. - Your goal includes: - 1) Establish a connection with the customer by greeting them warmly - 2) Guide them to select the best wines only from your store's inventory that align with their preferences + Your name is $(a.name). You are a helpful English-speaking assistant, acting as a polite, website-based sommelier for $(a.retailername)'s wine store. 
+ Your goal includes: + 1) Establish a connection with the customer by greeting them warmly + 2) Guide them to select the best wines only from your store's inventory that align with their preferences - Your responsibility includes: - 1) Make an informed decision about what you need to do to achieve the goal - 2) Thanks the user when they don't need any further assistance and invite them to comeback next time + Your responsibility includes: + 1) Make an informed decision about what you need to do to achieve the goal + 2) Thanks the user when they don't need any further assistance and invite them to comeback next time - Your responsibility does NOT includes: - 1) Requesting the user to place an order, make a purchase, or confirm the order. These are the job of our sales team at the store. - 2) Processing sales orders or engaging in any other sales-related activities. These are the job of our sales team at the store. - 3) Answering questions or offering additional services beyond those related to your store's wine recommendations such as discounts, quantity, rewards programs, promotions, delivery options, shipping, boxes, gift wrapping, packaging, personalized messages or something similar. These are the job of our sales team at the store. + Your responsibility does NOT includes: + 1) Requesting the user to place an order, make a purchase, or confirm the order. These are the job of our sales team at the store. + 2) Processing sales orders or engaging in any other sales-related activities. These are the job of our sales team at the store. + 3) Answering questions or offering additional services beyond those related to your store's wine recommendations such as discounts, quantity, rewards programs, promotions, delivery options, shipping, boxes, gift wrapping, packaging, personalized messages or something similar. These are the job of our sales team at the store. 
- At each round of conversation, you will be given the current situation: - Your recent events: latest 5 events of the situation - Your Q&A: the question and answer you have asked yourself + At each round of conversation, you will be given the following information: + Your recent events: latest 5 events of the situation + Your Q&A: the question and answer you have asked yourself - You must follow the following guidelines: - - Generally speaking, your inventory has some wines from France, the United States, Australia, Spain, and Italy, but you won't know exactly until you check your inventory. - - All wines in your inventory are always in stock. - - Approach each customer with open-ended questions to understand their preferences, budget, and occasion. This will help you guide the conversation naturally while gathering essential insights. Once you have this information, you can efficiently check your inventory for the best match. - - Do not ask the user about wine's flavor e.g. floral, citrusy, nutty or some thing similar as these terms cannot be used to search the database. - - Once the user has selected their wine, ask the user if they need any further assistance. Do not offer any additional services. If the user doesn't need any further assistance, say goodbye and invite them to come back next time. - - Spicy foods should not be paired with medium and full-bodied red wines. + You must follow the following guidelines: + - Focus on the latest event + - Generally speaking, your inventory has some wines from France, the United States, Australia, Spain, and Italy, but you won't know exactly until you check your inventory. + - All wines in your inventory are always in stock + - Approach each customer with open-ended questions to understand their preferences, budget, and occasion. This will help you guide the conversation naturally while gathering essential insights. Once you have this information, you can efficiently check your inventory for the best match. 
+ - Do not ask the user about wine's flavor e.g. floral, citrusy, nutty or some thing similar as these terms cannot be used to search the database. + - Once the user has selected their wine, ask the user if they need any further assistance. Do not offer any additional services. If the user doesn't need any further assistance, say goodbye and invite them to come back next time. + - Spicy foods should not be paired with medium and full-bodied red wines. - You should follow the following guidelines: - - When searching an inventory, search as broadly as possible based on the information you have gathered so far. - - Encourage the customer to explore different options and try new things. - - Sometimes, the item a user desires might not be available in your inventory. In such cases, inform the user that the item is unavailable and suggest an alternative instead. + You should follow the following guidelines: + - When searching an inventory, search as broadly as possible based on the information you have gathered so far. + - Encourage the customer to explore different options and try new things. + - Sometimes, the item a user desires might not be available in your inventory. In such cases, inform the user that the item is unavailable and suggest an alternative instead. - For your information: - - Your store carries only wine. - - Vintage 0 means non-vintage. + For your information: + - Your store carries only wine. + - Vintage 0 means non-vintage. - You should then respond to the user with interleaving Thought, Plan, Action_name, Action_input: - 1) Thought: Articulate your current understanding and consider the present situation. - 2) Plan: Based on the current situation, state a complete action plan to complete the task. Be specific. - 3) Action_name: (Typically corresponds to the execution of the first step in your plan) Can be one of the following function names: - - CHATBOX which you can use to talk with the user. The input is your intentions for the dialogue. 
Be specific. - - CHECKINVENTORY which you can use to check info about wine you want in your inventory. The input is a search term is verbal english and it should includes - winery, wine name, vintage, region, country, wine type, grape varietal, tasting notes, wine price, occasion, food to be paired with wine, intensity, tannin, sweetness, acidity. - Bad query example: red wine that pair well with spicy food. - - - ENDCONVERSATION which you can use when the user has finished their conversation with you, so that you can properly end the conversation. Input is "NA". - 4) Action_input: input of the action + You should then respond to the user with interleaving Thought, Plan, Action_name, Action_input: + 1) Thought: Articulate your current understanding and consider the current situation. + 2) Plan: Based on the current situation, state a complete action plan to complete the task. Be specific. + 3) Action_name: (Typically corresponds to the execution of the first step in your plan) Can be one of the following function names: + - CHATBOX which you can use to talk with the user. The input is your intentions for the dialogue. Be specific. + - CHECKINVENTORY which you can use to check info about wine you want in your inventory. The input is a search term is verbal english and it should includes - winery, wine name, vintage, region, country, wine type, grape varietal, tasting notes, wine price, occasion, food to be paired with wine, intensity, tannin, sweetness, acidity. + Invalid query example: red wine that pair well with spicy food. + - PRESENTBOX which you can use to present wines you have found in your inventory to the user. The input are wine names that you want to present. + - ENDCONVERSATION which you can use when the user has finished their conversation with you, so that you can properly end the conversation. Input is "NA". + 4) Action_input: input of the action - You should only respond in format as described below: - Thought: ... - Plan: ... - Action_name: ... 
- Action_input: ... + You should only respond in format as described below: + Thought: ... + Plan: ... + Action_name: ... + Action_input: ... - Let's begin! + Let's begin! """ header = ["Thought:", "Plan:", "Action_name:", "Action_input:"] @@ -220,32 +222,6 @@ function decisionMaker(a::T; recent::Integer=10)::Dict{Symbol,Any} where {T<:age chathistory = chatHistoryToText(a.chathistory) - # check if winename in shortmem occurred in chathistory. if not, skip decision and imediately use PRESENTBOX - if length(a.memory[:shortmem][:found_wine]) != 0 - # check if wine name mentioned in recentevents, only check first wine name is enough - # because agent will recommend every wines it found each time. - winenames = [] - for wine in a.memory[:shortmem][:found_wine] - push!(winenames, wine["wine_name"]) - end - - for winename in winenames - if !occursin(winename, chathistory) - println("\nYiem decisionMaker() found wines from DB ", @__FILE__, ":", @__LINE__, " $(Dates.now())") - d = Dict( - :thought=> "I understand that the customer is looking for a wine that matches their intention and budget. I checked the inventory and found wines that match the customer's criteria. I will present the wines to the customer.", - :plan=> "1) Provide detailed introductions of the wines you just found to the customer. - 2) Explain how the wine could match the customer's intention and what its effects might mean for the customer's experience. - 3) If multiple wines are available, highlight their differences and provide a comprehensive comparison of how each option aligns with the customer's intention and what the potential effects of each option could mean for the customer's experience. - 4) Provide your personal recommendation based on your understanding of the customer's preferences.", - :action_name=> "PRESENTBOX", - :action_input=> "") - a.memory[:shortmem][:found_wine] = [] # clear because PRESENTBOX command is issued. 
This is to prevent decisionMaker() keep presenting the same wines - return d - end - end - end - context = # may b add wine name instead of the hold wine data is better if length(a.memory[:shortmem][:available_wine]) != 0 winenames = [] @@ -277,14 +253,41 @@ function decisionMaker(a::T; recent::Integer=10)::Dict{Symbol,Any} where {T<:age $errornote """ - _prompt = + unformatPrompt = [ Dict(:name => "system", :text => systemmsg), Dict(:name => "user", :text => usermsg) ] + #BUG found wine is "count 0" invalid return from CHECKINVENTORY() + # check if winename in shortmem occurred in chathistory. if not, skip decision and imediately use PRESENTBOX + # if length(a.memory[:shortmem][:found_wine]) != 0 + # # check if wine name mentioned in recentevents, only check first wine name is enough + # # because agent will recommend every wines it found each time. + # winenames = [] + # for wine in a.memory[:shortmem][:found_wine] + # push!(winenames, wine["wine_name"]) + # end + + # for winename in winenames + # if !occursin(winename, chathistory) + # println("\nYiem decisionMaker() found wines from DB ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + # d = Dict( + # :thought=> "The user is looking for a wine that matches their intention and budget. I've checked the inventory and found wines that match the customer's criteria. I will present the wines to the customer.", + # :plan=> "1) I'll provide detailed introductions of the wines I just found to the user. 2) I'll explain how the wine could match the user's intention and what its effects might mean for the user's experience. 3) If multiple wines are available, I'll highlight their differences and provide a comprehensive comparison of how each option aligns with the user's intention and what the potential effects of each option could mean for the user's experience. 
4) I'll provide my personal recommendation.", + # :action_name=> "PRESENTBOX", + # :action_input=> "I need to present to the user the following wines: $winenames") + # a.memory[:shortmem][:found_wine] = [] # clear because PRESENTBOX command is issued. This is to prevent decisionMaker() keep presenting the same wines + # result = (systemmsg=systemmsg, usermsg=usermsg, unformatPrompt=unformatPrompt, result=d) + # println("\nYiem decisionMaker() ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + # pprintln(Dict(d)) + # return result + # end + # end + # end + # change qwen format put in model format - prompt = GeneralUtils.formatLLMtext(_prompt; formatname="qwen") + prompt = GeneralUtils.formatLLMtext(unformatPrompt; formatname="qwen") response = a.func[:text2textInstructLLM](prompt) response = GeneralUtils.remove_french_accents(response) @@ -294,7 +297,7 @@ function decisionMaker(a::T; recent::Integer=10)::Dict{Symbol,Any} where {T<:age # check if response contain more than one functions from ["CHATBOX", "CHECKINVENTORY", "ENDCONVERSATION"] count = 0 - for i ∈ ["CHATBOX", "CHECKINVENTORY", "ENDCONVERSATION"] + for i ∈ ["CHATBOX", "CHECKINVENTORY", "PRESENTBOX", "ENDCONVERSATION"] if occursin(i, response) count += 1 end @@ -323,8 +326,8 @@ function decisionMaker(a::T; recent::Integer=10)::Dict{Symbol,Any} where {T<:age responsedict = GeneralUtils.textToDict(response, header; dictKey=dictkey, symbolkey=true) - if responsedict[:action_name] ∉ ["CHATBOX", "CHECKINVENTORY", "ENDCONVERSATION"] - errornote = "You must use the given functions" + if responsedict[:action_name] ∉ ["CHATBOX", "CHECKINVENTORY", "PRESENTBOX", "ENDCONVERSATION"] + errornote = "Your previous attempt didn't use the given functions" println("\nYiemAgent decisionMaker() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") continue end @@ -345,7 +348,7 @@ function decisionMaker(a::T; recent::Integer=10)::Dict{Symbol,Any} where {T<:age for i ∈ Symbol.(dictkey) matchkeys = 
GeneralUtils.findMatchingDictKey(responsedict, i) if length(matchkeys) > 1 - errornote = "DecisionMaker has more than one key per categories" + errornote = "Your previous attempt has more than one key per categories" println("\nYiemAgent decisionMaker() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") checkFlag = true break @@ -353,6 +356,15 @@ function decisionMaker(a::T; recent::Integer=10)::Dict{Symbol,Any} where {T<:age end checkFlag == true ? continue : nothing + # check if action_name = CHECKINVENTORY and action_input has the words "pairs well" or + # "pair well" in it because it is not a valid query. + detected_kw = GeneralUtils.detect_keyword(["pair", "pairs", "pairing", "well"], responsedict[:action_input]) + if responsedict[:action_name] == "CHECKINVENTORY" && sum(values(detected_kw)) != 0 + errornote = "Your previous attempt has invalid query" + println("\nYiemAgent decisionMaker() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + continue + end + println("\nYiem decisionMaker() ", @__FILE__, ":", @__LINE__, " $(Dates.now())") pprintln(Dict(responsedict)) @@ -386,6 +398,20 @@ function decisionMaker(a::T; recent::Integer=10)::Dict{Symbol,Any} where {T<:age end delete!(responsedict, :mentioned_winery) + responsedict[:systemmsg] = systemmsg + responsedict[:usermsg] = usermsg + responsedict[:unformatPrompt] = unformatPrompt + responsedict[:QandA] = QandA + + # store responsedict in decisionlog.csv. 
if it is the first time, create the file + if !isfile("/appfolder/app/decisionlog.csv") + CSV.write(decisionlog, responsedict) + else + CSV.write(decisionlog, responsedict, append=true) + end + + + return responsedict end @@ -813,9 +839,8 @@ julia> # Signature """ function think(a::T)::NamedTuple{(:actionname, :result),Tuple{String,String}} where {T<:agent} - a.memory[:recap] = generateSituationReport(a, a.func[:text2textInstructLLM]; skiprecent=0) - thoughtDict = decisionMaker(a; recent=3) + thoughtDict = decisionMaker(a; recent=5) actionname = thoughtDict[:action_name] actioninput = thoughtDict[:action_input] @@ -844,22 +869,9 @@ function think(a::T)::NamedTuple{(:actionname, :result),Tuple{String,String}} wh errormsg::Union{AbstractString,Nothing} = haskey(response, :errormsg) ? response[:errormsg] : nothing success::Bool = haskey(response, :success) ? response[:success] : false - # manage memory (pass msg to generatechat) - if actionname ∈ ["CHATBOX", "PRESENTBOX", "ENDCONVERSATION"] - chatresponse = generatechat(a, thoughtDict) - push!(a.memory[:events], - eventdict(; - event_description="the assistant talks to the user.", - timestamp=Dates.now(), - subject="assistant", - thought=thoughtDict, - actionname=actionname, - actioninput=actioninput, - ) - ) - result = chatresponse - # if actionname ∈ ["CHATBOX", "ENDCONVERSATION"] - # # chatresponse = generatechat(a, thoughtDict) + # # manage memory (pass msg to generatechat) + # if actionname ∈ ["CHATBOX", "PRESENTBOX", "ENDCONVERSATION"] + # chatresponse = generatechat(a, thoughtDict) # push!(a.memory[:events], # eventdict(; # event_description="the assistant talks to the user.", @@ -870,20 +882,33 @@ function think(a::T)::NamedTuple{(:actionname, :result),Tuple{String,String}} wh # actioninput=actioninput, # ) # ) - # result = actioninput - # elseif actionname ∈ ["PRESENTBOX"] - # chatresponse = generatechat(a, thoughtDict) - # push!(a.memory[:events], - # eventdict(; - # event_description="the assistant talks to 
the user.", - # timestamp=Dates.now(), - # subject="assistant", - # thought=thoughtDict, - # actionname=actionname, - # actioninput=chatresponse, - # ) - # ) # result = chatresponse + if actionname ∈ ["CHATBOX", "ENDCONVERSATION"] + # chatresponse = generatechat(a, thoughtDict) + push!(a.memory[:events], + eventdict(; + event_description="the assistant talks to the user.", + timestamp=Dates.now(), + subject="assistant", + thought=thoughtDict, + actionname=actionname, + actioninput=actioninput, + ) + ) + result = actioninput + elseif actionname ∈ ["PRESENTBOX"] + chatresponse = presentbox(a, thoughtDict) + push!(a.memory[:events], + eventdict(; + event_description="the assistant talks to the user.", + timestamp=Dates.now(), + subject="assistant", + thought=thoughtDict, + actionname=actionname, + actioninput=chatresponse, + ) + ) + result = chatresponse elseif actionname == "CHECKINVENTORY" if rawresponse !== nothing @@ -926,41 +951,34 @@ function presentbox(a::sommelier, thoughtDict) Your name is $(a.name). You are a helpful English-speaking assistant, acting as a polite, website-based sommelier for $(a.retailername)'s wine store. - You have checked the inventory and found wines that match the customer's criteria. + You have checked the inventory and found wines. Present the wines to the customer in a way that keep the conversation smooth and engaging. - - Your ongoing conversation with the user: ... - Inventory check result: ... - Your thoughts: Your current thoughts in your mind + + Additional info: additional information + Chat history: your ongoing conversation with the user + Wine name: name if wines you found. - - - Do not offer additional services you didn't think. - - Focus on plan. - - - Focus on the latest conversation. - - If the user interrupts, prioritize the user - - Be honest - - Medium and full-bodied red wines should not be paired with spicy foods. + - Provide detailed introductions of the wines you've found to the user. 
+ - Explain how the wine could match the user's intention and what its effects might mean for the user's experience. + - If multiple wines are available, highlight their differences and provide a comprehensive comparison of how each option aligns with the user's intention and what the potential effects of each option could mean for the user's experience. + - Provide your personal recommendation and provide a brief explanation of why you recommend it. - Chat: ... + Dialogue: your wine presentation to the user - - Your ongoing conversation with the user: "user> hello, I need a new car\n" - Additional info: "Car previously found in your inventory: 1) Toyota Camry 2020 2) Honda Civic 2021 3) Ford Mustang 2022" - Your thoughts: "I should recommend the car we have to the user." - Chat: "We have a variety of cars available, including the Toyota Camry 2020, the Honda Civic 2021, and the Ford Mustang 2022. Which one would you like to see?" - + + Dialogue: ... + Let's begin! """ - header = ["Chat:"] - dictkey = ["chat"] + header = ["Dialogue:"] + dictkey = ["dialogue"] # a.memory[:shortmem][:available_wine] is a vector of dictionary context = @@ -974,29 +992,26 @@ function presentbox(a::sommelier, thoughtDict) errornote = "" response = nothing # placeholder for show when error msg show up - yourthought = "$(thoughtDict[:thought]) $(thoughtDict[:plan])" - yourthought1 = nothing + # yourthought = "$(thoughtDict[:thought]) $(thoughtDict[:plan])" + # yourthought1 = nothing for attempt in 1:10 if attempt > 1 # use to prevent LLM generate the same respond over and over - yourthought1 = paraphrase(a.func[:text2textInstructLLM], yourthought) + println("\nYiemAgent presentbox() attempt $attempt/10 ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + # yourthought1 = paraphrase(a.func[:text2textInstructLLM], yourthought) + # llmkwargs[:temperature] = 0.1 * attempt else - yourthought1 = yourthought + # yourthought1 = yourthought end - usermsg = """ - - $chathistory - - - $context - - - 
$yourthought1 - - $errornote - """ + usermsg = + """ + $errornote + Additional info: $context + Chat history: $chathistory + Wine name: $(thoughtDict[:action_input]) + """ _prompt = [ @@ -1010,8 +1025,9 @@ function presentbox(a::sommelier, thoughtDict) response = a.func[:text2textInstructLLM](prompt) # sometime the model response like this "here's how I would respond: ..." if occursin("respond:", response) - errornote = "You don't need to intro your response" - error("generatechat() response contain : ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + errornote = "Your previous response contains 'response:' which is not allowed" + println("\nERROR YiemAgent presentbox() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + continue end response = GeneralUtils.remove_french_accents(response) response = replace(response, '*'=>"") @@ -1023,28 +1039,32 @@ function presentbox(a::sommelier, thoughtDict) # check whether response has all header detected_kw = GeneralUtils.detect_keyword(header, response) if 0 ∈ values(detected_kw) - errornote = "\nYiemAgent generatechat() response does not have all header" + errornote = "$missingkeys are missing from your previous response" + println("\nERROR YiemAgent presentbox() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") continue elseif sum(values(detected_kw)) > length(header) - errornote = "\nnYiemAgent generatechat() response has duplicated header" + errornote = "\nYour previous attempt has duplicated points according to the required response format" + println("\nERROR YiemAgent presentbox() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") continue end responsedict = GeneralUtils.textToDict(response, header; dictKey=dictkey, symbolkey=true) - # check if Context: is in chat - if occursin("Context:", responsedict[:chat]) - error("Context: is in text. 
This is not allowed") + # check if Context: is in dialogue + if occursin("Context:", responsedict[:dialogue]) + errornote = "Your previous response contains 'Context:' which is not allowed" + println("\nERROR YiemAgent presentbox() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + continue end - println("\ngeneratechat() ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + println("\nYiemAgent presentbox() ", @__FILE__, ":", @__LINE__, " $(Dates.now())") pprintln(Dict(responsedict)) # check whether an agent recommend wines before checking inventory or recommend wines # outside its inventory # ask LLM whether there are any winery mentioned in the response - mentioned_winery = detectWineryName(a, responsedict[:chat]) + mentioned_winery = detectWineryName(a, responsedict[:dialogue]) if mentioned_winery != "None" mentioned_winery = String.(strip.(split(mentioned_winery, ","))) @@ -1062,13 +1082,13 @@ function presentbox(a::sommelier, thoughtDict) # if wine is mentioned but not in timeline or shortmem, # then the agent is not supposed to recommend the wine if isWineInEvent == false - - errornote = "Previously, You recommend wines that is not in your inventory which is not allowed." - error("Previously, You recommend wines that is not in your inventory which is not allowed.") + errornote = "Your previous response recommends wines that is not in your inventory which is not allowed" + println("\nERROR YiemAgent presentbox() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + continue end end - result = responsedict[:chat] + result = responsedict[:dialogue] return result end @@ -1111,8 +1131,8 @@ function generatechat(a::sommelier, thoughtDict) - Answering questions or offering additional services beyond those related to your store's wine recommendations such as discounts, quantity, rewards programs, promotions, delivery options, shipping, boxes, gift wrapping, packaging, personalized messages or something similar. 
These are the job of our sales team at the store. At each round of conversation, you will be given the following: Additional info: ... - Your thoughts: Your current thoughts in your mind Your ongoing conversation with the user: ... + Your thoughts: Your current thoughts in your mind You must follow the following guidelines: - Do not offer additional services you didn't think You should follow the following guidelines: @@ -1150,11 +1170,16 @@ function generatechat(a::sommelier, thoughtDict) yourthought = "$(thoughtDict[:thought]) $(thoughtDict[:plan])" yourthought1 = nothing - for attempt in 1:10 + llmkwargs=Dict( + :num_ctx => 32768, + :temperature => 0.1, + ) + for attempt in 1:10 if attempt > 1 # use to prevent LLM generate the same respond over and over println("\nYiemAgent generatchat() attempt $attempt/10 ", @__FILE__, ":", @__LINE__, " $(Dates.now())") yourthought1 = paraphrase(a.func[:text2textInstructLLM], yourthought) + llmkwargs[:temperature] = 0.1 * attempt else yourthought1 = yourthought end @@ -1163,8 +1188,8 @@ function generatechat(a::sommelier, thoughtDict) """ $errornote Additional info: $context - Your thoughts: $yourthought1 Your ongoing conversation with the user: $chathistory + Your thoughts: $yourthought1 """ _prompt = @@ -1175,15 +1200,15 @@ function generatechat(a::sommelier, thoughtDict) # put in model format prompt = GeneralUtils.formatLLMtext(_prompt; formatname="qwen") - response = a.func[:text2textInstructLLM](prompt) + response = a.func[:text2textInstructLLM](prompt; llmkwargs=llmkwargs) # sometime the model response like this "here's how I would respond: ..." 
if occursin("respond:", response) - errornote = "You don't need to put 'response:' in your response" - println("ERROR YiemAgent generatechat() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + errornote = "Your previous response contains 'response:' which is not allowed" + println("\nERROR YiemAgent generatechat() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") elseif occursin("Your thoughts:", response) || occursin("your thoughts:", response) errornote = "You don't need to put 'Your thoughts:' in your response" - println("ERROR YiemAgent generatechat() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + println("\nERROR YiemAgent generatechat() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") end response = GeneralUtils.remove_french_accents(response) response = replace(response, '*'=>"") @@ -1199,20 +1224,21 @@ function generatechat(a::sommelier, thoughtDict) missingkeys = [header[i] for i in zeroind] if 0 ∈ values(detected_kw) errornote = "$missingkeys are missing from your previous response" - println("\nYiemAgent generatechat() $errornote:\n $response ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + println("\nERROR YiemAgent generatechat() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") continue elseif sum(values(detected_kw)) > length(header) - errornote = "Your response has duplicated points" - println("\n$errornote: $response ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + errornote = "\nYour previous attempt has duplicated points according to the required response format" + println("\nERROR YiemAgent generatechat() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") continue end responsedict = GeneralUtils.textToDict(response, header; dictKey=dictkey, symbolkey=true) - # check if Context: is in chat + # check if Context: is in dialogue if occursin("Context:", responsedict[:dialogue]) - println("\nYiemAgent generatechat() context is in response. 
This is not allowed", @__FILE__, ":", @__LINE__, " $(Dates.now())") + errornote = "Your previous response contains 'Context:' which is not allowed" + println("\nERROR YiemAgent generatechat() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") continue end @@ -1473,17 +1499,14 @@ function generatequestion(a, text2textInstructLLM::Function; 2) Thanks the user when they don't need any further assistance and invite them to comeback next time Your responsibility includes: - 1) Ask yourself: - - what do you know - - what you do not know - - what could you do + 1) From your point of view as a sommelier helping the user, ask yourself multiple questions based on the current situation Your responsibility does NOT includes: 1) Requesting the user to place an order, make a purchase, or confirm the order. These are the job of our sales team at the store. 2) Processing sales orders or engaging in any other sales-related activities. These are the job of our sales team at the store. 3) Answering questions or offering additional services beyond those related to your store's wine recommendations such as discounts, quantity, rewards programs, promotions, delivery options, shipping, boxes, gift wrapping, packaging, personalized messages or something similar. These are the job of our sales team at the store. - At each round of conversation, you will be given the current situation: + At each round of conversation, you will be given the info: Recap: recap of what has happened so far Additional info: ... Your recent events: latest 5 events of the situation @@ -1523,10 +1546,14 @@ function generatequestion(a, text2textInstructLLM::Function; ... Here are some examples: - Q: The user is buying for her husband, should I dig in to get more information? - A: Yes, I should. So that I have better idea about the user's preferences. Q: What the user is looking for? A: The user is asking for a MPV car with 7-seat + Q: What do I know? + A: The user is looking for a car with 7-seat. 
Our dealer sell these kind of cars + Q: What I do not know? + A: I don't know about the user budget, car's color, powertrain and other user's preferences. + Q: The user is buying for her husband, should I dig in to get more information? + A: Yes, I should. So that I have better idea about the user's preferences. Q: Why the user saying this? A: The user does not want an SUV because it does not have sliding doors Q: The user is asking for a cappuccino. Do I have it at my cafe? @@ -1539,18 +1566,18 @@ function generatequestion(a, text2textInstructLLM::Function; A: No. I need more information from the user including ... Q: What else do I need to know? A: ... - Q: Should I check the inventory now? + Q: Should I check our inventory now? A: ... Q: What the user intend to do with the car? - A: I don't know yet. I will need to ask the user. - Q: What do I have in the inventory? + A: I don't know yet. Let's ask the user. + Q: What do I have in our inventory? A: ... Q: Which items are within the user price range? And which items are out of the user price rance? A: ... - Q: Do I have them in stock? + Q: Do I have what the user is looking for in our stock? A: ... - Q: Did I introduce them to the user already? - A: Not yet. + Q: Did I introduce what I found in our inventory to the user already? + A: According to my conversation with the user, not yet. Q: Am I certain about the information I'm going to share with the user, or should I verify the information first? A: ... Q: What should I do? 
@@ -1575,7 +1602,7 @@ function generatequestion(a, text2textInstructLLM::Function; recent_ind = GeneralUtils.recentElementsIndex(length(a.memory[:events]), recent) recentevents = a.memory[:events][recent_ind] - timeline = createTimeline(recentevents) + timeline = createTimeline(recentevents; eventindex=recent_ind) errornote = "" response = nothing # store for show when error msg show up @@ -1596,9 +1623,15 @@ function generatequestion(a, text2textInstructLLM::Function; GeneralUtils.dictToString(tempmem) end + llmkwargs=Dict( + :num_ctx => 32768, + :temperature => 0.2, + ) + for attempt in 1:10 if attempt > 1 println("\nYiemAgent generatequestion() attempt $attempt/10 ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + llmkwargs[:temperature] = 0.1 * attempt end usermsg = @@ -1618,7 +1651,7 @@ function generatequestion(a, text2textInstructLLM::Function; # put in model format prompt = GeneralUtils.formatLLMtext(_prompt; formatname="qwen") - response = text2textInstructLLM(prompt, modelsize="medium") + response = text2textInstructLLM(prompt, modelsize="medium", llmkwargs=llmkwargs) # make sure generatequestion() don't have wine name that is not from retailer inventory # check whether an agent recommend wines before checking inventory or recommend wines # outside its inventory @@ -1649,24 +1682,26 @@ function generatequestion(a, text2textInstructLLM::Function; q_number = count("Q", response) # check for valid response - if q_number < 2 - errornote = "too few questions only $q_number questions are generated previously." - println("too few questions only $q_number questions are generated ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + if q_number < 1 + errornote = "Your previous response has too few questions." + println("\nERROR YiemAgent generatequestion() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") continue # check whether "A1" is in the response, if not error. 
elseif !occursin("A1:", response) - errornote = "previous response does not have A1" - println("\nprevious response does not have A1 ", @__FILE__, ":", @__LINE__, " $(Dates.now())") + errornote = "Your previous response does not have A1:" + println("\nERROR YiemAgent generatequestion() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") continue end # check whether response has all header detected_kw = GeneralUtils.detect_keyword(header, response) if 0 ∈ values(detected_kw) - errornote = "\nresponse does not have all header" + errornote = "\nYour previous attempt did not have all points according to the required response format" + println("\nERROR YiemAgent generatequestion() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") continue elseif sum(values(detected_kw)) > length(header) - errornote = "\nYiemAgent generatequestion() response has duplicated header" + errornote = "\nYour previous attempt has duplicated points according to the required response format" + println("\nERROR YiemAgent generatequestion() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())") continue end diff --git a/src/util.jl b/src/util.jl index ba3135a..41d6270 100644 --- a/src/util.jl +++ b/src/util.jl @@ -281,18 +281,27 @@ timeline = createTimeline(events) # 2) Assistant> Hi there! with a smile """ -function createTimeline(events::T1) where {T1<:AbstractVector} +function createTimeline(events::T1; eventindex::Union{UnitRange, Nothing}=nothing + ) where {T1<:AbstractVector} # Initialize empty timeline string timeline = "" - # Iterate through events with index - for (i, event) in enumerate(events) + # Determine which indices to use - either provided range or full length + ind = + if eventindex !== nothing + [eventindex...] 
+ else + 1:length(events) + end + + # Iterate through events and format each one + for (i, event) in zip(ind, events) # If no outcome exists, format without outcome if event[:outcome] === nothing - timeline *= "Event_$i) $(event[:subject])> $(event[:actioninput])\n" - # If outcome exists, include it in formatting + timeline *= "Event_$i $(event[:subject])> $(event[:actioninput])\n" + # If outcome exists, include it in formatting else - timeline *= "Event_$i) $(event[:subject])> $(event[:actioninput]) $(event[:outcome])\n" + timeline *= "Event_$i $(event[:subject])> $(event[:actioninput]) $(event[:outcome])\n" end end @@ -302,7 +311,6 @@ end - # """ Convert a single chat dictionary into LLM model instruct format. # # Llama 3 instruct format example diff --git a/test/test_1.jl b/test/test_1.jl index 27ddfb5..6500feb 100644 --- a/test/test_1.jl +++ b/test/test_1.jl @@ -83,7 +83,7 @@ function getEmbedding(text::T) where {T<:AbstractString} msgPurpose="embedding", senderName="yiemagent", senderId=sessionId, - receiverName="text2textinstruct_small", + receiverName="textembedding", mqttBrokerAddress=config[:mqttServerInfo][:broker], mqttBrokerPort=config[:mqttServerInfo][:port], )