From 883f581b2a425c430bd6d0cc782a466a9be8b1d3 Mon Sep 17 00:00:00 2001 From: narawat lamaiin Date: Sat, 22 Mar 2025 15:34:00 +0700 Subject: [PATCH] update --- src/interface.jl | 24 +++++++++++++----------- src/llmfunction.jl | 34 +++++++++++++++++++++------------- test/test_1.jl | 2 +- 3 files changed, 35 insertions(+), 25 deletions(-) diff --git a/src/interface.jl b/src/interface.jl index 39da605..9df7fb2 100644 --- a/src/interface.jl +++ b/src/interface.jl @@ -248,7 +248,7 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen for winename in winenames if !occursin(winename, chathistory) - println("\n~~~ Yiem decisionMaker() found wines from DB ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("\nYiem decisionMaker() found wines from DB ", Dates.now(), " ", @__FILE__, " ", @__LINE__) d = Dict( :understanding=> "I understand that the customer is looking for a wine that matches their intention and budget.", :reasoning=> "I checked the inventory and found wines that match the customer's criteria. I will present the wines to the customer.", @@ -363,7 +363,7 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen end checkFlag == true ? continue : nothing - println("\n~~~ Yiem decisionMaker() ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("\nYiem decisionMaker() ", Dates.now(), " ", @__FILE__, " ", @__LINE__) pprintln(Dict(responsedict)) # check whether an agent recommend wines before checking inventory or recommend wines @@ -959,8 +959,10 @@ function generatechat(a::sommelier, thoughtDict) You should only respond in format as described below: Chat: ... - Here are some examples of response format: - Chat: "I see. Let me think about it. I'll get back to you with my recommendation." 
+ Here are some examples:
+ Your ongoing conversation with the user: "user> hello, I need a new car\n"
+ Context: "Car previously found in your inventory: 1) Toyota Camry 2020 2) Honda Civic 2021 3) Ford Mustang 2022"
+ Chat: "Oh, we have a variety of cars available, including the Toyota Camry 2020, the Honda Civic 2021, and the Ford Mustang 2022. Which one would you like to see?"
 Let's begin!
 """
@@ -993,7 +995,7 @@ function generatechat(a::sommelier, thoughtDict)
 usermsg = """
 Your ongoing conversation with the user:
 $chathistory
- Contex: $context
+ $context
 Your thoughts: $yourthought1
 $errornote
 """
@@ -1024,10 +1026,10 @@ function generatechat(a::sommelier, thoughtDict)
 # check whether response has all header
 detected_kw = GeneralUtils.detect_keyword(header, response)
 if sum(values(detected_kw)) < length(header)
- errornote = "\nSQL decisionMaker() response does not have all header"
+ errornote = "\nYiemAgent generatechat() response does not have all header"
 continue
 elseif sum(values(detected_kw)) > length(header)
- errornote = "\nSQL decisionMaker() response has duplicated header"
+ errornote = "\nYiemAgent generatechat() response has duplicated header"
 continue
 end
@@ -1047,7 +1049,7 @@ function generatechat(a::sommelier, thoughtDict)
 error("Context: is in text. 
This is not allowed") end - println("\n~~~ generatechat() ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("\ngeneratechat() ", Dates.now(), " ", @__FILE__, " ", @__LINE__) pprintln(Dict(responsedict)) # check whether an agent recommend wines before checking inventory or recommend wines @@ -1366,7 +1368,7 @@ function generatequestion(a, text2textInstructLLM::Function; recent=nothing)::St responsedict = GeneralUtils.textToDict(response, header; dictKey=dictkey, symbolkey=true) response = "Q1: " * responsedict[:q1] - println("\n~~~ generatequestion ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("\ngeneratequestion ", Dates.now(), " ", @__FILE__, " ", @__LINE__) pprintln(response) return response catch e @@ -1460,7 +1462,7 @@ function generateSituationReport(a, text2textInstructLLM::Function; skiprecent:: responsedict = GeneralUtils.textToDict(response, header; dictKey=dictkey, symbolkey=true) - println("\n~~~ generateSituationReport() ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("\ngenerateSituationReport() ", Dates.now(), " ", @__FILE__, " ", @__LINE__) pprintln(response) return responsedict @@ -1514,7 +1516,7 @@ function detectWineryName(a, text) try response = a.func[:text2textInstructLLM](prompt) - println("\n~~~ detectWineryName() ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("\ndetectWineryName() ", Dates.now(), " ", @__FILE__, " ", @__LINE__) pprintln(response) # check whether response has all header diff --git a/src/llmfunction.jl b/src/llmfunction.jl index 62c15ec..021924d 100644 --- a/src/llmfunction.jl +++ b/src/llmfunction.jl @@ -291,20 +291,20 @@ julia> result = checkinventory(agent, input) function checkinventory(a::T1, input::T2 ) where {T1<:agent, T2<:AbstractString} - println("\n~~~ checkinventory order: $input ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("\ncheckinventory order: $input ", Dates.now(), " ", @__FILE__, " ", @__LINE__) wineattributes_1 = extractWineAttributes_1(a, input) 
wineattributes_2 = extractWineAttributes_2(a, input) _inventoryquery = "retailer name: $(a.retailername), $wineattributes_1, $wineattributes_2" inventoryquery = "Retrieves winery, wine_name, vintage, region, country, wine_type, grape, serving_temperature, sweetness, intensity, tannin, acidity, tasting_notes, price and currency of wines that match the following criteria - {$_inventoryquery}" - println("~~~ checkinventory input: $inventoryquery ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("\ncheckinventory input: $inventoryquery ", Dates.now(), " ", @__FILE__, " ", @__LINE__) # add suppport for similarSQLVectorDB textresult, rawresponse = SQLLLM.query(inventoryquery, a.func[:executeSQL], a.func[:text2textInstructLLM], insertSQLVectorDB=a.func[:insertSQLVectorDB], similarSQLVectorDB=a.func[:similarSQLVectorDB]) - println("\n~~~ checkinventory result ", Dates.now(), " ", @__FILE__, " ", @__LINE__) + println("\ncheckinventory result ", Dates.now(), " ", @__FILE__, " ", @__LINE__) println(textresult) return (result=textresult, rawresponse=rawresponse, success=true, errormsg=nothing) @@ -731,7 +731,7 @@ function paraphrase(text2textInstructLLM::Function, text::String) - N/A You should then respond to the user with: - 1) Paraphrase: Paraphrased text + Paraphrase: Paraphrased text You should only respond in format as described below: Paraphrase: ... @@ -739,6 +739,9 @@ function paraphrase(text2textInstructLLM::Function, text::String) Let's begin! 
"""
+ header = ["Paraphrase:"]
+ dictkey = ["paraphrase"]
+ errornote = ""
 response = nothing # placeholder for show when error msg show up
@@ -756,17 +759,14 @@ function paraphrase(text2textInstructLLM::Function, text::String)
 ]
 # put in model format
- prompt = GeneralUtils.formatLLMtext(_prompt; formatname="llama3instruct")
- prompt *= """
- <|start_header_id|>assistant<|end_header_id|>
- """
+ prompt = GeneralUtils.formatLLMtext(_prompt; formatname="qwen")
 try
 response = text2textInstructLLM(prompt)
 # sometime the model response like this "here's how I would respond: ..."
 if occursin("respond:", response)
 errornote = "You don't need to intro your response"
- error("\n~~~ paraphrase() response contain : ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
+ error("\nparaphrase() response contain : ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
 end
 response = GeneralUtils.remove_french_accents(response)
 response = replace(response, '*'=>"")
@@ -774,8 +774,16 @@ function paraphrase(text2textInstructLLM::Function, text::String)
 response = replace(response, '`' => "")
 response = GeneralUtils.remove_french_accents(response)
- header = ["Paraphrase:"]
- dictkey = ["paraphrase"]
+ # check whether response has all header
+ detected_kw = GeneralUtils.detect_keyword(header, response)
+ if sum(values(detected_kw)) < length(header)
+ errornote = "\nYiemAgent paraphrase() response does not have all header"
+ continue
+ elseif sum(values(detected_kw)) > length(header)
+ errornote = "\nYiemAgent paraphrase() response has duplicated header"
+ continue
+ end
+
 responsedict = GeneralUtils.textToDict(response, header; dictKey=dictkey, symbolkey=true)
@@ -793,7 +801,7 @@ function paraphrase(text2textInstructLLM::Function, text::String)
 end
 end
- println("\n~~~ paraphrase() ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
+ println("\nparaphrase() ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
 pprintln(Dict(responsedict))
 result = responsedict[:paraphrase]
@@ -807,7 +815,7 @@ function 
paraphrase(text2textInstructLLM::Function, text::String) println("\nAttempt $attempt. Error occurred: $errorMsg\n$st ", Dates.now(), " ", @__FILE__, " ", @__LINE__) end end - error("generatechat failed to generate a response") + error("paraphrase() failed to generate a response") end diff --git a/test/test_1.jl b/test/test_1.jl index 5699e2e..e23b3fd 100644 --- a/test/test_1.jl +++ b/test/test_1.jl @@ -161,7 +161,7 @@ function insertSQLVectorDB(query::T1, SQL::T2; maxdistance::Integer=3) where {T1 end -function similarSommelierDecision(recentevents::T1; maxdistance::Integer=5 +function similarSommelierDecision(recentevents::T1; maxdistance::Integer=3 )::Union{AbstractDict, Nothing} where {T1<:AbstractString} tablename = "sommelier_decision_repository" # find similar