This commit is contained in:
2025-03-20 16:15:38 +07:00
parent c60037226a
commit 8fc3afe348
4 changed files with 120 additions and 77 deletions

View File

@@ -318,9 +318,10 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen
continue continue
end end
responsedict = GeneralUtils.textToDict(response, header = ["Understanding:", "Reasoning:", "Plan:", "Action_name:", "Action_input:"]
["Understanding", "Reasoning", "Plan", "Action_name", "Action_input"], dictkey = ["understanding", "reasoning", "plan", "action_name", "action_input"]
rightmarker=":", symbolkey=true, lowercasekey=true) responsedict = GeneralUtils.textToDict(response, header;
dictKey=dictkey, symbolkey=true)
if responsedict[:action_name] ∉ ["CHATBOX", "CHECKINVENTORY", "ENDCONVERSATION"] if responsedict[:action_name] ∉ ["CHATBOX", "CHECKINVENTORY", "ENDCONVERSATION"]
errornote = "You must use the given functions" errornote = "You must use the given functions"
@@ -999,6 +1000,9 @@ function generatechat(a::sommelier, thoughtDict)
Let's begin! Let's begin!
""" """
header = ["Chat:"]
dictkey = ["chat"]
# a.memory[:shortmem][:available_wine] is a vector of dictionary # a.memory[:shortmem][:available_wine] is a vector of dictionary
context = context =
if length(a.memory[:shortmem][:available_wine]) != 0 if length(a.memory[:shortmem][:available_wine]) != 0
@@ -1054,22 +1058,27 @@ function generatechat(a::sommelier, thoughtDict)
response = replace(response, '`' => "") response = replace(response, '`' => "")
response = replace(response, "<|eot_id|>"=>"") response = replace(response, "<|eot_id|>"=>"")
response = GeneralUtils.remove_french_accents(response) response = GeneralUtils.remove_french_accents(response)
responsedict = GeneralUtils.textToDict(response, ["Chat"],
rightmarker=":", symbolkey=true, lowercasekey=true)
for i ∈ [:chat] # check whether response has all header
if length(JSON3.write(responsedict[i])) == 0 detected_kw = GeneralUtils.detect_keyword(header, response)
error("$i is empty ", Dates.now(), " ", @__FILE__, " ", @__LINE__) if sum(values(detected_kw)) < length(header)
end errornote = "\nSQL decisionMaker() response does not have all header"
continue
elseif sum(values(detected_kw)) > length(header)
errornote = "\nSQL decisionMaker() response has duplicated header"
continue
end end
# check if there are more than 1 key per categories responsedict = GeneralUtils.textToDict(response, header;
for i ∈ [:chat] dictKey=dictkey, symbolkey=true)
matchkeys = GeneralUtils.findMatchingDictKey(responsedict, i)
if length(matchkeys) > 1 # # check if there are more than 1 key per categories
error("generatechat has more than one key per categories") # for i ∈ Symbol.(dictkey)
end # matchkeys = GeneralUtils.findMatchingDictKey(responsedict, i)
end # if length(matchkeys) > 1
# error("generatechat has more than one key per categories")
# end
# end
# check if Context: is in chat # check if Context: is in chat
if occursin("Context:", responsedict[:chat]) if occursin("Context:", responsedict[:chat])
@@ -1385,9 +1394,10 @@ function generatequestion(a, text2textInstructLLM::Function; recent=nothing)::St
error("no answer found in the response ", Dates.now(), " ", @__FILE__, " ", @__LINE__) error("no answer found in the response ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
end end
responsedict = GeneralUtils.textToDict(response, header = ["Understanding:", "Q1:"]
["Understanding", "Q1"], dictkey = ["understanding", "q1"]
rightmarker=":", symbolkey=true, lowercasekey=true) responsedict = GeneralUtils.textToDict(response, header;
dictKey=dictkey, symbolkey=true)
response = "Q1: " * responsedict[:q1] response = "Q1: " * responsedict[:q1]
println("\n~~~ generatequestion ", Dates.now(), " ", @__FILE__, " ", @__LINE__) println("\n~~~ generatequestion ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
pprintln(response) pprintln(response)
@@ -1469,9 +1479,10 @@ function generateSituationReport(a, text2textInstructLLM::Function; skiprecent::
""" """
response = text2textInstructLLM(prompt) response = text2textInstructLLM(prompt)
eventheader = ["Event_$i" for i in eachindex(a.memory[:events])] header = ["Event_$i:" for i in eachindex(a.memory[:events])]
responsedict = GeneralUtils.textToDict(response, eventheader, dictkey = lowercase.(["Event_$i" for i in eachindex(a.memory[:events])])
rightmarker=":", symbolkey=true) responsedict = GeneralUtils.textToDict(response, header;
dictKey=dictkey, symbolkey=true)
println("\n~~~ generateSituationReport() ", Dates.now(), " ", @__FILE__, " ", @__LINE__) println("\n~~~ generateSituationReport() ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
pprintln(response) pprintln(response)
@@ -1530,8 +1541,10 @@ function detectWineryName(a, text)
println("\n~~~ detectWineryName() ", Dates.now(), " ", @__FILE__, " ", @__LINE__) println("\n~~~ detectWineryName() ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
pprintln(response) pprintln(response)
responsedict = GeneralUtils.textToDict(response, ["winery_names"], header = ["Winery_names:"]
rightmarker=":", symbolkey=true, lowercasekey=true) dictkey = ["winery_names"]
responsedict = GeneralUtils.textToDict(response, header;
dictKey=dictkey, symbolkey=true)
result = responsedict[:winery_names] result = responsedict[:winery_names]

View File

@@ -326,7 +326,7 @@ julia>
# TODO # TODO
- [] update docstring - [] update docstring
- [x] implement the function - [WORKING] implement the function
# Signature # Signature
""" """
@@ -336,31 +336,41 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2<
""" """
As a helpful sommelier, your task is to extract the user information from the user's query as much as possible to fill out user's preference form. As a helpful sommelier, your task is to extract the user information from the user's query as much as possible to fill out user's preference form.
At each round of conversation, the user will give you the current situation: At each round of conversation, the user will give you the following:
User's query: ... User's query: ...
You must follow the following guidelines: You must follow the following guidelines:
1) If specific information required in the preference form is not available in the query or there isn't any, mark with "NA" to indicate this. - If specific information required in the preference form is not available in the query or there isn't any, mark with "NA" to indicate this.
Additionally, words like 'any' or 'unlimited' mean no information is available. Additionally, words like 'any' or 'unlimited' mean no information is available.
2) Do not generate other comments. - Do not generate other comments.
You should then respond to the user with the following points: You should then respond to the user with:
- reasoning: state your understanding of the current situation Comprehension: state your understanding of the current situation
- wine_name: name of the wine Wine_name: name of the wine
- winery: name of the winery Winery: name of the winery
- vintage: the year of the wine Vintage: the year of the wine
- region: a region (NOT a country) where the wine is produced, such as Burgundy, Napa Valley, etc Region: a region (NOT a country) where the wine is produced, such as Burgundy, Napa Valley, etc
- country: a country where the wine is produced. Can be "Austria", "Australia", "France", "Germany", "Italy", "Portugal", "Spain", "United States" Country: a country where the wine is produced. Can be "Austria", "Australia", "France", "Germany", "Italy", "Portugal", "Spain", "United States"
- wine_type: can be one of: "red", "white", "sparkling", "rose", "dessert" or "fortified" Wine_type: can be one of: "red", "white", "sparkling", "rose", "dessert" or "fortified"
- grape_varietal: the name of the primary grape used to make the wine Grape_varietal: the name of the primary grape used to make the wine
- tasting_notes: a brief description of the wine's taste, such as "butter", "oak", "fruity", etc Tasting_notes: a brief description of the wine's taste, such as "butter", "oak", "fruity", etc
- wine_price: price range of wine. Wine_price: price range of wine.
- occasion: the occasion the user is having the wine for Occasion: the occasion the user is having the wine for
- food_to_be_paired_with_wine: food that the user will be served with the wine such as poultry, fish, steak, etc Food_to_be_paired_with_wine: food that the user will be served with the wine such as poultry, fish, steak, etc
You should only respond in format as described below:
You should only respond in the user's preference form (JSON) as described below: Comprehension: ...
{"reasoning": ..., "winery": ..., "wine_name": ..., "vintage": ..., "region": ..., "country": ..., "wine_type": ..., "grape_varietal": ..., "tasting_notes": ..., "wine_price": ..., "occasion": ..., "food_to_be_paired_with_wine": ...} Wine_name: ...
Winery: ...
Vintage: ...
Region: ...
Country: ...
Wine_type:
Grape_varietal: ...
Tasting_notes: ...
Wine_price: ...
Occasion: ...
Food_to_be_paired_with_wine: ...
Here are some example: Here are some example:
User's query: red, Chenin Blanc, Riesling, 20 USD User's query: red, Chenin Blanc, Riesling, 20 USD
@@ -372,7 +382,9 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2<
Let's begin! Let's begin!
""" """
attributes = ["reasoning", "winery", "wine_name", "vintage", "region", "country", "wine_type", "grape_varietal", "tasting_notes", "wine_price", "occasion", "food_to_be_paired_with_wine"] attributes =
header = ["Comprehension:", "Wine_name:", "Winery:", "Vintage:", "Region:", "Country:", "Wine_type:", "Grape_varietal:", "Tasting_notes:", "Wine_price:", "Occasion:", "Food_to_be_paired_with_wine:"]
dictkey = ["comprehension", "wine_name", "winery", "vintage", "region", "country", "wine_type", "grape_varietal", "tasting_notes", "wine_price", "occasion", "food_to_be_paired_with_wine"]
errornote = "" errornote = ""
for attempt in 1:5 for attempt in 1:5
@@ -389,12 +401,7 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2<
] ]
# put in model format # put in model format
prompt = GeneralUtils.formatLLMtext(_prompt; formatname="llama3instruct") prompt = GeneralUtils.formatLLMtext(_prompt; formatname="qwen")
prompt *=
"""
<|start_header_id|>assistant<|end_header_id|>
"""
response = a.func[:text2textInstructLLM](prompt) response = a.func[:text2textInstructLLM](prompt)
response = GeneralUtils.remove_french_accents(response) response = GeneralUtils.remove_french_accents(response)
@@ -753,8 +760,11 @@ function paraphrase(text2textInstructLLM::Function, text::String)
response = replace(response, '$' => "USD") response = replace(response, '$' => "USD")
response = replace(response, '`' => "") response = replace(response, '`' => "")
response = GeneralUtils.remove_french_accents(response) response = GeneralUtils.remove_french_accents(response)
responsedict = GeneralUtils.textToDict(response, ["Paraphrase"],
rightmarker=":", symbolkey=true, lowercasekey=true) header = ["Paraphrase:"]
dictkey = ["paraphrase"]
responsedict = GeneralUtils.textToDict(response, header;
dictKey=dictkey, symbolkey=true)
for i ∈ [:paraphrase] for i ∈ [:paraphrase]
if length(JSON3.write(responsedict[i])) == 0 if length(JSON3.write(responsedict[i])) == 0

View File

@@ -31,26 +31,46 @@
"description": "organization name" "description": "organization name"
}, },
"externalservice": { "externalservice": {
"text2textinstruct": { "loadbalancer": {
"mqtttopic": "/loadbalancer/requestingservice", "mqtttopic": "/loadbalancer/requestingservice",
"description": "text to text service with instruct LLM", "description": "text to text service with instruct LLM"
"llminfo": { },
"name": "llama3instruct" "text2textinstruct": {
} "mqtttopic": "/loadbalancer/requestingservice",
}, "description": "text to text service with instruct LLM",
"virtualWineCustomer_1": { "llminfo": {
"mqtttopic": "/virtualenvironment/winecustomer", "name": "llama3instruct"
"description": "text to text service with instruct LLM that act as wine customer", }
"llminfo": { },
"name": "llama3instruct" "virtualWineCustomer_1": {
} "mqtttopic": "/virtualenvironment/winecustomer",
}, "description": "text to text service with instruct LLM that act as wine customer",
"text2textchat": { "llminfo": {
"mqtttopic": "/loadbalancer/requestingservice", "name": "llama3instruct"
"description": "text to text service with instruct LLM", }
"llminfo": { },
"name": "llama3instruct" "text2textchat": {
} "mqtttopic": "/loadbalancer/requestingservice",
} "description": "text to text service with instruct LLM",
"llminfo": {
"name": "llama3instruct"
}
},
"wineDB" : {
"description": "A wine database connection info for LibPQ client",
"host": "192.168.88.12",
"port": 10201,
"dbname": "wineDB",
"user": "yiemtechnologies",
"password": "yiemtechnologies@Postgres_0.0"
},
"SQLVectorDB" : {
"description": "A wine database connection info for LibPQ client",
"host": "192.168.88.12",
"port": 10203,
"dbname": "SQLVectorDB",
"user": "yiemtechnologies",
"password": "yiemtechnologies@Postgres_0.0"
}
} }
} }

View File

@@ -8,7 +8,7 @@ using Base.Threads
# load config # load config
config = JSON3.read("./test/config.json") config = JSON3.read("/appfolder/app/dev/YiemAgent/test/config.json")
# config = copy(JSON3.read("../mountvolume/config.json")) # config = copy(JSON3.read("../mountvolume/config.json"))
@@ -32,7 +32,7 @@ function text2textInstructLLM(prompt::String)
msgPurpose="inference", msgPurpose="inference",
senderName="yiemagent", senderName="yiemagent",
senderId=string(uuid4()), senderId=string(uuid4()),
receiverName="text2textinstruct", receiverName="text2textinstruct_small",
mqttBrokerAddress=config[:mqttServerInfo][:broker], mqttBrokerAddress=config[:mqttServerInfo][:broker],
mqttBrokerPort=config[:mqttServerInfo][:port], mqttBrokerPort=config[:mqttServerInfo][:port],
) )
@@ -61,7 +61,7 @@ function getEmbedding(text::T) where {T<:AbstractString}
msgPurpose="embedding", msgPurpose="embedding",
senderName="yiemagent", senderName="yiemagent",
senderId=string(uuid4()), senderId=string(uuid4()),
receiverName="text2textinstruct", receiverName="text2textinstruct_small",
mqttBrokerAddress=config[:mqttServerInfo][:broker], mqttBrokerAddress=config[:mqttServerInfo][:broker],
mqttBrokerPort=config[:mqttServerInfo][:port], mqttBrokerPort=config[:mqttServerInfo][:port],
) )