12 Commits

Author SHA1 Message Date
narawat lamaiin
c0edf7dadf update 2025-04-04 15:04:02 +07:00
narawat lamaiin
c21f943b12 update 2025-04-01 21:17:15 +07:00
narawat lamaiin
b8fd772a28 update 2025-03-31 21:30:14 +07:00
narawat lamaiin
883f581b2a update 2025-03-22 15:34:00 +07:00
narawat lamaiin
5a890860a6 update 2025-03-22 09:42:51 +07:00
7d5bc14a09 mark new version 2025-03-21 10:13:53 +07:00
ton
37ba3a9d31 Merge pull request 'v0.1.3-dev' (#2) from v0.1.3-dev into main
Reviewed-on: #2
2025-03-21 03:09:16 +00:00
bfadd53033 update 2025-03-21 10:03:08 +07:00
8fc3afe348 update 2025-03-20 16:15:38 +07:00
c60037226a update 2025-03-13 19:11:20 +07:00
narawat lamaiin
db6c9c5f2b update 2025-03-07 13:34:15 +07:00
narawat lamaiin
6504099959 update 2025-01-31 09:50:44 +07:00
10 changed files with 1684 additions and 1138 deletions

View File

@@ -1,7 +1,7 @@
name = "YiemAgent"
uuid = "e012c34b-7f78-48e0-971c-7abb83b6f0a2"
authors = ["narawat lamaiin <narawat@outlook.com>"]
version = "0.1.2"
version = "0.1.4"
[deps]
DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0"

File diff suppressed because it is too large Load Diff

View File

@@ -291,20 +291,20 @@ julia> result = checkinventory(agent, input)
function checkinventory(a::T1, input::T2
) where {T1<:agent, T2<:AbstractString}
println("\n~~~ checkinventory order: $input ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
println("\ncheckinventory order: $input ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
wineattributes_1 = extractWineAttributes_1(a, input)
wineattributes_2 = extractWineAttributes_2(a, input)
_inventoryquery = "retailer name: $(a.retailername), $wineattributes_1, $wineattributes_2"
inventoryquery = "Retrieves winery, wine_name, vintage, region, country, wine_type, grape, serving_temperature, sweetness, intensity, tannin, acidity, tasting_notes, price and currency of wines that match the following criteria - {$_inventoryquery}"
println("~~~ checkinventory input: $inventoryquery ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
println("\ncheckinventory input: $inventoryquery ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
# add support for similarSQLVectorDB
textresult, rawresponse = SQLLLM.query(inventoryquery, a.func[:executeSQL],
a.func[:text2textInstructLLM],
insertSQLVectorDB=a.func[:insertSQLVectorDB],
similarSQLVectorDB=a.func[:similarSQLVectorDB])
println("\n~~~ checkinventory result ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
println("\ncheckinventory result ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
println(textresult)
return (result=textresult, rawresponse=rawresponse, success=true, errormsg=nothing)
@@ -326,7 +326,7 @@ julia>
# TODO
- [] update docstring
- [x] implement the function
- implement the function
# Signature
"""
@@ -336,46 +336,63 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2<
"""
As a helpful sommelier, your task is to extract the user information from the user's query as much as possible to fill out user's preference form.
At each round of conversation, the user will give you the current situation:
At each round of conversation, the user will give you the following:
User's query: ...
You must follow the following guidelines:
1) If specific information required in the preference form is not available in the query or there isn't any, mark with "NA" to indicate this.
- If specific information required in the preference form is not available in the query or there isn't any, mark with "NA" to indicate this.
Additionally, words like 'any' or 'unlimited' mean no information is available.
2) Do not generate other comments.
- Do not generate other comments.
You should then respond to the user with the following points:
- reasoning: state your understanding of the current situation
- wine_name: name of the wine
- winery: name of the winery
- vintage: the year of the wine
- region: a region (NOT a country) where the wine is produced, such as Burgundy, Napa Valley, etc
- country: a country where the wine is produced. Can be "Austria", "Australia", "France", "Germany", "Italy", "Portugal", "Spain", "United States"
- wine_type: can be one of: "red", "white", "sparkling", "rose", "dessert" or "fortified"
- grape_varietal: the name of the primary grape used to make the wine
- tasting_notes: a brief description of the wine's taste, such as "butter", "oak", "fruity", etc
- wine_price: price range of wine.
- occasion: the occasion the user is having the wine for
- food_to_be_paired_with_wine: food that the user will be served with the wine such as poultry, fish, steak, etc
You should then respond to the user with:
Thought: state your understanding of the current situation
Wine_name: name of the wine
Winery: name of the winery
Vintage: the year of the wine
Region: a region (NOT a country) where the wine is produced, such as Burgundy, Napa Valley, etc
Country: a country where the wine is produced. Can be "Austria", "Australia", "France", "Germany", "Italy", "Portugal", "Spain", "United States"
Wine_type: can be one of: "red", "white", "sparkling", "rose", "dessert" or "fortified"
Grape_varietal: the name of the primary grape used to make the wine
Tasting_notes: a brief description of the wine's taste, such as "butter", "oak", "fruity", etc
Wine_price: price range of wine.
Occasion: the occasion the user is having the wine for
Food_to_be_paired_with_wine: food that the user will be served with the wine such as poultry, fish, steak, etc
You should only respond in the user's preference form (JSON) as described below:
{"reasoning": ..., "winery": ..., "wine_name": ..., "vintage": ..., "region": ..., "country": ..., "wine_type": ..., "grape_varietal": ..., "tasting_notes": ..., "wine_price": ..., "occasion": ..., "food_to_be_paired_with_wine": ...}
You should only respond in format as described below:
Thought: ...
Wine_name: ...
Winery: ...
Vintage: ...
Region: ...
Country: ...
Wine_type: ...
Grape_varietal: ...
Tasting_notes: ...
Wine_price: ...
Occasion: ...
Food_to_be_paired_with_wine: ...
Here are some examples:
User's query: red, Chenin Blanc, Riesling, 20 USD
{"reasoning": ..., "winery": "NA", "wine_name": "NA", "vintage": "NA", "region": "NA", "country": "NA", "wine_type": "red, white", "grape_varietal": "Chenin Blanc, Riesling", "tasting_notes": "NA", "wine_price": "0-20", "occasion": "NA", "food_to_be_paired_with_wine": "NA"}
User's query: Domaine du Collier Saumur Blanc 2019, France, white, Chenin Blanc
{"reasoning": ..., "winery": "Domaine du Collier", "wine_name": "Saumur Blanc", "vintage": "2019", "region": "Saumur", "country": "France", "wine_type": "white", "grape_varietal": "Chenin Blanc", "tasting_notes": "NA", "wine_price": "NA", "occasion": "NA", "food_to_be_paired_with_wine": "NA"}
User's query: Domaine du Collier Saumur Blanc 2019, France, white, Merlot
{"reasoning": ..., "winery": "Domaine du Collier", "wine_name": "Saumur Blanc", "vintage": "2019", "region": "Saumur", "country": "France", "wine_type": "white", "grape_varietal": "Merlot", "tasting_notes": "NA", "wine_price": "NA", "occasion": "NA", "food_to_be_paired_with_wine": "NA"}
Let's begin!
"""
attributes = ["reasoning", "winery", "wine_name", "vintage", "region", "country", "wine_type", "grape_varietal", "tasting_notes", "wine_price", "occasion", "food_to_be_paired_with_wine"]
header = ["Thought:", "Wine_name:", "Winery:", "Vintage:", "Region:", "Country:", "Wine_type:", "Grape_varietal:", "Tasting_notes:", "Wine_price:", "Occasion:", "Food_to_be_paired_with_wine:"]
dictkey = ["thought", "wine_name", "winery", "vintage", "region", "country", "wine_type", "grape_varietal", "tasting_notes", "wine_price", "occasion", "food_to_be_paired_with_wine"]
errornote = ""
for attempt in 1:5
for attempt in 1:10
#[WORKING] I should add generatequestion()
if attempt > 1
println("\nYiemAgent extractWineAttributes_1() attempt $attempt/10 ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
end
usermsg =
"""
User's query: $input
@@ -389,32 +406,35 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2<
]
# put in model format
prompt = GeneralUtils.formatLLMtext(_prompt; formatname="llama3instruct")
prompt *=
"""
<|start_header_id|>assistant<|end_header_id|>
"""
prompt = GeneralUtils.formatLLMtext(_prompt; formatname="qwen")
response = a.func[:text2textInstructLLM](prompt)
response = GeneralUtils.remove_french_accents(response)
# check whether all attributes are in the response
checkFlag = false
for word in attributes
for word in header
if !occursin(word, response)
errornote = "$word attribute is missing in previous attempts"
println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
println("Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
checkFlag = true
break
end
end
checkFlag == true ? continue : nothing
# check whether response has all header
detected_kw = GeneralUtils.detect_keyword(header, response)
if 0 ∈ values(detected_kw)
errornote = "\nYiemAgent extractWineAttributes_1() response does not have all header"
continue
elseif sum(values(detected_kw)) > length(header)
errornote = "\nYiemAgent extractWineAttributes_1() response has duplicated header"
continue
end
responsedict = GeneralUtils.textToDict(response, header;
dictKey=dictkey, symbolkey=true)
responsedict = copy(JSON3.read(response))
# convert
delete!(responsedict, :reasoning)
delete!(responsedict, :thought)
delete!(responsedict, :tasting_notes)
delete!(responsedict, :occasion)
delete!(responsedict, :food_to_be_paired_with_wine)
@@ -424,16 +444,16 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2<
# check if winery, wine_name, region, country, wine_type, grape_varietal's value are in the query because sometimes AI hallucinates
checkFlag = false
for i in attributes
for i in dictkey
j = Symbol(i)
if j ∉ [:reasoning, :tasting_notes, :occasion, :food_to_be_paired_with_wine]
if j ∉ [:thought, :tasting_notes, :occasion, :food_to_be_paired_with_wine]
# in case j is wine_price it needs to be checked differently because its value is ranged
if j == :wine_price
if responsedict[:wine_price] != "NA"
# check whether wine_price is in ranged number
if !occursin('-', responsedict[:wine_price])
errornote = "wine_price must be a range number"
println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
println("ERROR YiemAgent extractWineAttributes_1() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
checkFlag = true
break
end
@@ -448,7 +468,7 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2<
# price range like 100-100 is not good
if minprice == maxprice
errornote = "wine_price with minimum equals to maximum is not valid"
println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
println("ERROR YiemAgent extractWineAttributes_1() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
checkFlag = true
break
end
@@ -464,14 +484,14 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2<
content = [content]
end
for x in content #check whether price are mentioned in the input
if !occursin("NA", responsedict[j]) && !occursin(x, input)
errornote = "$x is not mentioned in the user query, you must only use the info from the query."
println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
checkFlag == true
break
end
end
# for x in content #check whether price are mentioned in the input
# if !occursin("NA", responsedict[j]) && !occursin(x, input)
# errornote = "$x is not mentioned in the user query, you must only use the info from the query."
# println("ERROR YiemAgent extractWineAttributes_1() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
# checkFlag == true
# break
# end
# end
end
end
end
@@ -509,7 +529,7 @@ function extractWineAttributes_2(a::T1, input::T2)::String where {T1<:agent, T2<
conversiontable =
"""
Conversion Table:
<Conversion Table>
Intensity level:
1 to 2: May correspond to "light-bodied" or a similar description.
2 to 3: May correspond to "med light bodied", "medium light" or a similar description.
@@ -534,6 +554,7 @@ function extractWineAttributes_2(a::T1, input::T2)::String where {T1<:agent, T2<
3 to 4: May correspond to "medium acidity" or a similar description.
4 to 5: May correspond to "semi high acidity" or a similar description.
4 to 5: May correspond to "high acidity" or a similar description.
</Conversion Table>
"""
systemmsg =
@@ -547,67 +568,64 @@ function extractWineAttributes_2(a::T1, input::T2)::String where {T1<:agent, T2<
The preference form requires the following information:
sweetness, acidity, tannin, intensity
You must follow the following guidelines:
<You must follow the following guidelines>
1) If specific information required in the preference form is not available in the query or there isn't any, mark with 'NA' to indicate this.
Additionally, words like 'any' or 'unlimited' mean no information is available.
2) Use the conversion table to convert the descriptive word level of sweetness, intensity, tannin, and acidity into a corresponding integer.
3) Do not generate other comments.
</You must follow the following guidelines>
You should then respond to the user with the following points:
- sweetness_keyword: The exact keywords in the user's query describing the sweetness level of the wine.
- sweetness: ( S ), where ( S ) represents integers indicating the range of sweetness levels. Example: 1-2
- acidity_keyword: The exact keywords in the user's query describing the acidity level of the wine.
- acidity: ( A ), where ( A ) represents integers indicating the range of acidity level. Example: 3-5
- tannin_keyword: The exact keywords in the user's query describing the tannin level of the wine.
- tannin: ( T ), where ( T ) represents integers indicating the range of tannin level. Example: 1-3
- intensity_keyword: The exact keywords in the user's query describing the intensity level of the wine.
- intensity: ( I ), where ( I ) represents integers indicating the range of intensity level. Example: 2-4
<You should then respond to the user with>
Sweetness_keyword: The exact keywords in the user's query describing the sweetness level of the wine.
Sweetness: ( S ), where ( S ) represents integers indicating the range of sweetness levels. Example: 1-2
Acidity_keyword: The exact keywords in the user's query describing the acidity level of the wine.
Acidity: ( A ), where ( A ) represents integers indicating the range of acidity level. Example: 3-5
Tannin_keyword: The exact keywords in the user's query describing the tannin level of the wine.
Tannin: ( T ), where ( T ) represents integers indicating the range of tannin level. Example: 1-3
Intensity_keyword: The exact keywords in the user's query describing the intensity level of the wine.
Intensity: ( I ), where ( I ) represents integers indicating the range of intensity level. Example: 2-4
</You should then respond to the user with>
You should only respond in the form (JSON) as described below:
{
"sweetness_keyword": ...,
"sweetness": ...,
"acidity_keyword": ...,
"acidity": ...,
"tannin_keyword": ...,
"tannin": ...,
"intensity_keyword": ...,
"intensity": ...
}
<You should only respond in format as described below>
Sweetness_keyword: ...
Sweetness: ...
Acidity_keyword: ...
Acidity: ...
Tannin_keyword: ...
Tannin: ...
Intensity_keyword: ...
Intensity: ...
</You should only respond in format as described below>
Here are some examples:
User's query: I want a wine with a medium-bodied, low acidity, medium tannin.
{
"sweetness_keyword": "NA",
"sweetness": "NA",
"acidity_keyword": "low acidity",
"acidity": "1-2",
"tannin_keyword": "medium tannin",
"tannin": "3-4",
"intensity_keyword": "medium-bodied",
"intensity": "3-4"
}
User's query: German red wine, under 100, pairs with spicy food
{
"sweetness_keyword": "NA",
"sweetness": "NA",
"acidity_keyword": "NA",
"acidity": "NA",
"tannin_keyword": "NA",
"tannin": "NA",
"intensity_keyword": "NA",
"intensity": "NA"
}
<Here are some examples>
User's query: I want a wine with a medium-bodied, low acidity, medium tannin.
Sweetness_keyword: NA
Sweetness: NA
Acidity_keyword: low acidity
Acidity: 1-2
Tannin_keyword: medium tannin
Tannin: 3-4
Intensity_keyword: medium-bodied
Intensity: 3-4
User's query: German red wine, under 100, pairs with spicy food
Sweetness_keyword: NA
Sweetness: NA
Acidity_keyword: NA
Acidity: NA
Tannin_keyword: NA
Tannin: NA
Intensity_keyword: NA
Intensity: NA
</Here are some examples>
Let's begin!
"""
header = ["Sweetness_keyword:", "Sweetness:", "Acidity_keyword:", "Acidity:", "Tannin_keyword:", "Tannin:", "Intensity_keyword:", "Intensity:"]
dictkey = ["sweetness_keyword", "sweetness", "acidity_keyword", "acidity", "tannin_keyword", "tannin", "intensity_keyword", "intensity"]
errornote = ""
for attempt in 1:5
for attempt in 1:10
usermsg =
"""
$conversiontable
@@ -622,14 +640,22 @@ function extractWineAttributes_2(a::T1, input::T2)::String where {T1<:agent, T2<
]
# put in model format
prompt = GeneralUtils.formatLLMtext(_prompt; formatname="llama3instruct")
prompt *=
"""
<|start_header_id|>assistant<|end_header_id|>
"""
prompt = GeneralUtils.formatLLMtext(_prompt; formatname="qwen")
response = a.func[:text2textInstructLLM](prompt)
responsedict = copy(JSON3.read(response))
# check whether response has all header
detected_kw = GeneralUtils.detect_keyword(header, response)
if 0 ∈ values(detected_kw)
errornote = "\nYiemAgent extractWineAttributes_2() response does not have all header"
continue
elseif sum(values(detected_kw)) > length(header)
errornote = "\nYiemAgent extractWineAttributes_2() response has duplicated header"
continue
end
responsedict = GeneralUtils.textToDict(response, header;
dictKey=dictkey, symbolkey=true)
# check whether each describing keyword is in the input to prevent hallucination
for i in ["sweetness", "acidity", "tannin", "intensity"]
@@ -637,7 +663,7 @@ function extractWineAttributes_2(a::T1, input::T2)::String where {T1<:agent, T2<
value = responsedict[keyword]
if value != "NA" && !occursin(value, input)
errornote = "WARNING. Keyword $keyword: $value does not appear in the input. You must use information from the input only"
println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
println("Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
continue
end
@@ -653,7 +679,7 @@ function extractWineAttributes_2(a::T1, input::T2)::String where {T1<:agent, T2<
if !occursin("keyword", string(k))
if v !== "NA" && (!occursin('-', v) || length(v) > 5)
errornote = "WARNING: The non-range value {$k: $v} is not allowed. It should be specified in a range format, i.e. min-max."
println("Attempt $attempt $errornote ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
println("Attempt $attempt $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
continue
end
end
@@ -711,7 +737,7 @@ function paraphrase(text2textInstructLLM::Function, text::String)
- N/A
You should then respond to the user with:
1) Paraphrase: Paraphrased text
Paraphrase: Paraphrased text
You should only respond in format as described below:
Paraphrase: ...
@@ -719,6 +745,9 @@ function paraphrase(text2textInstructLLM::Function, text::String)
Let's begin!
"""
header = ["Paraphrase:"]
dictkey = ["paraphrase"]
errornote = ""
response = nothing # placeholder for show when error msg show up
@@ -736,29 +765,37 @@ function paraphrase(text2textInstructLLM::Function, text::String)
]
# put in model format
prompt = GeneralUtils.formatLLMtext(_prompt; formatname="llama3instruct")
prompt *= """
<|start_header_id|>assistant<|end_header_id|>
"""
prompt = GeneralUtils.formatLLMtext(_prompt; formatname="qwen")
try
response = text2textInstructLLM(prompt)
# sometimes the model responds like this: "here's how I would respond: ..."
if occursin("respond:", response)
errornote = "You don't need to intro your response"
error("\n~~~ paraphrase() response contain : ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
error("\nparaphrase() response contain : ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
end
response = GeneralUtils.remove_french_accents(response)
response = replace(response, '*'=>"")
response = replace(response, '$' => "USD")
response = replace(response, '`' => "")
response = GeneralUtils.remove_french_accents(response)
responsedict = GeneralUtils.textToDict(response, ["Paraphrase"],
rightmarker=":", symbolkey=true, lowercasekey=true)
# check whether response has all header
detected_kw = GeneralUtils.detect_keyword(header, response)
if 0 ∈ values(detected_kw)
errornote = "\nYiemAgent paraphrase() response does not have all header"
continue
elseif sum(values(detected_kw)) > length(header)
errornote = "\nYiemAgent paraphrase() response has duplicated header"
continue
end
responsedict = GeneralUtils.textToDict(response, header;
dictKey=dictkey, symbolkey=true)
for i ∈ [:paraphrase]
if length(JSON3.write(responsedict[i])) == 0
error("$i is empty ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
error("$i is empty ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
end
end
@@ -770,7 +807,7 @@ function paraphrase(text2textInstructLLM::Function, text::String)
end
end
println("\n~~~ paraphrase() ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
println("\nparaphrase() ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
pprintln(Dict(responsedict))
result = responsedict[:paraphrase]
@@ -781,10 +818,10 @@ function paraphrase(text2textInstructLLM::Function, text::String)
showerror(io, e)
errorMsg = String(take!(io))
st = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace()))
println("\nAttempt $attempt. Error occurred: $errorMsg\n$st ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
println("\nAttempt $attempt. Error occurred: $errorMsg\n$st ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
end
end
error("generatechat failed to generate a response")
error("paraphrase() failed to generate a response")
end
@@ -947,7 +984,7 @@ end
# ]
# # put in model format
# prompt = GeneralUtils.formatLLMtext(_prompt; formatname="llama3instruct")
# prompt = GeneralUtils.formatLLMtext(_prompt; formatname="qwen")
# prompt *=
# """
# <|start_header_id|>assistant<|end_header_id|>
@@ -979,7 +1016,7 @@ end
# state[:isterminal] = true
# state[:reward] = 1
# end
# println("--> 5 Evaluator ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
# println("--> 5 Evaluator ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
# pprintln(Dict(responsedict))
# return responsedict[:score]
# catch e

View File

@@ -122,47 +122,53 @@ This function takes in a vector of dictionaries and outputs a single string wher
# Arguments
- `vecd::Vector`
a vector of dictionaries
A vector of dictionaries containing chat messages
- `withkey::Bool`
whether to include the key in the output text. Default is true
Whether to include the name as a prefix in the output text. Default is true
- `range::Union{Nothing,UnitRange,Int}`
Optional range of messages to include. If nothing, includes all messages
# Return
a string with the formatted dictionaries
# Returns
A formatted string where each line contains either:
- If withkey=true: "name> message\n"
- If withkey=false: "message\n"
# Example
```jldoctest
julia> using Revise
julia> using GeneralUtils
julia> vecd = [Dict(:name => "John", :text => "Hello"), Dict(:name => "Jane", :text => "Goodbye")]
julia> GeneralUtils.vectorOfDictToText(vecd, withkey=true)
"John> Hello\nJane> Goodbye\n"
```
# Signature
"""
function chatHistoryToText(vecd::Vector; withkey=true)::String
function chatHistoryToText(vecd::Vector; withkey=true, range=nothing)::String
# Initialize an empty string to hold the final text
text = ""
# Get the elements within the specified range, or all elements if no range provided
elements = isnothing(range) ? vecd : vecd[range]
# Determine whether to include the key in the output text or not
if withkey
# Loop through each dictionary in the input vector
for d in vecd
# Extract the 'name' and 'text' keys from the dictionary
name = d[:name]
_text = d[:text]
# Append the formatted string to the text variable
text *= "$name> $_text \n"
# Loop through each dictionary in the input vector
for d in elements
# Extract the 'name' and 'text' keys from the dictionary
name = d[:name]
_text = d[:text]
# Append the formatted string to the text variable
text *= "$name:> $_text \n"
end
else
# Loop through each dictionary in the input vector
for d in vecd
# Iterate over all key-value pairs in the dictionary
for (k, v) in d
# Append the formatted string to the text variable
text *= "$v \n"
end
end
# Loop through each dictionary in the input vector
for d in elements
# Iterate over all key-value pairs in the dictionary
for (k, v) in d
# Append the formatted string to the text variable
text *= "$v \n"
end
end
end
# Return the final text
@@ -191,6 +197,35 @@ end
""" Create a dictionary representing an event with optional details.
# Arguments
- `event_description::Union{String, Nothing}`
A description of the event
- `timestamp::Union{DateTime, Nothing}`
The time when the event occurred
- `subject::Union{String, Nothing}`
The subject or entity associated with the event
- `thought::Union{AbstractDict, Nothing}`
Any associated thoughts or metadata
- `actionname::Union{String, Nothing}`
The name of the action performed (e.g., "CHAT", "CHECKINVENTORY")
- `actioninput::Union{String, Nothing}`
Input or parameters for the action
- `location::Union{String, Nothing}`
Where the event took place
- `equipment_used::Union{String, Nothing}`
Equipment involved in the event
- `material_used::Union{String, Nothing}`
Materials used during the event
- `outcome::Union{String, Nothing}`
The result or consequence of the event after action execution
- `note::Union{String, Nothing}`
Additional notes or comments
# Returns
A dictionary with event details as symbol-keyed key-value pairs
"""
function eventdict(;
event_description::Union{String, Nothing}=nothing,
timestamp::Union{DateTime, Nothing}=nothing,
@@ -220,24 +255,62 @@ function eventdict(;
end
function createTimeline(memory::T1; skiprecent::Integer=0) where {T1<:AbstractVector}
events = memory[1:end-skiprecent]
""" Create a formatted timeline string from a sequence of events.
# Arguments
- `events::T1`
Vector of event dictionaries containing subject, actioninput and optional outcome fields
Each event dictionary should have the following keys:
- :subject - The subject or entity performing the action
- :actioninput - The action or input performed by the subject
- :outcome - (Optional) The result or outcome of the action
# Returns
- `timeline::String`
A formatted string representing the events with their subjects, actions, and optional outcomes
Format: "{index}) {subject}> {actioninput} {outcome}\n" for each event
# Example
events = [
Dict(:subject => "User", :actioninput => "Hello", :outcome => nothing),
Dict(:subject => "Assistant", :actioninput => "Hi there!", :outcome => "with a smile")
]
timeline = createTimeline(events)
# 1) User> Hello
# 2) Assistant> Hi there! with a smile
"""
function createTimeline(events::T1; eventindex::Union{UnitRange, Nothing}=nothing
) where {T1<:AbstractVector}
# Initialize empty timeline string
timeline = ""
for (i, event) in enumerate(events)
if event[:outcome] === nothing
timeline *= "$i) $(event[:subject])> $(event[:actioninput])\n"
# Determine which indices to use - either provided range or full length
ind =
if eventindex !== nothing
[eventindex...]
else
timeline *= "$i) $(event[:subject])> $(event[:actioninput]) $(event[:outcome])\n"
1:length(events)
end
# Iterate through events and format each one
for (i, event) in zip(ind, events)
# If no outcome exists, format without outcome
if event[:outcome] === nothing
timeline *= "Event_$i $(event[:subject])> $(event[:actioninput])\n"
# If outcome exists, include it in formatting
else
timeline *= "Event_$i $(event[:subject])> $(event[:actioninput]) $(event[:outcome])\n"
end
end
# Return formatted timeline string
return timeline
end
# """ Convert a single chat dictionary into LLM model instruct format.
# # Llama 3 instruct format example

41
test/Manifest.toml Normal file
View File

@@ -0,0 +1,41 @@
# This file is machine-generated - editing it directly is not advised
julia_version = "1.11.4"
manifest_format = "2.0"
project_hash = "71d91126b5a1fb1020e1098d9d492de2a4438fd2"
[[deps.Base64]]
uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f"
version = "1.11.0"
[[deps.InteractiveUtils]]
deps = ["Markdown"]
uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240"
version = "1.11.0"
[[deps.Logging]]
uuid = "56ddb016-857b-54e1-b83d-db4d58db5568"
version = "1.11.0"
[[deps.Markdown]]
deps = ["Base64"]
uuid = "d6f4376e-aef5-505a-96c1-9c027394607a"
version = "1.11.0"
[[deps.Random]]
deps = ["SHA"]
uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
version = "1.11.0"
[[deps.SHA]]
uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce"
version = "0.7.0"
[[deps.Serialization]]
uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b"
version = "1.11.0"
[[deps.Test]]
deps = ["InteractiveUtils", "Logging", "Random", "Serialization"]
uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
version = "1.11.0"

2
test/Project.toml Normal file
View File

@@ -0,0 +1,2 @@
[deps]
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"

View File

@@ -27,30 +27,50 @@
"description": "agent role"
},
"organization": {
"value": "yiem_hq",
"value": "yiem_branch_1",
"description": "organization name"
},
"externalservice": {
"text2textinstruct": {
"mqtttopic": "/loadbalancer/requestingservice",
"description": "text to text service with instruct LLM",
"llminfo": {
"name": "llama3instruct"
}
},
"virtualWineCustomer_1": {
"mqtttopic": "/virtualenvironment/winecustomer",
"description": "text to text service with instruct LLM that act as wine customer",
"llminfo": {
"name": "llama3instruct"
}
},
"text2textchat": {
"mqtttopic": "/loadbalancer/requestingservice",
"description": "text to text service with instruct LLM",
"llminfo": {
"name": "llama3instruct"
}
}
"loadbalancer": {
"mqtttopic": "/loadbalancer/requestingservice",
"description": "text to text service with instruct LLM"
},
"text2textinstruct": {
"mqtttopic": "/loadbalancer/requestingservice",
"description": "text to text service with instruct LLM",
"llminfo": {
"name": "llama3instruct"
}
},
"virtualWineCustomer_1": {
"mqtttopic": "/virtualenvironment/winecustomer",
"description": "text to text service with instruct LLM that act as wine customer",
"llminfo": {
"name": "llama3instruct"
}
},
"text2textchat": {
"mqtttopic": "/loadbalancer/requestingservice",
"description": "text to text service with instruct LLM",
"llminfo": {
"name": "llama3instruct"
}
},
"wineDB" : {
"description": "A wine database connection info for LibPQ client",
"host": "192.168.88.12",
"port": 10201,
"dbname": "wineDB",
"user": "yiemtechnologies",
"password": "yiemtechnologies@Postgres_0.0"
},
"SQLVectorDB" : {
"description": "A wine database connection info for LibPQ client",
"host": "192.168.88.12",
"port": 10203,
"dbname": "SQLVectorDB",
"user": "yiemtechnologies",
"password": "yiemtechnologies@Postgres_0.0"
}
}
}

View File

@@ -1,9 +0,0 @@
# Ad-hoc snippet: parse a structured LLM evaluation reply into a Dict keyed by
# the expected section headers (symbol keys, values taken after the ":" marker).
using GeneralUtils
response = "trajectory_evaluation:\nThe trajectory is correct so far. The thought accurately reflects the user's question, and the action taken is a valid attempt to retrieve data from the database that matches the specified criteria.\n\nanswer_evaluation:\nThe observation provides information about two red wines from Bordeaux rive droite in France, which partially answers the question. However, it does not provide a complete answer as it only lists the wine names and characteristics, but does not explicitly state whether there are any other wines that match the criteria.\n\naccepted_as_answer: No\n\nscore: 6\nThe trajectory is mostly correct, but the observation does not fully address the question.\n\nsuggestion: Consider adding more filters or parameters to the database query to retrieve a complete list of wines that match the specified criteria."
# Split `response` into the five named sections; ":" is the right marker of a key.
responsedict = GeneralUtils.textToDict(response,
    ["trajectory_evaluation", "answer_evaluation", "accepted_as_answer", "score", "suggestion"],
    rightmarker=":", symbolkey=true)

0
test/runtests.jl Normal file
View File

View File

@@ -1,272 +1,292 @@
using Revise
using JSON, JSON3, Dates, UUIDs, PrettyPrinting, LibPQ, Base64, DataFrames
using YiemAgent, GeneralUtils
using Base.Threads
# ---------------------------------------------- 100 --------------------------------------------- #
# load config
config = JSON3.read("./test/config.json")
# config = copy(JSON3.read("../mountvolume/config.json"))
# Execute `sql` against the wineDB Postgres instance and return the LibPQ result.
# NOTE(review): connection credentials are hard-coded here; they should come
# from `config` like the other service settings.
# NOTE(review): a new connection is opened per call and not closed if execute
# throws — acceptable for a test script, not for production.
function executeSQL(sql::T) where {T<:AbstractString}
    DBconnection = LibPQ.Connection("host=192.168.88.12 port=10201 dbname=wineDB user=yiemtechnologies password=yiemtechnologies@Postgres_0.0")
    result = LibPQ.execute(DBconnection, sql)
    close(DBconnection)
    return result
end
# Execute `sql` against the SQLVectorDB Postgres instance and return the LibPQ
# result.
# NOTE(review): hard-coded credentials and an untyped `sql` argument — the
# sibling executeSQL constrains it to AbstractString.
function executeSQLVectorDB(sql)
    DBconnection = LibPQ.Connection("host=192.168.88.12 port=10203 dbname=SQLVectorDB user=yiemtechnologies password=yiemtechnologies@Postgres_0.0")
    result = LibPQ.execute(DBconnection, sql)
    close(DBconnection)
    return result
end
# Send `prompt` to the text2text instruct LLM service over MQTT and return the
# generated text.
# NOTE(review): assumes the reply always carries [:response][:text]; there is
# no handling for a failed or timed-out request — confirm the failure contract
# of GeneralUtils.sendReceiveMqttMsg.
function text2textInstructLLM(prompt::String)
    msgMeta = GeneralUtils.generate_msgMeta(
        config[:externalservice][:text2textinstruct][:mqtttopic];
        msgPurpose="inference",
        senderName="yiemagent",
        senderId=string(uuid4()),
        receiverName="text2textinstruct",
        mqttBrokerAddress=config[:mqttServerInfo][:broker],
        mqttBrokerPort=config[:mqttServerInfo][:port],
    )
    outgoingMsg = Dict(
        :msgMeta => msgMeta,
        :payload => Dict(
            :text => prompt,
            :kwargs => Dict(
                :num_ctx => 16384,   # context window requested from the model
                :temperature => 0.2, # low temperature → more deterministic output
            )
        )
    )
    _response = GeneralUtils.sendReceiveMqttMsg(outgoingMsg; timeout=6000)
    response = _response[:response][:text]
    return response
end
# Request a text embedding for `text` from the LLM service over MQTT and return
# the embeddings field of the reply (a vector of embedding vectors — one per
# input string).
function getEmbedding(text::T) where {T<:AbstractString}
    msgMeta = GeneralUtils.generate_msgMeta(
        config[:externalservice][:text2textinstruct][:mqtttopic];
        msgPurpose="embedding",
        senderName="yiemagent",
        senderId=string(uuid4()),
        receiverName="text2textinstruct",
        mqttBrokerAddress=config[:mqttServerInfo][:broker],
        mqttBrokerPort=config[:mqttServerInfo][:port],
    )
    outgoingMsg = Dict(
        :msgMeta => msgMeta,
        :payload => Dict(
            :text => [text] # must be a vector of string
        )
    )
    response = GeneralUtils.sendReceiveMqttMsg(outgoingMsg; timeout=6000)
    embedding = response[:response][:embeddings]
    return embedding
end
# Nearest-neighbour lookup: embed `text`, then select the `limit` rows of
# `tablename` closest by the `<->` distance operator on `embeddingColumnName`,
# returned as a DataFrame with an added `distance` column.
# NOTE(review): `tablename` / `embeddingColumnName` / the embedding are
# interpolated directly into SQL — safe only for trusted, internal inputs.
function findSimilarTextFromVectorDB(text::T1, tablename::T2, embeddingColumnName::T3,
    vectorDB::Function; limit::Integer=1
    )::DataFrame where {T1<:AbstractString, T2<:AbstractString, T3<:AbstractString}
    # get embedding from LLM service
    embedding = getEmbedding(text)[1]
    # check whether there is close enough vector already store in vectorDB. if no, add, else skip
    sql = """
    SELECT *, $embeddingColumnName <-> '$embedding' as distance
    FROM $tablename
    ORDER BY distance LIMIT $limit;
    """
    response = vectorDB(sql)
    df = DataFrame(response)
    return df
end
# Look up a previously cached SQL statement whose stored query embedding is
# within `maxdistance` of `query`'s embedding.
# Returns a named tuple `(dict=sql_text, distance=d)` on a hit, or
# `(dict=nothing, distance=nothing)` when no close-enough entry exists.
function similarSQLVectorDB(query; maxdistance::Integer=100)
    tablename = "sqlllm_decision_repository"
    # get embedding of the query
    df = findSimilarTextFromVectorDB(query, tablename,
        "function_input_embedding", executeSQLVectorDB)
    row, col = size(df)
    # an empty result set means nothing is cached yet
    distance = row == 0 ? Inf : df[1, :distance]
    if row != 0 && distance < maxdistance
        # if there is usable SQL, return it.
        output_b64 = df[1, :function_output_base64] # pick the closest match
        output_str = String(base64decode(output_b64))
        rowid = df[1, :id]
        println("\n~~~ found similar sql. row id $rowid, distance $distance ", @__FILE__, " ", @__LINE__)
        return (dict=output_str, distance=distance)
    else
        println("\n~~~ similar sql not found, max distance $maxdistance ", @__FILE__, " ", @__LINE__)
        return (dict=nothing, distance=nothing)
    end
end
# Cache `SQL` in the vector DB keyed by the embedding of `query`, unless an
# entry within `maxdistance` already exists.
# NOTE(review): single quotes are stripped (not escaped) before the INSERT, so
# the stored plain-text columns can differ from the original text; only the
# base64 column preserves the exact SQL.
function insertSQLVectorDB(query::T1, SQL::T2; maxdistance::Integer=1) where {T1<:AbstractString, T2<:AbstractString}
    tablename = "sqlllm_decision_repository"
    # get embedding of the query
    # query = state[:thoughtHistory][:question]
    df = findSimilarTextFromVectorDB(query, tablename,
        "function_input_embedding", executeSQLVectorDB)
    row, col = size(df)
    distance = row == 0 ? Inf : df[1, :distance]
    if row == 0 || distance > maxdistance # no close enough SQL stored in the database
        query_embedding = getEmbedding(query)[1]
        query = replace(query, "'" => "")
        sql_base64 = base64encode(SQL)
        sql_ = replace(SQL, "'" => "")
        sql = """
        INSERT INTO $tablename (function_input, function_output, function_output_base64, function_input_embedding) VALUES ('$query', '$sql_', '$sql_base64', '$query_embedding');
        """
        println("\n~~~ added new decision to vectorDB ", @__FILE__, " ", @__LINE__)
        println(sql)
        _ = executeSQLVectorDB(sql)
    end
end
# Look up a cached sommelier decision whose stored context embedding is within
# `maxdistance` of `recentevents`' embedding.
# Returns the decoded decision as a Dict, or `nothing` when no close-enough
# entry exists.
function similarSommelierDecision(recentevents::T1; maxdistance::Integer=5
    )::Union{AbstractDict, Nothing} where {T1<:AbstractString}
    tablename = "sommelier_decision_repository"
    # find similar
    println("\n~~~ search vectorDB for this: $recentevents ", @__FILE__, " ", @__LINE__)
    df = findSimilarTextFromVectorDB(recentevents, tablename,
        "function_input_embedding", executeSQLVectorDB)
    row, col = size(df)
    distance = row == 0 ? Inf : df[1, :distance]
    if row != 0 && distance < maxdistance
        # if there is usable decision, return it.
        rowid = df[1, :id]
        println("\n~~~ found similar decision. row id $rowid, distance $distance ", @__FILE__, " ", @__LINE__)
        output_b64 = df[1, :function_output_base64] # pick the closest match
        _output_str = String(base64decode(output_b64))
        # decode JSON and copy into a plain mutable Dict
        output = copy(JSON3.read(_output_str))
        return output
    else
        println("\n~~~ similar decision not found, max distance $maxdistance ", @__FILE__, " ", @__LINE__)
        return nothing
    end
end
"""
    insertSommelierDecision(recentevents, decision; maxdistance=5)

Cache `decision` in the sommelier decision repository keyed by the embedding
of `recentevents`, unless an entry within `maxdistance` already exists.
"""
function insertSommelierDecision(recentevents::T1, decision::T2; maxdistance::Integer=5
    ) where {T1<:AbstractString, T2<:AbstractDict}
    tablename = "sommelier_decision_repository"
    # find similar cached entry for this context
    df = findSimilarTextFromVectorDB(recentevents, tablename,
        "function_input_embedding", executeSQLVectorDB)
    row, _ = size(df)
    distance = row == 0 ? Inf : df[1, :distance]
    if row == 0 || distance > maxdistance # no close enough decision stored in the database
        # BUG FIX: was `a.func[:getEmbedding](recentevents)`, which reached into
        # the global agent `a`; call the local `getEmbedding` helper directly,
        # as the sibling insertSQLVectorDB does.
        recentevents_embedding = getEmbedding(recentevents)[1]
        recentevents = replace(recentevents, "'" => "")
        decision_json = JSON3.write(decision)
        decision_base64 = base64encode(decision_json)
        decision = replace(decision_json, "'" => "")
        sql = """
        INSERT INTO $tablename (function_input, function_output, function_output_base64, function_input_embedding) VALUES ('$recentevents', '$decision', '$decision_base64', '$recentevents_embedding');
        """
        println("\n~~~ added new decision to vectorDB ", @__FILE__, " ", @__LINE__)
        println(sql)
        _ = executeSQLVectorDB(sql)
    else
        println("~~~ similar decision previously cached, distance $distance ", @__FILE__, " ", @__LINE__)
    end
end
# Session id used to tag the agent instance and its messages.
sessionId = "12345"
# External capabilities injected into the agent: embedding, instruct LLM,
# wine DB access, and vector-DB caching of SQL / sommelier decisions.
externalFunction = (
    getEmbedding=getEmbedding,
    text2textInstructLLM=text2textInstructLLM,
    executeSQL=executeSQL,
    similarSQLVectorDB=similarSQLVectorDB,
    insertSQLVectorDB=insertSQLVectorDB,
    similarSommelierDecision=similarSommelierDecision,
    insertSommelierDecision=insertSommelierDecision,
)
# Build the sommelier agent instance under test.
a = YiemAgent.sommelier(
    externalFunction;
    name="Ton",
    id=sessionId, # agent instance id
    retailername="Yiem",
)
# Interactive loop: read a user line, pass it to the agent, print the reply.
while true
    println("your respond: ")
    user_answer = readline()
    response = YiemAgent.conversation(a, Dict(:text=> user_answer))
    println("\n$response")
end
# response = YiemAgent.conversation(a, Dict(:text=> "I want to get a French red wine under 100."))
using Revise
using JSON, JSON3, Dates, UUIDs, PrettyPrinting, LibPQ, Base64, DataFrames
using YiemAgent, GeneralUtils
using Base.Threads
# ---------------------------------------------- 100 --------------------------------------------- #
# load config
config = JSON3.read("/appfolder/app/dev/YiemAgent/test/config.json")
# config = copy(JSON3.read("../mountvolume/config.json"))
"""
    executeSQL(sql)

Execute `sql` against the wineDB Postgres instance (connection settings read
from `config[:externalservice][:wineDB]`) and return the LibPQ result.

FIX: the connection is now closed even when `LibPQ.execute` throws; previously
an error leaked the connection.
"""
function executeSQL(sql::T) where {T<:AbstractString}
    host = config[:externalservice][:wineDB][:host]
    port = config[:externalservice][:wineDB][:port]
    dbname = config[:externalservice][:wineDB][:dbname]
    user = config[:externalservice][:wineDB][:user]
    password = config[:externalservice][:wineDB][:password]
    DBconnection = LibPQ.Connection("host=$host port=$port dbname=$dbname user=$user password=$password")
    try
        return LibPQ.execute(DBconnection, sql)
    finally
        close(DBconnection)
    end
end
"""
    executeSQLVectorDB(sql)

Execute `sql` against the SQLVectorDB Postgres instance (connection settings
read from `config[:externalservice][:SQLVectorDB]`) and return the LibPQ
result.

FIX: the connection is now closed even when `LibPQ.execute` throws; the `sql`
argument is constrained to `AbstractString` for consistency with `executeSQL`.
"""
function executeSQLVectorDB(sql::T) where {T<:AbstractString}
    host = config[:externalservice][:SQLVectorDB][:host]
    port = config[:externalservice][:SQLVectorDB][:port]
    dbname = config[:externalservice][:SQLVectorDB][:dbname]
    user = config[:externalservice][:SQLVectorDB][:user]
    password = config[:externalservice][:SQLVectorDB][:password]
    DBconnection = LibPQ.Connection("host=$host port=$port dbname=$dbname user=$user password=$password")
    try
        return LibPQ.execute(DBconnection, sql)
    finally
        close(DBconnection)
    end
end
# Send `prompt` to an instruct LLM (selected by `modelsize`) through the MQTT
# load balancer and return the generated text, retrying up to `maxattempt`
# times with a 3 s back-off. Returns `nothing` when all attempts fail.
# NOTE(review): sendReceiveMqttMsg also takes `maxattempt`, so total tries can
# be up to maxattempt^2 — confirm that nesting is intended.
function text2textInstructLLM(prompt::String; maxattempt::Integer=2, modelsize::String="medium")
    msgMeta = GeneralUtils.generate_msgMeta(
        config[:externalservice][:loadbalancer][:mqtttopic];
        msgPurpose="inference",
        senderName="yiemagent",
        senderId=sessionId,
        receiverName="text2textinstruct_$modelsize",
        mqttBrokerAddress=config[:mqttServerInfo][:broker],
        mqttBrokerPort=config[:mqttServerInfo][:port],
    )
    outgoingMsg = Dict(
        :msgMeta => msgMeta,
        :payload => Dict(
            :text => prompt,
            :kwargs => Dict(
                :num_ctx => 16384,   # context window requested from the model
                :temperature => 0.2, # low temperature → more deterministic output
            )
        )
    )
    response = nothing
    for attempts in 1:maxattempt
        _response = GeneralUtils.sendReceiveMqttMsg(outgoingMsg; timeout=180, maxattempt=maxattempt)
        payload = _response[:response]
        # accept only a successful reply that actually carries text
        if _response[:success] && payload[:text] !== nothing
            response = _response[:response][:text]
            break
        else
            println("\n<text2textInstructLLM()> attempt $attempts/$maxattempt failed ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
            pprintln(outgoingMsg)
            println("</text2textInstructLLM()> attempt $attempts/$maxattempt failed ", @__FILE__, ":", @__LINE__, " $(Dates.now())\n")
            sleep(3) # brief back-off before retrying
        end
    end
    return response
end
# Request a text embedding for `text` from the embedding service (via the MQTT
# load balancer topic) and return the embeddings field of the reply — one
# embedding vector per input string.
function getEmbedding(text::T) where {T<:AbstractString}
    meta = GeneralUtils.generate_msgMeta(
        config[:externalservice][:loadbalancer][:mqtttopic];
        msgPurpose="embedding",
        senderName="yiemagent",
        senderId=sessionId,
        receiverName="textembedding",
        mqttBrokerAddress=config[:mqttServerInfo][:broker],
        mqttBrokerPort=config[:mqttServerInfo][:port],
    )
    # the embedding service expects a vector of strings
    msg = Dict(:msgMeta => meta, :payload => Dict(:text => [text]))
    reply = GeneralUtils.sendReceiveMqttMsg(msg; timeout=120, maxattempt=3)
    return reply[:response][:embeddings]
end
# Nearest-neighbour lookup: embed `text` via the LLM embedding service, then
# select the `limit` rows of `tablename` closest by the `<->` distance operator
# on `embeddingColumnName`. Returns a DataFrame with an added `distance` column.
function findSimilarTextFromVectorDB(text::T1, tablename::T2, embeddingColumnName::T3,
    vectorDB::Function; limit::Integer=1
    )::DataFrame where {T1<:AbstractString, T2<:AbstractString, T3<:AbstractString}
    # embed the probe text
    queryvec = getEmbedding(text)[1]
    # rank rows by embedding distance to the probe and keep the closest `limit`
    sql = """
    SELECT *, $embeddingColumnName <-> '$queryvec' as distance
    FROM $tablename
    ORDER BY distance LIMIT $limit;
    """
    return DataFrame(vectorDB(sql))
end
"""
    similarSQLVectorDB(query; maxdistance=100)

Look up a previously cached SQL statement whose stored query embedding is
within `maxdistance` of `query`'s embedding.

Returns `(dict=sql_text, distance=d)` on a hit, or
`(dict=nothing, distance=nothing)` when no close-enough entry exists.
"""
function similarSQLVectorDB(query; maxdistance::Integer=100)
    tablename = "sqlllm_decision_repository"
    # nearest cached entry by embedding distance
    df = findSimilarTextFromVectorDB(query, tablename,
        "function_input_embedding", executeSQLVectorDB)
    row, _ = size(df)
    # an empty result set means nothing is cached yet
    distance = row == 0 ? Inf : df[1, :distance]
    if row != 0 && distance < maxdistance
        # usable cached SQL found — decode it from its base64 column
        output_b64 = df[1, :function_output_base64] # pick the closest match
        output_str = String(base64decode(output_b64))
        rowid = df[1, :id]
        println("\n~~~ found similar sql. row id $rowid, distance $distance ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
        return (dict=output_str, distance=distance)
    else
        println("\n~~~ similar sql not found, max distance $maxdistance ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
        return (dict=nothing, distance=nothing)
    end
end
"""
    insertSQLVectorDB(query, SQL; maxdistance=3)

Cache `SQL` in the vector DB keyed by the embedding of `query`, unless an
entry within `maxdistance` already exists. The exact SQL text is preserved in
the base64 column; the plain-text columns are quote-escaped for the INSERT.
"""
function insertSQLVectorDB(query::T1, SQL::T2; maxdistance::Integer=3) where {T1<:AbstractString, T2<:AbstractString}
    tablename = "sqlllm_decision_repository"
    # nearest cached entry by embedding distance
    df = findSimilarTextFromVectorDB(query, tablename,
        "function_input_embedding", executeSQLVectorDB)
    row, _ = size(df)
    distance = row == 0 ? Inf : df[1, :distance]
    if row == 0 || distance > maxdistance # no close enough SQL stored in the database
        query_embedding = getEmbedding(query)[1]
        # FIX: escape single quotes as '' (SQL standard) instead of deleting
        # them, so the stored plain-text columns are not corrupted.
        query = replace(query, "'" => "''")
        sql_base64 = base64encode(SQL)
        sql_ = replace(SQL, "'" => "''")
        sql = """
        INSERT INTO $tablename (function_input, function_output, function_output_base64, function_input_embedding) VALUES ('$query', '$sql_', '$sql_base64', '$query_embedding');
        """
        _ = executeSQLVectorDB(sql)
    end
end
"""
    similarSommelierDecision(recentevents; maxdistance=3)

Look up a cached sommelier decision whose stored context embedding is within
`maxdistance` of `recentevents`' embedding. Returns the decoded decision as a
`Dict`, or `nothing` when no close-enough entry exists.
"""
function similarSommelierDecision(recentevents::T1; maxdistance::Integer=3
    )::Union{AbstractDict, Nothing} where {T1<:AbstractString}
    tablename = "sommelier_decision_repository"
    # find the nearest cached decision for this context
    # (log format aligned with the other lookup helpers: file:line timestamp)
    println("\n~~~ search vectorDB for this: $recentevents ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
    df = findSimilarTextFromVectorDB(recentevents, tablename,
        "function_input_embedding", executeSQLVectorDB)
    row, _ = size(df)
    distance = row == 0 ? Inf : df[1, :distance]
    if row != 0 && distance < maxdistance
        # usable cached decision: decode the JSON payload from its base64 column
        rowid = df[1, :id]
        println("\n~~~ found similar decision. row id $rowid, distance $distance ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
        output_b64 = df[1, :function_output_base64] # pick the closest match
        _output_str = String(base64decode(output_b64))
        output = copy(JSON3.read(_output_str))
        return output
    else
        println("\n~~~ similar decision not found, max distance $maxdistance ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
        return nothing
    end
end
"""
    insertSommelierDecision(recentevents, decision; maxdistance=5)

Cache `decision` in the sommelier decision repository keyed by the embedding
of `recentevents`, unless an entry within `maxdistance` already exists.
"""
function insertSommelierDecision(recentevents::T1, decision::T2; maxdistance::Integer=5
    ) where {T1<:AbstractString, T2<:AbstractDict}
    tablename = "sommelier_decision_repository"
    # find similar cached entry for this context
    df = findSimilarTextFromVectorDB(recentevents, tablename,
        "function_input_embedding", executeSQLVectorDB)
    row, _ = size(df)
    distance = row == 0 ? Inf : df[1, :distance]
    if row == 0 || distance > maxdistance # no close enough decision stored in the database
        # BUG FIX: was `a.func[:getEmbedding](recentevents)`, which reached into
        # the global agent `a`; call the local `getEmbedding` helper directly,
        # as the sibling insertSQLVectorDB does.
        recentevents_embedding = getEmbedding(recentevents)[1]
        recentevents = replace(recentevents, "'" => "")
        decision_json = JSON3.write(decision)
        decision_base64 = base64encode(decision_json)
        decision = replace(decision_json, "'" => "")
        sql = """
        INSERT INTO $tablename (function_input, function_output, function_output_base64, function_input_embedding) VALUES ('$recentevents', '$decision', '$decision_base64', '$recentevents_embedding');
        """
        println("\n~~~ added new decision to vectorDB ", @__FILE__, " ", @__LINE__)
        println(sql)
        _ = executeSQLVectorDB(sql)
    else
        println("~~~ similar decision previously cached, distance $distance ", @__FILE__, " ", @__LINE__)
    end
end
# Session id used to tag the agent instance and its MQTT messages.
sessionId = "12345"
# External capabilities injected into the agent: embedding, instruct LLM,
# wine DB access, and vector-DB caching of SQL / sommelier decisions.
externalFunction = (
    getEmbedding=getEmbedding,
    text2textInstructLLM=text2textInstructLLM,
    executeSQL=executeSQL,
    similarSQLVectorDB=similarSQLVectorDB,
    insertSQLVectorDB=insertSQLVectorDB,
    similarSommelierDecision=similarSommelierDecision,
    insertSommelierDecision=insertSommelierDecision,
)
# Build the sommelier agent instance under test.
a = YiemAgent.sommelier(
    externalFunction;
    name="Ton",
    id=sessionId, # agent instance id
    retailername="Yiem",
)
# Interactive loop: read a user line, pass it to the agent, print the reply.
while true
    print("\nyour respond: ")
    user_answer = readline()
    response = YiemAgent.conversation(a, Dict(:text=> user_answer))
    println("\n$response")
end
# response = YiemAgent.conversation(a, Dict(:text=> "I want to get a French red wine under 100."))
# Sample conversation opener kept for manual testing:
"""
hello I want to get a bottle of red wine for my boss. I have a budget around 50 dollars. Show me some options.
I have no idea about his wine taste but he likes spicy food.
"""