diff --git a/src/interface.jl b/src/interface.jl index 6b8756c..808f18c 100644 --- a/src/interface.jl +++ b/src/interface.jl @@ -150,7 +150,7 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen else systemmsg = """ - Your name is $(a.name). You are a helpful English-speaking assistant, acting as a polite, website-based sommelier for $(a.retailername)'s online store. + Your name is $(a.name). You are a helpful English-speaking assistant, acting as a polite, website-based sommelier for $(a.retailername)'s wine store. Your goal includes: 1) Establish a connection with the customer by greeting them warmly 2) Help them select the best wines from your inventory that align with their preferences @@ -159,10 +159,10 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen 1) Make an informed decision about what you need to do to achieve the goal 2) Thanks the user when they don't need any further assistance and invite them to comeback next time - Your responsibility does NOT include: + Your responsibility excludes: 1) Asking or guiding the user to make a purchase 2) Processing sales orders or engaging in any other sales-related activities - 3) Providing services other than making recommendations. + 3) Answering questions and offering additional services beyond just recommendations, such as delivery, box, gift wrapping or packaging, personalized messages. Customers can reach out to our sales at the store. At each round of conversation, you will be given the current situation: Your recent events: latest 5 events of the situation @@ -194,7 +194,7 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen Can be one of the following functions: - CHATBOX which you can use to talk with the user. The input is your intentions for the dialogue. Be specific. - CHECKINVENTORY which you can use to check info about wine you want in your inventory. The input is a search term in verbal English. 
- Good query example: black car, a stereo, 200 mile range, electric motor. + Good query example: white wine, full-bodied, France, less than 2000 USD. - ENDCONVERSATION which you can use when you believe the user has concluded their interaction, to properly end the conversation with them. Input is "NA". 5) Action_input: input of the action @@ -214,7 +214,7 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen if haskey(a.memory[:shortmem], :available_wine) # check if wine name mentioned in timeline, only check first wine name is enough # because agent will recommend every wines it found each time. - df = a.memory[:shortmem][:available_wine] + df = a.memory[:shortmem][:available_wine] winenames = df[:, :wine_name] for winename in winenames if !occursin(winename, chathistory) @@ -260,6 +260,7 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen response = a.func[:text2textInstructLLM](prompt) response = GeneralUtils.remove_french_accents(response) + response = replace(response, '*'=>"") # check if response contain more than one functions from ["CHATBOX", "CHECKINVENTORY", "ENDCONVERSATION"] count = 0 @@ -284,24 +285,30 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol,Any} where {T<:agen continue end + checkFlag = false for i ∈ [:understanding, :plan, :action_name] if length(responsedict[i]) == 0 error("$i is empty ", @__FILE__, " ", @__LINE__) errornote = "$i is empty" println("Attempt $attempt $errornote ", @__FILE__, " ", @__LINE__) - continue + checkFlag = true + break end end + checkFlag == true ? 
continue : nothing # check if there are more than 1 key per categories + checkFlag = false for i ∈ [:understanding, :plan, :action_name, :action_input] matchkeys = GeneralUtils.findMatchingDictKey(responsedict, i) if length(matchkeys) > 1 errornote = "DecisionMaker has more than one key per categories" println("Attempt $attempt $errornote ", @__FILE__, " ", @__LINE__) - continue + checkFlag = true + break end end + checkFlag == true ? continue : nothing println("\n~~~ Yiem decisionMaker() ", @__FILE__, " ", @__LINE__) pprintln(Dict(responsedict)) @@ -691,6 +698,7 @@ function conversation(a::sommelier, userinput::Dict) actionname = nothing result = nothing chatresponse = nothing + userinput[:text] = GeneralUtils.remove_french_accents(userinput[:text]) if userinput[:text] == "newtopic" clearhistory(a) @@ -808,7 +816,7 @@ function think(a::T)::NamedTuple{(:actionname, :result),Tuple{String,String}} wh errormsg::Union{AbstractString,Nothing} = haskey(response, :errormsg) ? response[:errormsg] : nothing success::Bool = haskey(response, :success) ? 
response[:success] : false - #[WORKING] manage memory (pass msg to generatechat) + # manage memory (pass msg to generatechat) if actionname ∈ ["CHATBOX", "PRESENTBOX", "ENDCONVERSATION"] chatresponse = generatechat(a, thoughtDict) push!(a.memory[:events], @@ -829,25 +837,28 @@ function think(a::T)::NamedTuple{(:actionname, :result),Tuple{String,String}} wh # ) ) result = chatresponse - if actionname == "PRESENTBOX" - df = a.memory[:shortmem][:available_wine] - winename = join(df[:, :wine_name], ", ") - if a.memory[:state][:wine_presented_to_user] == "None" - a.memory[:state][:wine_presented_to_user] = winename - else - a.memory[:state][:wine_presented_to_user] *= ", $winename" - end - end elseif actionname == "CHECKINVENTORY" - if haskey(a.memory[:shortmem], :available_wine) # store wines in dataframe format - df = a.memory[:shortmem][:available_wine] - a.memory[:shortmem][:available_wine] = vcat(df, rawresponse) - elseif rawresponse !== nothing - a.memory[:shortmem][:available_wine] = rawresponse + if rawresponse !== nothing + if haskey(a.memory[:shortmem], :available_wine) + df = a.memory[:shortmem][:available_wine] + #[TESTING] sometime df 2 df has different column size + dfCol = names(df) + rawresponse_dfCol = names(rawresponse) + if length(dfCol) > length(rawresponse_dfCol) + a.memory[:shortmem][:available_wine] = DataFrames.outerjoin(df, rawresponse, on=rawresponse_dfCol) + elseif length(dfCol) < length(rawresponse_dfCol) + a.memory[:shortmem][:available_wine] = DataFrames.outerjoin(df, rawresponse, on=dfCol) + else + a.memory[:shortmem][:available_wine] = vcat(df, rawresponse) + end + else + a.memory[:shortmem][:available_wine] = rawresponse + end else - # skip, no result + # no result, skip end + push!(a.memory[:events], eventdict(; event_description= "the assistant searched the database.", @@ -891,7 +902,7 @@ julia> function generatechat(a::sommelier, thoughtDict) systemmsg = """ - Your name is $(a.name). 
You are a helpful English-speaking assistant, acting as a polite, website-based sommelier for an online wine store. + Your name is $(a.name). You are a helpful English-speaking assistant, acting as a polite, website-based sommelier for $(a.retailername)'s wine store. You are currently talking with the user. Your goal includes: 1) Help the user select the best wines from your inventory that align with the user's preferences. @@ -899,9 +910,10 @@ function generatechat(a::sommelier, thoughtDict) Your responsibility includes: 1) Given the situation, convey your thoughts to the user. - Your responsibility do not include: - 1) Asking or guiding the user to make a purchase - 2) Processing sales orders or engaging in any other sales-related activities + Your responsibility excludes: + 1) Asking or guiding the user to make a purchase + 2) Processing sales orders or engaging in any other sales-related activities + 3) Answering questions and offering additional services beyond just recommendations, such as delivery, box, gift wrapping, personalized messages. Customers can reach out to our sales at the store. At each round of conversation, you will be given the current situation: Your ongoing conversation with the user: ... @@ -961,15 +973,17 @@ function generatechat(a::sommelier, thoughtDict) """ try - response_1 = a.func[:text2textInstructLLM](prompt) + response = a.func[:text2textInstructLLM](prompt) # sometime the model response like this "here's how I would respond: ..." 
- if occursin("respond:", response_1) + if occursin("respond:", response) errornote = "You don't need to intro your response" error("generatechat() response contain : ", @__FILE__, " ", @__LINE__) end - response_2 = replace(response_1, '*' => "") - response_3 = replace(response_2, '$' => "USD") - response = replace(response_3, '`' => "") + response = GeneralUtils.remove_french_accents(response) + response = replace(response, '*'=>"") + response = replace(response, '$' => "USD") + response = replace(response, '`' => "") + response = GeneralUtils.remove_french_accents(response) responsedict = GeneralUtils.textToDict(response, ["Chat"], rightmarker=":", symbolkey=true, lowercasekey=true) @@ -1017,8 +1031,8 @@ function generatechat(a::sommelier, thoughtDict) # then the agent is not supposed to recommend the wine if isWineInEvent == false - errornote = "Note: You are not supposed to recommend a wine that is not in your inventory." - error("Note: You are not supposed to recommend a wine that is not in your inventory.") + errornote = "Previously: You recommend a wine that is not in your inventory which is not allowed." + error("Previously: You recommend a wine that is not in your inventory which is not allowed.") end end diff --git a/src/llmfunction.jl b/src/llmfunction.jl index 5979a13..d7b6103 100644 --- a/src/llmfunction.jl +++ b/src/llmfunction.jl @@ -330,183 +330,6 @@ julia> # Signature """ -# function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2<:AbstractString} - -# systemmsg = -# """ -# As a helpful sommelier, your task is to extract the user information from the user's query as much as possible to fill out user's preference form. - -# At each round of conversation, the user will give you the current situation: -# User's query: ... - -# You must follow the following guidelines: -# 1) If specific information required in the preference form is not available in the query or there isn't any, mark with "NA" to indicate this. 
-# Additionally, words like 'any' or 'unlimited' mean no information is available. -# 2) Do not generate other comments. - -# You should then respond to the user with the following points: -# - reasoning: state your understanding of the current situation -# - wine_name: name of the wine -# - winery: name of the winery -# - vintage: the year of the wine -# - region: a region in a country where the wine is produced, such as Burgundy, Napa Valley, etc -# - country: a country where the wine is produced. Can be "Austria", "Australia", "France", "Germany", "Italy", "Portugal", "Spain", "United States" -# - wine_type: can be one of: "red", "white", "sparkling", "rose", "dessert" or "fortified" -# - grape_variety: the name of the primary grape used to make the wine -# - tasting_notes: a brief description of the wine's taste, such as "butter", "oak", "fruity", etc -# - wine_price: price of wine. For example, up to 100, less than 100, 20 to 100, 30-79.95 -# - occasion: the occasion the user is having the wine for -# - food_to_be_paired_with_wine: food that the user will be served with the wine such as poultry, fish, steak, etc - - -# You should only respond in the user's preference form as described below: -# reasoning: ... -# winery: ... -# wine_name: ... -# vintage: ... -# region: ... -# country: ... -# wine_type: ... -# grape_variety: ... -# tasting_notes: ... -# wine_price: ... -# occasion: ... -# food_to_be_paired_with_wine: ... - -# Here are some example: -# User's query: red, Chenin Blanc, Riesling, under 20 -# reasoning: ... -# winery: NA -# wine_name: NA -# vintage: NA -# region: NA -# country: NA -# wine_type: red -# grape_variety: Chenin Blanc, Riesling -# tasting_notes: NA -# wine_price: under 20 -# occasion: NA -# food_to_be_paired_with_wine: NA - -# User's query: Domaine du Collier Saumur Blanc 2019, France, white, Chenin Blanc -# reasoning: ... 
-# winery: Domaine du Collier -# wine_name: Saumur Blanc -# vintage: 2019 -# region: Saumur -# country: France -# wine_type: white -# grape_variety: Chenin Blanc -# tasting_notes: NA -# wine_price: 109 -# occasion: NA -# food_to_be_paired_with_wine: NA - -# Let's begin! -# """ - -# attributes = ["reasoning", "winery", "wine_name", "vintage", "region", "country", "wine_type", "grape_variety", "tasting_notes", "wine_price", "occasion", "food_to_be_paired_with_wine"] -# errornote = "" -# maxattempt = 5 -# for attempt in 1:maxattempt - -# usermsg = -# """ -# User's query: $input -# $errornote -# """ - -# _prompt = -# [ -# Dict(:name=> "system", :text=> systemmsg), -# Dict(:name=> "user", :text=> usermsg) -# ] - -# # put in model format -# prompt = GeneralUtils.formatLLMtext(_prompt; formatname="llama3instruct") -# prompt *= -# """ -# <|start_header_id|>assistant<|end_header_id|> -# """ - -# try -# response = a.func[:text2textInstructLLM](prompt) -# response = GeneralUtils.remove_french_accents(response) - -# # check wheter all attributes are in the response -# for word in attributes -# if !occursin(word, response) -# error("$word attribute is missing") -# end -# end - -# responsedict = GeneralUtils.textToDict(response, attributes, rightmarker=":", symbolkey=true) - -# for i ∈ attributes -# if length(JSON3.write(responsedict[Symbol(i)])) == 0 -# error("$i is empty ", @__LINE__) -# end -# end - -# #check if the following attributes has more than 1 name -# # responsedict[:grape_variety] = split(responsedict[:grape_variety], ',')[1] -# # responsedict[:grape_variety] = split(responsedict[:grape_variety], '/')[1] - -# responsedict[:country] = split(responsedict[:country], ',')[1] -# responsedict[:country] = split(responsedict[:country], '/')[1] - -# responsedict[:region] = split(responsedict[:region], ',')[1] -# responsedict[:region] = split(responsedict[:region], '/')[1] - -# delete!(responsedict, :reasoning) -# delete!(responsedict, :tasting_notes) -# delete!(responsedict, 
:occasion) -# delete!(responsedict, :food_to_be_paired_with_wine) - -# # check if winery, wine_name, region, country, wine_type, grape_variety are in the query because sometime AI halucinates -# for i in [:grape_variety, :winery, :wine_name, :region] -# result = check_key_in_input(input, responsedict, attempt, maxattempt, i) -# if result === nothing -# # nothing wrong -# elseif result == "NA" -# responsedict[i] = "NA" -# else -# errornote = result -# error(errornote) -# end -# end - -# # remove (some text) -# for (k, v) in responsedict -# _v = replace(v, r"\(.*?\)" => "") -# responsedict[k] = _v -# end - -# result = "" -# for (k, v) in responsedict -# # some time LLM generate text with "(some comment)". this line removes it -# if !occursin("NA", v) && v != "" && !occursin("none", v) && !occursin("None", v) -# result *= "$k: $v, " -# end -# end - -# #[PENDING] remove halucination. "highend dry white wine" --> "wine_type: white, occasion: special occasion, food_to_be_paired_with_wine: seafood, fish, country: France, Italy, USA, grape_variety: Chardonnay, Sauvignon Blanc, Pinot Grigio\nwine_notes: citrus, green apple, floral" - -# result = result[1:end-2] # remove the ending ", " - -# return result -# catch e -# io = IOBuffer() -# showerror(io, e) -# errorMsg = String(take!(io)) -# st = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace())) -# println("") -# println("Attempt $attempt. Error occurred: $errorMsg\n$st ", @__FILE__, " ", @__LINE__) -# println("") -# end -# end -# error("wineattributes_wordToNumber() failed to get a response") -# end function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2<:AbstractString} systemmsg = @@ -529,27 +352,27 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2< - region: a region (NOT a country) where the wine is produced, such as Burgundy, Napa Valley, etc - country: a country where the wine is produced. 
Can be "Austria", "Australia", "France", "Germany", "Italy", "Portugal", "Spain", "United States" - wine_type: can be one of: "red", "white", "sparkling", "rose", "dessert" or "fortified" - - grape_variety: the name of the primary grape used to make the wine + - grape_varietal: the name of the primary grape used to make the wine - tasting_notes: a brief description of the wine's taste, such as "butter", "oak", "fruity", etc - - wine_price: price of wine. For example, up to 100, less than 100, 20 to 100, 30-79.95 + - wine_price: price range of wine. - occasion: the occasion the user is having the wine for - food_to_be_paired_with_wine: food that the user will be served with the wine such as poultry, fish, steak, etc You should only respond in the user's preference form (JSON) as described below: - {"reasoning": ..., "winery": ..., "wine_name": ..., "vintage": ..., "region": ..., "country": ..., "wine_type": ..., "grape_variety": ..., "tasting_notes": ..., "wine_price": ..., "occasion": ..., "food_to_be_paired_with_wine": ...} + {"reasoning": ..., "winery": ..., "wine_name": ..., "vintage": ..., "region": ..., "country": ..., "wine_type": ..., "grape_varietal": ..., "tasting_notes": ..., "wine_price": ..., "occasion": ..., "food_to_be_paired_with_wine": ...} Here are some example: - User's query: red, Chenin Blanc, Riesling, under 20 - {"reasoning": ..., "winery": "NA", "wine_name": "NA", "vintage": "NA", "region": "NA", "country": "NA", "wine_type": "red", "grape_variety": "Chenin Blanc, Riesling", "tasting_notes": "NA", "wine_price": "under 20", "occasion": "NA", "food_to_be_paired_with_wine": "NA"} + User's query: red, Chenin Blanc, Riesling, 20 USD + {"reasoning": ..., "winery": "NA", "wine_name": "NA", "vintage": "NA", "region": "NA", "country": "NA", "wine_type": "red", "grape_varietal": "Chenin Blanc, Riesling", "tasting_notes": "NA", "wine_price": "0-20", "occasion": "NA", "food_to_be_paired_with_wine": "NA"} User's query: Domaine du Collier Saumur Blanc 
2019, France, white, Chenin Blanc - {"reasoning": ..., "winery": "Domaine du Collier", "wine_name": "Saumur Blanc", "vintage": "2019", "region": "Saumur", "country": "France", "wine_type": "white", "grape_variety": "Chenin Blanc", "tasting_notes": "NA", "wine_price": "109", "occasion": "NA", "food_to_be_paired_with_wine": "NA"} + {"reasoning": ..., "winery": "Domaine du Collier", "wine_name": "Saumur Blanc", "vintage": "2019", "region": "Saumur", "country": "France", "wine_type": "white", "grape_varietal": "Chenin Blanc", "tasting_notes": "NA", "wine_price": "NA", "occasion": "NA", "food_to_be_paired_with_wine": "NA"} Let's begin! """ - attributes = ["reasoning", "winery", "wine_name", "vintage", "region", "country", "wine_type", "grape_variety", "tasting_notes", "wine_price", "occasion", "food_to_be_paired_with_wine"] + attributes = ["reasoning", "winery", "wine_name", "vintage", "region", "country", "wine_type", "grape_varietal", "tasting_notes", "wine_price", "occasion", "food_to_be_paired_with_wine"] errornote = "" for attempt in 1:5 @@ -576,13 +399,16 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2< response = GeneralUtils.remove_french_accents(response) # check wheter all attributes are in the response + checkFlag = false for word in attributes if !occursin(word, response) errornote = "$word attribute is missing in previous attempts" println("Attempt $attempt $errornote ", @__FILE__, " ", @__LINE__) - continue + checkFlag = true + break end end + checkFlag == true ? 
continue : nothing responsedict = copy(JSON3.read(response)) @@ -591,24 +417,52 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2< delete!(responsedict, :occasion) delete!(responsedict, :food_to_be_paired_with_wine) - # check if winery, wine_name, region, country, wine_type, grape_variety are in the query because sometime AI halucinates - for i in [:grape_variety, :winery, :wine_name, :region] - content = responsedict[i] - if occursin(",", content) - content = split(content, ",") # sometime AI generates multiple values e.g. "Chenin Blanc, Riesling" - content = strip.(content) - else - content = [content] - end + println(@__FILE__, " ", @__LINE__) + pprintln(responsedict) - for x in content - if !occursin("NA", responsedict[i]) && !occursin(x, input) - errornote = "$x is not mentioned in the user query, you must only use the info from the query." - println("Attempt $attempt $errornote ", @__FILE__, " ", @__LINE__) - continue + # check if winery, wine_name, region, country, wine_type, grape_varietal's value are in the query because sometime AI halucinates + checkFlag = false + for i in attributes + j = Symbol(i) + if j ∉ [:reasoning, :tasting_notes, :occasion, :food_to_be_paired_with_wine] + # in case j is wine_price it needs to be checked differently because its value is ranged + if j == :wine_price + if responsedict[:wine_price] != "NA" + # check whether wine_price is in ranged number + if !occursin('-', responsedict[:wine_price]) + errornote = "wine_price must be a range number" + println("Attempt $attempt $errornote ", @__FILE__, " ", @__LINE__) + checkFlag = true + break + end + + # check whether max wine_price is in the input + maxprice = split(responsedict[:wine_price], '-')[end] + if !occursin(maxprice, input) + responsedict[:wine_price] = "NA" + end + end + else + content = responsedict[j] + if occursin(",", content) + content = split(content, ",") # sometime AI generates multiple values e.g. 
"Chenin Blanc, Riesling" + content = strip.(content) + else + content = [content] + end + + for x in content + if !occursin("NA", responsedict[j]) && !occursin(x, input) + errornote = "$x is not mentioned in the user query, you must only use the info from the query." + println("Attempt $attempt $errornote ", @__FILE__, " ", @__LINE__) + checkFlag = true + break + end + end + end + end + end + checkFlag == true ? continue : nothing # remove (some text) for (k, v) in responsedict @@ -624,7 +478,7 @@ function extractWineAttributes_1(a::T1, input::T2)::String where {T1<:agent, T2< end end - #[PENDING] remove halucination. "highend dry white wine" --> "wine_type: white, occasion: special occasion, food_to_be_paired_with_wine: seafood, fish, country: France, Italy, USA, grape_variety: Chardonnay, Sauvignon Blanc, Pinot Grigio\nwine_notes: citrus, green apple, floral" + #[PENDING] remove halucination. "highend dry white wine" --> "wine_type: white, occasion: special occasion, food_to_be_paired_with_wine: seafood, fish, country: France, Italy, USA, grape_varietal: Chardonnay, Sauvignon Blanc, Pinot Grigio\nwine_notes: citrus, green apple, floral" result = result[1:end-2] # remove the ending ", " @@ -785,7 +639,7 @@ function extractWineAttributes_2(a::T1, input::T2)::String where {T1<:agent, T2< for (k, v) in responsedict if !occursin("keyword", string(k)) if v !== "NA" && (!occursin('-', v) || length(v) > 5) - errornote = "WARNING: The non-range value {$k: $v} is not allowed. It should be specified in a range format, such as min-max." + errornote = "WARNING: The non-range value {$k: $v} is not allowed. It should be specified in a range format, i.e. min-max."
println("Attempt $attempt $errornote ", @__FILE__, " ", @__LINE__) continue end diff --git a/src/util.jl b/src/util.jl index e449dd6..2d13228 100644 --- a/src/util.jl +++ b/src/util.jl @@ -1,6 +1,6 @@ module util -export clearhistory, addNewMessage, vectorOfDictToText, eventdict, noises +export clearhistory, addNewMessage, vectorOfDictToText, eventdict, noises, createTimeline using UUIDs, Dates, DataStructures, HTTP, JSON3 using GeneralUtils @@ -169,31 +169,6 @@ function vectorOfDictToText(vecd::Vector; withkey=true)::String end -# function eventdict(; -# event_description::Union{String, Nothing}=nothing, -# timestamp::Union{DateTime, Nothing}=nothing, -# subject::Union{String, Nothing}=nothing, -# action_or_dialogue::Union{String, Nothing}=nothing, -# location::Union{String, Nothing}=nothing, -# equipment_used::Union{String, Nothing}=nothing, -# material_used::Union{String, Nothing}=nothing, -# outcome::Union{String, Nothing}=nothing, -# note::Union{String, Nothing}=nothing, -# ) -# return Dict{Symbol, Any}( -# :event_description=> event_description, -# :timestamp=> timestamp, -# :subject=> subject, -# :action_or_dialogue=> action_or_dialogue, -# :location=> location, -# :equipment_used=> equipment_used, -# :material_used=> material_used, -# :outcome=> outcome, -# :note=> note, -# ) -# end - - function eventdict(; event_description::Union{String, Nothing}=nothing, timestamp::Union{DateTime, Nothing}=nothing, @@ -222,6 +197,32 @@ function eventdict(; ) end + +function createTimeline(memory::T1, recent) where {T1<:AbstractVector} + totalevents = length(memory) + ind = + if totalevents > recent + start = totalevents - recent + start:totalevents + else + 1:totalevents + end + + timeline = "" + for (i, event) in enumerate(memory[ind]) + if event[:outcome] === nothing + timeline *= "$i) $(event[:subject])> $(event[:actioninput])\n" + else + timeline *= "$i) $(event[:subject])> $(event[:actioninput]) $(event[:outcome])\n" + end + end + + return timeline +end + + + + # 
""" Convert a single chat dictionary into LLM model instruct format. # # Llama 3 instruct format example