diff --git a/src/interface.jl b/src/interface.jl
index 9d5a159..1db7457 100755
--- a/src/interface.jl
+++ b/src/interface.jl
@@ -5,7 +5,7 @@ export agentReact, agentReflex,
addNewMessage, clearMessage, removeLatestMsg, conversation, directconversation,
writeEvaluationGuideline, grading, analyze, selfReflext,
formulateUserResponse, extractinfo, updateEnvState, chat_mistral_openorca,
- recap
+ recap, readKeywordMemory
using JSON3, DataStructures, Dates, UUIDs, HTTP, Random
using CommUtils, GeneralUtils
@@ -226,7 +226,7 @@ function planner_mistral_openorca(a::agentReflex)
9. Use inventory tool to find cars that match the user's preferences and are within their price range
10. Use finalanswer tool to present the recommended car to the user.
Keyword memory: {"mile per day": null, "carry item": null, "car type": null, "price range": null}
-
$conversation
<|assistant|>
@@ -455,19 +455,20 @@ function selfAwareness(a::agentReflex)
Info mapping: based on extracted info, explicitly state what each info could match which keyword memory's key
Info matching: using JSON format, what key in my memory matches which info
+
-
- The user wants to buy an electric SUV car under 20000 dollars.
-
-
- {"car type": null, "color": null, "financing": null}
-
- Info extraction:
- - The user is buying an electric SUV car.
- Info mapping:
- - SUV could matches "car type" key
- - electric could matches "engine type" key
- Info matching: {"car type": "SUV", "engine type": "electric motor", "color": null, "financing": null}
+
+ The user wants to buy an electric SUV car under 20000 dollars.
+
+
+ {"car type": null, "color": null, "financing": null}
+
+ Info extraction:
+ - The user is buying an electric SUV car.
+ Info mapping:
+ - SUV could match "car type" key
+ - electric could match "engine type" key
+ Info matching: {"car type": "SUV", "engine type": "electric motor", "color": null, "financing": null}
<|assistant|>
@@ -597,11 +598,9 @@ function actor_mistral_openorca(a::agentReflex, selfaware=nothing)
thought = "Thought: you should always think about what to do according to the plan (pay attention to correct numeral calculation and commonsense and do one thing at a time.)"
startword = "Thought:"
if selfaware !== nothing
- thought =
- "Self-awareness: readout all info (key and value) you know and not know about the user one by one
- Thought: based on your self-awareness, focus on what you need to improve first then follow your plan to decide what to do next. (P.S. 1) let's think a single step. 2) pay attention to correct numeral calculation and commonsense.)
+ thought =
+ "Thought: based on what you know, you should focus on what you need to improve first then follow your plan to decide what to do next. (P.S. 1) let's think a single step. 2) pay attention to correct numeral calculation and commonsense.)
"
- startword = "Self-awareness:"
end
# your should request the missing information first before making a decision
aboutYourself =
@@ -648,14 +647,9 @@ function actor_mistral_openorca(a::agentReflex, selfaware=nothing)
- {"car type": "SUV", "engine type": "electric motor", "price": "20k dollar", "color": null, "financing method": null}
+ $(readKeywordMemory(a))
- "Self-awareness:
- - The user wants an electric SUV car
- - The user budget is 20k dollars
- - I don't know about the car color yet
- - I don't knoe about financing method yet
- Thought: based on self-awareness, I think he also need to know whether there are any charging station near by his house. I should search the internet to get this info.
+ Thought: based on what you know, I think he also needs to know whether there are any charging stations near his house. I should search the internet to get this info.
Act: internetsearch
Actinput: {\"internetsearch\": \"EV charging station near Bangkok\"}
@@ -1674,6 +1668,121 @@ function directconversation(a::agentReflex, usermsg::String)
end
+""" Convert keyword memory into a string.
+
+ Arguments:
+ a, one of ChatAgent's agent.
+ keywordmemory, a dictionary of keyword memory.
+
+ Return:
+ a string of LLM readout from keyword memory
+
+ Example:
+ ```jldoctest
+ julia> using ChatAgent, CommUtils
+ julia> a = ChatAgent.agentReflex("Jene")
+ julia> keywordmemory = OrderedDict{String, Any}(
+ "food type" => nothing,
+ "tannin level" => "low to medium",
+ "intensity level" => "medium-bodied",
+ "acidity level" => nothing,
+ "price range" => "fifteen dollars",
+ "wine type" => "Red",
+ "sweetness level" => "dry",
+ )
+
+ julia> readout = readKeywordMemory(a, keywordmemory=keywordmemory)
+ " - The user did not provide food type yet
+ - The user prefers a low to medium tannin level
+ - The user prefers a medium-bodied intensity level
+ - The user did not provide acidity level yet
+ - The user prefers price range is fifteen dollars
+ - The user prefers a Red wine type
+ - The user prefers a dry sweetness level"
+ ```
+"""
+function readKeywordMemory(a; keywordmemory::Union{AbstractDict, Nothing}=nothing)
+
+ keywordmemory = keywordmemory !== nothing ? keywordmemory : a.memory[:keyword]
+ result = ""
+
+ if !isempty(keywordmemory)
+ new_keywordmemory = deepcopy(keywordmemory)
+
+
+
+ # reverse into an OrderedDict (a plain Dict is unordered) so pop! yields the original key order
+ reversed_keywordmemory = OrderedDict()
+ while length(new_keywordmemory) > 0
+ k, v = pop!(new_keywordmemory)
+ reversed_keywordmemory[k] = v
+ end
+
+ while length(reversed_keywordmemory) > 0
+ tempdict = OrderedDict()
+ for i in 1:4
+ if length(reversed_keywordmemory) == 0
+ break
+ else
+ k, v = pop!(reversed_keywordmemory)
+ tempdict[k] = v
+ end
+ end
+
+ # ask LLM to read tempdict
+ jsonstr = JSON3.write(tempdict)
+ prompt =
+ """
+
+ <|system|>
+
+ Your name is $(a.agentName)
+ $(a.roles[a.role])
+
+
+ Readout all the key and value pairs in memory, one by one. Do not say anything else.
+
+ |system|>
+
+
+ {\"car type\": "SUV",\"brand\":\"Lexus\",\"price\":\"20k dollar\",\"color\": null,\"financing method\": null, \"luxury level\":\"high\"}
+
+ <|assistant|>
+ - Car type is SUV
+ - Brand is Lexus
+ - Price is 20k dollar
+ - No info on the car color yet
+ - No info on the financing method yet
+ - Luxury level is high
+ |assistant|>
+
+
+
+ User preference: $jsonstr
+
+ <|assistant|>
+ """
+
+ response = sendReceivePrompt(a, prompt, max_tokens=512, temperature=0.0)
+ response = split(response, "|assistant|>")[1]
+
+ # store LLM readout string to result
+ result = result * response
+ end
+ end
+
+ return result
+end
+
+
+
+
+
+
+
+
+
+