diff --git a/src/interface.jl b/src/interface.jl
index 7a57a6f..9d5a159 100755
--- a/src/interface.jl
+++ b/src/interface.jl
@@ -451,13 +451,13 @@ function selfAwareness(a::agentReflex)
Use the following format strictly:
- Info extraction: repeat all keywords from the latest observed result thoroughly
- Info mapping: based on extracted info, explicitly state what each keyword could match which keyword memory's key
+ Info extraction: repeat all important info from the latest observed result thoroughly
+ Info mapping: based on extracted info, explicitly state what each info could match which keyword memory's key
Info matching: using JSON format, what key in my memory matches which info
- The user wants to buy an electric SUV car.
+ The user wants to buy an electric SUV car under 20000 dollars.
{"car type": null, "color": null, "financing": null}
@@ -477,13 +477,17 @@ function selfAwareness(a::agentReflex)
stopword=["/n/n", "END", "End", "Obs", "<|", ""])
response = split(response, "<|")[1]
response = split(response, "")[1]
+ response = "Info extraction:" * response
+ println("")
@show selfaware_1 = response
#WORKING
- headerToDetect = ["Info extraction:", "Info matching:",]
+ headerToDetect = ["Info extraction:", "Info mapping:", "Info matching:", "Actinput"]
headers = detectCharacters(response, headerToDetect)
- chunkedtext = chunktext(response, headers)
+
+ # headers[1:3] is for when the LLM generates more than a pair of "Info extraction" and "Info matching"; discard the rest
+ chunkedtext = chunktext(response, headers[1:3])
println("")
_infomatch = chunkedtext["Info matching:"]
_infomatch = GeneralUtils.getStringBetweenCharacters(_infomatch, '{', '}', endCharLocation="next")
@@ -527,6 +531,7 @@ function selfAwareness(a::agentReflex)
# response = split(response, "")[1]
response = "What I know about user:" * JSON3.write(a.memory[:keyword]) # * response
+ println("")
@show selfaware_2 = response
@@ -590,12 +595,13 @@ function actor_mistral_openorca(a::agentReflex, selfaware=nothing)
end
thought = "Thought: you should always think about what to do according to the plan (pay attention to correct numeral calculation and commonsense and do one thing at a time.)"
+ startword = "Thought:"
if selfaware !== nothing
-
thought =
- "Self-awareness: $(JSON3.write(a.memory[:keyword]))
- Thought: To think about your next step, reflect on your self-awareness and prioritize what you need to improve first (null) then consult your plan. (P.S. 1) let's think a single step. 2) pay attention to correct numeral calculation and commonsense.)
+ "Self-awareness: readout all info (key and value) you know and not know about the user one by one
+ Thought: based on your self-awareness, focus on what you need to improve first then follow your plan to decide what to do next. (P.S. 1) let's think a single step. 2) pay attention to correct numeral calculation and commonsense.)
"
+ startword = "Self-awareness:"
end
# your should request the missing information first before making a decision
aboutYourself =
@@ -616,6 +622,8 @@ function actor_mistral_openorca(a::agentReflex, selfaware=nothing)
winestocksearchresult = "\n"
end
+ #WORKING I need an LLM JSON-read function
+
prompt =
"""
<|system|>
@@ -628,7 +636,9 @@ function actor_mistral_openorca(a::agentReflex, selfaware=nothing)
$(a.memory[:shortterm]["Plan 1:"])
-
+
+ $(JSON3.write(a.memory[:keyword]))
+
Use the following format:
$thought
@@ -637,14 +647,21 @@ function actor_mistral_openorca(a::agentReflex, selfaware=nothing)
Obs: observed result of the action
+
+ {"car type": "SUV", "engine type": "electric motor", "price": "20k dollar", "color": null, "financing method": null}
+
+ "Self-awareness:
+ - The user wants an electric SUV car
+ - The user budget is 20k dollars
+ - I don't know about the car color yet
+ - I don't know about the financing method yet
Thought: based on self-awareness, I think he also need to know whether there are any charging station near by his house. I should search the internet to get this info.
Act: internetsearch
Actinput: {\"internetsearch\": \"EV charging station near Bangkok\"}
<|assistant|>
- $work
- "Thought: "
+ $startword
"""
prompt = replace(prompt, "{toolnames}" => toolnames)
@@ -656,24 +673,22 @@ function actor_mistral_openorca(a::agentReflex, selfaware=nothing)
chunkedtext = nothing
latestTask = nothing
- tempcounter = 0.2
- seed = nothing
while true # while Thought or Act is empty, run actor again
- # tempcounter += 0.2
- @show tempcounter
+
response = sendReceivePrompt(a, prompt, max_tokens=1024, temperature=0.4, timeout=300,
stopword=["Thought:", "Obs:", "<|system|>", "", "<|end|>"],
- seed=seed)
+ seed=rand(1000000:2000000))
+ println("")
+ @show actor_raw = response
+
response = splittext(response, ["/n/n", "END", "End","obs", "Obs", "<|im_end|>"])
response = split(response, "<|")[1]
response = split(response, "")[1]
- response = split(response, "Thought:")[end]
+ # response = split(response, "Thought:")[end]
latestTask = shortMemLatestTask(a.memory[:shortterm]) +1
- if occursin("Thought", response) == false
- response = "Thought:" * response
- end
+ response = startword * response
headerToDetect = ["Plan:", "Self-awareness:", "Thought:",
"Act:", "Actinput:", "Obs:",
@@ -690,7 +705,7 @@ function actor_mistral_openorca(a::agentReflex, selfaware=nothing)
response = replace(response, "actinput:"=>"Actinput:")
println("")
- @show response
+ @show actor_response = response
headerToDetect = ["Plan $(a.attempt):",
"Self-awareness $latestTask:",
diff --git a/src/type.jl b/src/type.jl
index 93df93f..12d8e6f 100644
--- a/src/type.jl
+++ b/src/type.jl
@@ -151,14 +151,13 @@ function agentReflex(
:sommelier =>
"""
Request the user’s input for the following info initially, and use alternative sources of information only if they are unable to provide it:
- - occasion
+ - wine price range: ask the user
+ - wine type (Rose, White, Red, Sparkling, Dessert)
- food type that will be served with wine
- - wine type (Rose, White, Red, Rose, Sparkling, Dessert)
- wine sweetness level (dry to very sweet)
- wine intensity level (light to full bodied)
- wine tannin level (low to high)
- wine acidity level (low to high)
- - wine price range: ask the user
- wines we have in stock (use winestock tool)
"""
),