diff --git a/src/interface.jl b/src/interface.jl
index 48c0c40..61b3164 100755
--- a/src/interface.jl
+++ b/src/interface.jl
@@ -152,6 +152,7 @@ end
function planner_mistral_openorca(a::agentReflex)
+
"""
general prompt format:
@@ -180,6 +181,7 @@ function planner_mistral_openorca(a::agentReflex)
"""
conversation = messagesToString(a.messages)
+
toollines = ""
for (toolname, v) in a.tools
if toolname ∉ [""]
@@ -191,14 +193,14 @@ function planner_mistral_openorca(a::agentReflex)
# skip objective and plan because LLM is going to generate new plan
shorttermMemory = dictToString(a.memory[:shortterm], skiplist=["Objective:", "Plan 1:"])
-
+ @show "---> 2"
aboutYourself =
"""
- Your name is $(a.agentName)
+ Your name is $(a.name)
$(a.roles[a.role])
$(a.roleSpecificInstruction[a.role])
"""
-
+ @show "---> 3"
# assistant_plan_prompt =
# """
#
@@ -233,7 +235,7 @@ function planner_mistral_openorca(a::agentReflex)
# Plan:
# """
- assistant_plan_prompt =
+ prompt =
"""
<|im_start|>system
@@ -266,9 +268,9 @@ function planner_mistral_openorca(a::agentReflex)
Plan:
"""
+ response = sendReceivePrompt(a, prompt, a.config[:text2text][:mqtttopic], max_tokens=512,
+ timeout=180, stopword=["<|user|>", ""])
- response = sendReceivePrompt(a, assistant_plan_prompt, max_tokens=1024, temperature=0.1,
- timeout=180, stopword=["<|user|>", ""])
response = split(response, "<|")[1]
response = split(response, "")[1]
response = split(response, "\n\n")[1]
@@ -336,7 +338,8 @@ function updatePlan(a::agentReflex)
Updated plan:
"""
- result = sendReceivePrompt(a, prompt, max_tokens=1024, temperature=0.1)
+ result = sendReceivePrompt(a, prompt, a.config[:text2text][:mqtttopic],
+ timeout=180, stopword=["<|", ""])
@show updatedPlan = result
a.memory[:shortterm]["Plan 1:"] = result
@@ -369,7 +372,7 @@ function selfAwareness(a::agentReflex)
aboutYourself =
"""
- Your name is $(a.agentName)
+ Your name is $(a.name)
$(a.roles[a.role])
"""
@@ -420,8 +423,9 @@ function selfAwareness(a::agentReflex)
<|assistant|>
Info extraction:
"""
- response = sendReceivePrompt(a, prompt, max_tokens=1024, temperature=0.4, timeout=180,
- stopword=["/n/n", "END", "End", "Obs", "<|", ""])
+ response = sendReceivePrompt(a, prompt, a.config[:text2text][:mqtttopic],
+ max_tokens=1024, temperature=0.4, timeout=180,
+ stopword=["/n/n", "END", "End", "Obs", "<|", ""])
response = split(response, "<|")[1]
response = split(response, "")[1]
response = split(response, "|assistant|>")[1]
@@ -483,7 +487,7 @@ function sentenceToKeywordMemory(a::agentReflex)
aboutYourself =
"""
- Your name is $(a.agentName)
+ Your name is $(a.name)
$(a.roles[a.role])
"""
@@ -531,8 +535,9 @@ function sentenceToKeywordMemory(a::agentReflex)
<|assistant|>
Info extraction:
"""
- response = sendReceivePrompt(a, prompt, max_tokens=1024, temperature=0.2, timeout=180,
- stopword=["/n/n", "END", "End", "Obs", "<|", ""])
+ response = sendReceivePrompt(a, prompt, a.config[:text2text][:mqtttopic],
+ max_tokens=1024, temperature=0.2, timeout=180,
+ stopword=["/n/n", "END", "End", "Obs", "<|", ""])
response = split(response, "<|")[1]
response = split(response, "")[1]
response = split(response, "|assistant|>")[1]
@@ -565,7 +570,7 @@ function keywordMemoryToPlanMatching(a::agentReflex)
aboutYourself =
"""
- Your name is $(a.agentName)
+ Your name is $(a.name)
$(a.roles[a.role])
"""
@@ -608,8 +613,9 @@ function keywordMemoryToPlanMatching(a::agentReflex)
<|assistant|>
Info mapping:
"""
- response = sendReceivePrompt(a, prompt, max_tokens=1024, temperature=0.4, timeout=180,
- stopword=["/n/n", "END", "End", "Obs", "<|", ""])
+ response = sendReceivePrompt(a, prompt, a.config[:text2text][:mqtttopic],
+ max_tokens=1024, temperature=0.4, timeout=180,
+ stopword=["/n/n", "END", "End", "Obs", "<|", ""])
response = split(response, "<|")[1]
response = split(response, "")[1]
response = split(response, "|assistant|>")[1]
@@ -706,7 +712,7 @@ end
# # your should request the missing information first before making a decision
# aboutYourself =
# """
-# Your name is $(a.agentName)
+# Your name is $(a.name)
# $(a.roles[a.role])
# """
@@ -922,7 +928,7 @@ function actor_mistral_openorca(a::agentReflex, selfaware=nothing)
# your should request the missing information first before making a decision
aboutYourself =
"""
- Your name is $(a.agentName)
+ Your name is $(a.name)
$(a.roles[a.role])
"""
@@ -963,7 +969,7 @@ function actor_mistral_openorca(a::agentReflex, selfaware=nothing)
<|system|>
- Your name is $(a.agentName)
+ Your name is $(a.name)
$(a.roles[a.role])
@@ -994,7 +1000,7 @@ function actor_mistral_openorca(a::agentReflex, selfaware=nothing)
<|system|>
- Your name is $(a.agentName)
+ Your name is $(a.name)
$(a.roles[a.role])
@@ -1029,10 +1035,10 @@ function actor_mistral_openorca(a::agentReflex, selfaware=nothing)
latestTask = nothing
while true # while Thought or Act is empty, run actor again
+ response = sendReceivePrompt(a, prompt, a.config[:text2text][:mqtttopic],
+ max_tokens=1024, temperature=0.4, timeout=180,
+ stopword=["Thought:", "Obs:", "<|system|>", "", "<|end|>", "<|user|>"])
- response = sendReceivePrompt(a, prompt, max_tokens=1024, temperature=0.4, timeout=300,
- stopword=["Thought:", "Obs:", "<|system|>", "", "<|end|>"],
- seed=rand(1000000:2000000))
println("")
@show actor_raw = response
@@ -1273,13 +1279,16 @@ function work(a::agentReflex)
end
end
end
-
+
while true # Work loop
+
objective = nothing
# make new plan
if !haskey(a.memory[:shortterm], "Plan 1:")
+
plan = planner_mistral_openorca(a)
+
a.memory[:shortterm]["Plan $(a.attempt):"] = plan
a.memory[:log]["Plan $(a.attempt):"] = plan
a.task = 1 # reset because new plan is created
@@ -1295,7 +1304,7 @@ function work(a::agentReflex)
# enter actor loop
actorstate, msgToUser = actor(a)
-
+
if actorstate == "askbox"
response = msgToUser
workstate = actorstate
@@ -1488,7 +1497,9 @@ function writeEvaluationGuideline(a::agentReflex)
"""
- response = sendReceivePrompt(a, prompt)
+ response = sendReceivePrompt(a, prompt, a.config[:text2text][:mqtttopic],
+ max_tokens=1024, temperature=0.4, timeout=180,
+ stopword=["Thought:", "Obs:", "<|system|>", "", "<|end|>", "<|user|>"])
return response
end
@@ -1553,7 +1564,9 @@ function grading(a, guideline::T, text::T) where {T<:AbstractString}
println("")
score = nothing
while true
- response = sendReceivePrompt(a, prompt, timeout=180)
+ response = sendReceivePrompt(a, prompt, a.config[:text2text][:mqtttopic],
+ max_tokens=1024, temperature=0.4, timeout=180,
+ stopword=["Thought:", "Obs:", "<|system|>", "", "<|end|>", "<|user|>"])
try
response = "{" * split(response, "}")[1] * "}"
@show response
@@ -1619,7 +1632,9 @@ function analyze(a)
"""
- response = sendReceivePrompt(a, prompt, max_tokens=1024, timeout=180)
+ response = sendReceivePrompt(a, prompt, a.config[:text2text][:mqtttopic],
+ max_tokens=1024, temperature=0.4, timeout=180,
+ stopword=["Thought:", "Obs:", "<|system|>", "", "<|end|>", "<|user|>"])
return response
end
@@ -1677,7 +1692,9 @@ function selfReflext(a, analysis::T) where {T<:AbstractString}
"""
- response = sendReceivePrompt(a, prompt, max_tokens=1024)
+ response = sendReceivePrompt(a, prompt, a.config[:text2text][:mqtttopic],
+ max_tokens=1024, temperature=0.4, timeout=180,
+ stopword=["Thought:", "Obs:", "<|system|>", "", "<|end|>", "<|user|>"])
return response
end
@@ -1737,7 +1754,9 @@ function formulateUserResponse(a)
<|assistant|>
Recommendation:
"""
- response = sendReceivePrompt(a, prompt, max_tokens=1024, timeout=300)
+ response = sendReceivePrompt(a, prompt, a.config[:text2text][:mqtttopic],
+ max_tokens=1024, temperature=0.4, timeout=300,
+ stopword=["Thought:", "Obs:", "<|system|>", "", "<|end|>", "<|user|>"])
return response
end
@@ -1803,7 +1822,9 @@ function extractinfo(a, text::T) where {T<:AbstractString}
Answer:
"""
- response = sendReceivePrompt(a, prompt, temperature=0.0)
+ response = sendReceivePrompt(a, prompt, a.config[:text2text][:mqtttopic],
+ max_tokens=1024, temperature=0.4, timeout=180,
+ stopword=["Thought:", "Obs:", "<|system|>", "", "<|end|>", "<|user|>"])
if occursin("Yes", response)
prompt =
"""
@@ -1819,7 +1840,9 @@ function extractinfo(a, text::T) where {T<:AbstractString}
"""
- response = sendReceivePrompt(a, prompt, temperature=0.0)
+ response = sendReceivePrompt(a, prompt, a.config[:text2text][:mqtttopic],
+ max_tokens=1024, temperature=0.4, timeout=180,
+ stopword=["Thought:", "Obs:", "<|system|>", "", "<|end|>", "<|user|>"])
return response
else
return nothing
@@ -1861,7 +1884,9 @@ function updateEnvState(a, newinfo)
Updated Current State:\n
"""
- response = sendReceivePrompt(a, prompt, temperature=0.0)
+ response = sendReceivePrompt(a, prompt, a.config[:text2text][:mqtttopic],
+ max_tokens=1024, temperature=0.4, timeout=180,
+ stopword=["Thought:", "Obs:", "<|system|>", "", "<|end|>", "<|user|>"])
return response
end
@@ -1934,7 +1959,9 @@ function checkTaskCompletion(a)
"""
response = nothing
_response = nothing
- _response = sendReceivePrompt(a, prompt, max_tokens=1024)
+ _response = sendReceivePrompt(a, prompt, a.config[:text2text][:mqtttopic],
+ max_tokens=1024, temperature=0.4, timeout=180,
+ stopword=["Thought:", "Obs:", "<|system|>", "", "<|end|>", "<|user|>"])
@show checkTaskCompletion_raw = _response
_response = split(_response, "")[1]
_response = split(_response, "\n\n")[1]
@@ -2030,7 +2057,9 @@ function recap(a)
Extracted info:
"""
aware = "Self-awareness: map the info from the recap to the plan's tasks then state your mapping."
- response = sendReceivePrompt(a, prompt, max_tokens=1024, temperature=0.0)
+ response = sendReceivePrompt(a, prompt, a.config[:text2text][:mqtttopic],
+ max_tokens=1024, temperature=0.4, timeout=180,
+ stopword=["Thought:", "Obs:", "<|system|>", "", "<|end|>", "<|user|>"])
response = split(response, "")[1]
response = split(response, "<|")[1]
response = split(response, "\n\n")[1]
@@ -2121,7 +2150,7 @@ function readKeywordMemory(a; keywordmemory::Union{AbstractDict, Nothing}=nothin
<|system|>
- Your name is $(a.agentName)
+ Your name is $(a.name)
$(a.roles[a.role])
@@ -2148,7 +2177,9 @@ function readKeywordMemory(a; keywordmemory::Union{AbstractDict, Nothing}=nothin
<|assistant|>
"""
- response = sendReceivePrompt(a, prompt, max_tokens=512, temperature=0.0)
+ response = sendReceivePrompt(a, prompt, a.config[:text2text][:mqtttopic],
+ max_tokens=1024, temperature=0.4, timeout=180,
+ stopword=["Thought:", "Obs:", "<|system|>", "", "<|end|>", "<|user|>"])
response = split(response, "|assistant|>")[1]
# store LLM readout string to result
diff --git a/src/llmfunction.jl b/src/llmfunction.jl
index 78a12ec..f3a456f 100644
--- a/src/llmfunction.jl
+++ b/src/llmfunction.jl
@@ -5,7 +5,8 @@ export wikisearch, winestock, askbox
using HTTP, JSON3, URIs, Random
using GeneralUtils
using ..type, ..utils
-#------------------------------------------------------------------------------------------------100
+
+# ---------------------------------------------- 100 --------------------------------------------- #
""" Search wikipedia.
@@ -218,7 +219,7 @@ function winestock(a::agentReflex, input::NamedTuple)
<|assistant|>
Think: 1) low to medium tannin is not explicitly stated, but assuming it falls within the range of low-medium tannin.
Info map: {\"wine type\": \"white\", \"intensity\": 5, \"sweetness\": 2, \"tannin\": 2, \"acidity\": 1, \"price\": 50}
- SQL: SELECT * FROM white WHERE intensity = 5 AND sweetness = 2 AND acidity = 1 AND tannin = 2 AND price <= 50;
+    SQL: SELECT * FROM wines WHERE wine_type = \"white\" AND intensity = 5 AND sweetness = 2 AND acidity = 1 AND tannin = 2 AND price <= 50;
|assistant|>
@@ -228,7 +229,7 @@ function winestock(a::agentReflex, input::NamedTuple)
<|assistant|>
Think: 1) medium sweet is not explicitly stated, but assuming it falls within the range of dry and off-dry.
Info map: {\"wine type\": \"Rose\", \"intensity\": 1, \"sweetness\": 3, \"tannin\": 2, \"acidity\": 3, \"price\": 22, \"food\":\"American dishes\"}
- SQL: SELECT * FROM rose WHERE intensity = 1 AND tannin = 2 AND (sweetness = 1 OR sweetness = 2) AND price <= 22 AND food = American;
+    SQL: SELECT * FROM wines WHERE wine_type = \"rose\" AND intensity = 1 AND tannin = 2 AND (sweetness = 1 OR sweetness = 2) AND price <= 22 AND food = American;
|assistant|>
@@ -242,8 +243,9 @@ function winestock(a::agentReflex, input::NamedTuple)
@show db_prompt = prompt
_sql = nothing
while true
- _sql = sendReceivePrompt(a, prompt, max_tokens=256, temperature=0.4,
- stopword=["/n/n", "END", "End", "Obs", "<|", ""])
+ _sql = sendReceivePrompt(a, prompt, a.config[:text2text][:mqtttopic],
+ max_tokens=1024, temperature=0.4, timeout=180,
+ stopword=["Thought:", "Obs:", "<|system|>", "", "<|end|>", "<|user|>"])
_sql = split(_sql, ";")[1] * ";"
@show _sql
# check for valid SQL command
@@ -263,9 +265,9 @@ function winestock(a::agentReflex, input::NamedTuple)
# remove any blank character in front of a string
newsql = nothing
- for i in eachindex(_sql)
- if _sql[i] != ' '
- newsql = _sql[i:end]
+ for i in eachindex(db_sql)
+ if db_sql[i] != ' '
+ newsql = db_sql[i:end]
break
end
end
@@ -273,9 +275,9 @@ function winestock(a::agentReflex, input::NamedTuple)
body = newsql
uri = URI(scheme="http", host="192.168.88.12", port="9010", path="/sql", userinfo="root:root")
- r = HTTP.request("POST", uri, ["Accept" => "application/json", "NS"=>"yiem", "DB"=>"Blossom_wines"], body)
+ r = HTTP.request("POST", uri, ["Accept" => "application/json", "NS"=>"yiem", "DB"=>"wines"], body)
- a.memory[:r] = r
+ # a.memory[:r] = r
result = copy(JSON3.read(r.body))
diff --git a/src/type.jl b/src/type.jl
index a549397..1c2dcdf 100644
--- a/src/type.jl
+++ b/src/type.jl
@@ -170,7 +170,7 @@ function agentReflex(
1. "wine budget"
2. "wine type" (rose, white, red, sparkling, dessert)
3. "food pairing" that will be served with wine
- 4. "wine sweetness level" (dry to very sweet)
+ 4. "wine sweetness level" (low to very sweet)
5. "wine intensity level" (light to full bodied)
6. "wine tannin level" (low to high)
7. "wine acidity level" (low to high)
diff --git a/src/utils.jl b/src/utils.jl
index 0611e26..1485f4e 100644
--- a/src/utils.jl
+++ b/src/utils.jl
@@ -42,27 +42,13 @@ using ..type
Example\n
-----
```jldoctest
- julia> using GeneralUtils
- julia> msgMeta = generate_msgMeta("/agent/frontend/wine/chat/api/v1/txt/receive")
- Dict{Symbol, Union{Nothing, String}} with 13 entries:
- :msgPurpose => nothing
- :requestresponse => nothing
- :timestamp => "2024-03-15T08:10:23.909"
- :replyToMsgId => nothing
- :receiverId => nothing
- :getpost => nothing
- :msgId => "e3467028-1dc1-4678-a6f1-a074696ca07c"
- :acknowledgestatus => nothing
- :sendTopic => "/agent/frontend/wine/chat/api/v1/txt/receive"
- :receiverName => nothing
- :replyTopic => nothing
- :senderName => nothing
- :senderId => nothing
+ julia> sendReceivePrompt(agent, "I am a test", "/test/prompt", max_tokens=100, timeout=120,
+ temperature=0.2, stopword=["nostopwordyet"])
```
Signature\n
-----
-""" #WORKING correct docstring
+"""
function sendReceivePrompt(a::T1, prompt::String, sendtopic::String;
max_tokens::Integer=256, timeout::Integer=120, temperature::AbstractFloat=0.2,
stopword::T2=["nostopwordyet"],
@@ -75,6 +61,7 @@ function sendReceivePrompt(a::T1, prompt::String, sendtopic::String;
msgMeta = deepcopy(a.msgMeta)
msgMeta[:sendTopic] = sendtopic
msgMeta[:senderName] = "agent-wine-backend"
+ msgMeta[:senderId] = a.id
msgMeta[:receiverName] = "text2text"
msgMeta[:replyTopic] = a.config[:receiveinternal][:mqtttopic]
msgMeta[:msgId] = string(uuid4())