This commit is contained in:
Your Name
2024-02-08 15:10:20 +07:00
parent 021c66c3c5
commit cb24340777
2 changed files with 75 additions and 90 deletions

View File

@@ -9,7 +9,7 @@ export agentReact, agentReflex,
using JSON3, DataStructures, Dates, UUIDs, HTTP, Random
using CommUtils, GeneralUtils
using ..type, ..utils
using ..type, ..utils, ..llmfunction
# ---------------------------------------------------------------------------- #
# pythoncall setting #
@@ -494,12 +494,16 @@ function selfAwareness(a::agentReflex)
chunkedtext = chunktext(response, headers[1:3])
println("")
_infomatch = chunkedtext["Info matching:"]
_infomatch = GeneralUtils.getStringBetweenCharacters(_infomatch, '{', '}', endCharLocation="next")
infomatch = copy(JSON3.read(_infomatch))
_infomatch = GeneralUtils.getStringBetweenCharacters(_infomatch, '{', '}', endCharLocation="end")
infomatch = GeneralUtils.JSON3read_stringKey(_infomatch)
# infomatch = copy(JSON3.read(_infomatch))
println("")
@show chunkedtext
println("")
@show infomatch
keywordMemoryUpdate!(a.memory[:keyword], infomatch)
response = "What I know about user:" * JSON3.write(a.memory[:keyword]) # * response
@@ -817,41 +821,6 @@ function actor_mistral_openorca(a::agentReflex, selfaware=nothing)
end
end
# prompt =
# """
# <|system|>
# <About yourself>
# $aboutYourself
# </About yourself>
# <You have access to the following tools>
# $toollines
# </You have access to the following tools>
# <Your plan>
# $(a.memory[:shortterm]["Plan 1:"])
# </Your plan>
# <What I know about the user>
# $(JSON3.write(a.memory[:keyword]))
# </What I know about the user>
# <Your job>
# Use the following format:
# $thought
# Act: based on your thought what action to choose?, must be one of [{toolnames}].
# Actinput: your input to the action (pay attention to the tool's input)
# Obs: observed result of the action
# </Your job>
# <Example>
# <What I know about the user>
# $(readKeywordMemory(a))
# </What I know about the user>
# Thought: based on what you know, I think he also need to know whether there are any charging station near by his house. I should search the internet to get this info.
# Act: internetsearch
# Actinput: {\"internetsearch\": \"EV charging station near Bangkok\"}
# </Example>
# </s>
# <|assistant|>
# $startword
# """
"""
- Car type is SUV
- Brand is Lexus
@@ -894,9 +863,9 @@ function actor_mistral_openorca(a::agentReflex, selfaware=nothing)
- Luxury level is high
</What I know about the user>
<|assistant|>
Thought: I still don't know what color the user like. I should ask the user.
Thought: Based on what I know about the user, I still don't know what color the user like. I should ask the user.
Act: askbox
Actinput: {\"askbox\": \"What color do you like?\"}
Actinput:
</|assistant|>
</Example 1>
</s>
@@ -1007,7 +976,6 @@ function actor_mistral_openorca(a::agentReflex, selfaware=nothing)
end
if check_1 && check_2 && check_3 && check_4 && check_5 && check_6 && check_7
#TODO paraphrase selfaware
break
end
# print all check_1 to check_6
@@ -1034,21 +1002,27 @@ function actor_mistral_openorca(a::agentReflex, selfaware=nothing)
"Check $latestTask:",]
headers = detectCharacters(response, headerToDetect)
chunkedtext = chunktext(response, headers)
chunkedtext["Act $latestTask:"] = toolname
println("")
@show chunkedtext
toolinput = chunkedtext["Actinput $latestTask:"]
# because tools has JSON input but sometime LLM output is not JSON, we need to check.
if occursin("{", toolinput)
act = GeneralUtils.getStringBetweenCharacters(response, '{', '}', endCharLocation="end")
act = copy(JSON3.read(act))
chunkedtext["Actinput $latestTask:"] = JSON3.write(act[Symbol(toolname)])
a.memory[:c] = chunkedtext
toolinput = act[Symbol(toolname)]
end
# # because tools has JSON input but sometime LLM output is not JSON, we need to check.
# if occursin("{", toolinput)
# act = GeneralUtils.getStringBetweenCharacters(response, '{', '}', endCharLocation="end")
# act = copy(JSON3.read(act))
# println("")
# @show act
# chunkedtext["Actinput $latestTask:"] = JSON3.write(act[Symbol(toolname)])
# a.memory[:c] = chunkedtext
# toolinput = act[Symbol(toolname)]
# end
chunkedtext["Act $latestTask:"] = toolname
return (toolname=toolname, toolinput=toolinput, chunkedtext=chunkedtext, selfaware=selfaware)
end
@@ -1275,25 +1249,29 @@ function actor(a::agentReflex)
end
actorResult = actor_mistral_openorca(a, selfaware)
println("")
toolname, toolinput, chunkedtext, selfaware = actorResult
println("")
@show toolname
@show toolinput
println(typeof(toolinput))
println("")
addShortMem!(a.memory[:shortterm], chunkedtext)
println("")
if toolname == "askbox" # chat with user
msgToUser = toolinput
msgToUser = askbox(toolinput)
actorState = toolname
#WORKING add only a single Q1 to memory because LLM need to ask the user only 1 question at a time
latestTask = shortMemLatestTask(a.memory[:shortterm]) +1
chunkedtext["Actinput $latestTask:"] = msgToUser
addShortMem!(a.memory[:shortterm], chunkedtext)
break
elseif toolname == "finalanswer"
addShortMem!(a.memory[:shortterm], chunkedtext)
println(">>> already done")
actorState = "formulateFinalResponse"
break
else # function call
addShortMem!(a.memory[:shortterm], chunkedtext)
f = a.tools[toolname][:func]
toolresult = f(a, actorResult)
@show toolresult
@@ -1922,12 +1900,12 @@ end
""" Convert keyword memory into a string.
Arguments:
a, one of ChatAgent's agent.
keywordmemory, a dictionary of keyword memory.
Arguments\n
a : one of ChatAgent's agent.
keywordmemory : a dictionary of keyword memory.
Return:
a string of LLM readout from keyword memory
Return\n
result : a string of LLM readout from keyword memory
Example:
```jldoctest

View File

@@ -1,20 +1,19 @@
module llmfunction
export wikisearch, winestock
export wikisearch, winestock, askbox
using HTTP, JSON3, URIs, Random
using GeneralUtils
using ..type, ..utils
#------------------------------------------------------------------------------------------------100
"""
Search wikipedia.
""" Search wikipedia.
Arguments:
query (string): The query to search for
Arguments\n
query {string} : The query to search for
Returns:
string: The search result text from wikipedia
Returns\n
result {string} : The search result text from wikipedia
```jldoctest
julia> using HTTP, JSON3
julia> result = wikisearch("AMD")
@@ -61,37 +60,30 @@ end
"""
""" Search wine in stock.
Arguments:
Return:
Example:
Arguments\n
a : one of ChatAgent's agent.
Return\n
A JSON string of available wine
Example\n
```jldoctest
julia> using ChatAgent, CommUtils
julia> agent = ChatAgent.agentReflex("Jene")
julia> shorttermMemory = OrderedDict{String, Any}(
"user" => "What's the latest AMD GPU?",
"Plan 1:" => " To answer this question, I will need to search for the latest AMD GPU using the wikisearch tool.\n",
"Act 1:" => " wikisearch\n",
"Actinput 1:" => " amd gpu latest\n",
"Obs 1:" => "No info available for your search query.",
"Act 2:" => " wikisearch\n",
"Actinput 2:" => " amd graphics card latest\n",
"Obs 2:" => "No info available for your search query.")
julia> guideline = "\nEvaluation Guideline:\n1. Check if the user's question has been understood correctly.\n2. Evaluate the tasks taken to provide the information requested by the user.\n3. Assess whether the correct tools were used for the task.\n4. Determine if the user's request was successfully fulfilled.\n5. Identify any potential improvements or alternative approaches that could be used in the future.\n\nThe response should include:\n1. A clear understanding of the user's question.\n2. The tasks taken to provide the information requested by the user.\n3. An evaluation of whether the correct tools were used for the task.\n4. A confirmation or explanation if the user's request was successfully fulfilled.\n5. Any potential improvements or alternative approaches that could be used in the future."
julia> score = grading(agent, guideline, shorttermMemory)
julia> input = "{\"food\": \"pizza\", \"occasion\": \"anniversary\"}"
julia> result = winestock(agent, input)
"{"wine 1": {\"Winery\": \"Pichon Baron\", \"wine name\": \"Pauillac (Grand Cru Classé)\", \"grape variety\": \"Cabernet Sauvignon\", \"year\": 2010, \"price\": \"125 USD\", \"stock ID\": \"ar-17\"}, }"
```
"""
function winestock(a::agentReflex, input::NamedTuple)
println("")
@show input
wineSearchCriteria = GeneralUtils.JSON3read_stringKey(input[:toolinput])
newDict = Dict{String,Any}()
for (k,v) in input[:toolinput]
for (k,v) in wineSearchCriteria
println("k $k v $v")
newDict[string(k)] = v
end
@@ -99,7 +91,7 @@ function winestock(a::agentReflex, input::NamedTuple)
println("")
@show query
prompt =
"""
<|system|>
@@ -247,11 +239,26 @@ function winestock(a::agentReflex, input::NamedTuple)
end
""" Get the first question ("Q1") from a JSON string of questions.
Arguments\n
    input {JSON string} : a JSON object mapping question keys ("Q1", "Q2", ...) to question text
Return\n
    a single message to the user (the value stored under key "Q1")
Throws\n
    KeyError if the parsed JSON object has no "Q1" key
Example\n
```jldoctest
julia> input = "{\"Q1\": \"How are you doing?\", \"Q2\": \"How may I help you?\"}"
julia> askbox(input)
"How are you doing?"
```
"""
function askbox(input::String)
    # Parse the JSON string into a Dict keyed by String (project helper in GeneralUtils).
    dict = GeneralUtils.JSON3read_stringKey(input)
    # The LLM may emit several questions at once; only the first ("Q1") is
    # forwarded so the user is asked one question at a time.
    return dict["Q1"]
end