update work()

This commit is contained in:
2023-11-17 22:29:30 +00:00
parent 6cdff501aa
commit 004fb0c933

View File

@@ -117,25 +117,28 @@ function agentReact(
:description => "Useful for when you need to search the Internet",
:input => "Input should be a search query.",
:output => "",
# :func => internetsearch # function
:func => nothing # put function here
),
:chatbox=>Dict(
:name => "chatbox",
:description => "Useful for when you need to ask a customer what you need to know or to talk with them.",
:input => "Input should be a conversation to customer.",
:output => "" ,
:func => nothing,
),
:wineStock=>Dict(
:name => "wineStock",
:description => "useful for when you need to search for wine by your description, price, name or ID.",
:input => "Input should be a search query with as much details as possible.",
:output => "" ,
:func => nothing,
),
:nothing=>Dict(
:name => "nothing",
:description => "useful for when you don't need to use tools or actions",
:input => "No input is needed",
:output => "" ,
:func => nothing,
),
),
msgMeta::Dict=Dict(
@@ -366,7 +369,42 @@ function generatePrompt_react_mistral_openorca(a::T, usermsg::String,
return prompt
end
"""
Chat with the LLM: identify the user's intention from `usermsg` and route it to the matching handler.
```jldoctest
julia> using JSON3, UUIDs, Dates, FileIO, CommUtils, ChatAgent
julia> mqttClientSpec = (
clientName= "someclient", # name of this client
        clientID= "\$(uuid4())",
broker= "mqtt.yiem.ai",
pubtopic= (imgAI="img/api/v0.0.1/gpu/request",
txtAI="txt/api/v0.1.0/gpu/request"),
subtopic= (imgAI="agent/api/v0.1.0/img/respond",
txtAI="agent/api/v0.1.0/txt/respond"),
keepalive= 30,
)
julia> msgMeta = Dict(
:msgPurpose=> "updateStatus",
:from=> "agent",
:to=> "llmAI",
:requestrespond=> "request",
:sendto=> "", # destination topic
        :replyTo=> "agent/api/v0.1.0/txt/respond", # the requester asks the responder to send its reply to this topic
:repondToMsgId=> "", # responder is responding to this msg id
:taskstatus=> "", # "complete", "fail", "waiting" or other status
:timestamp=> Dates.now(),
        :msgId=> "\$(uuid4())",
)
julia> newAgent = ChatAgent.agentReact(
"Jene",
mqttClientSpec,
role=:assistant_react,
msgMeta=msgMeta
)
julia> respond = ChatAgent.conversation(newAgent, "Hi! how are you?")
```
"""
function conversation(a::T, usermsg::String) where {T<:agent}
userintend = identifyUserIntention(a, usermsg)
@show userintend
@@ -384,20 +422,14 @@ function conversation(a::T, usermsg::String) where {T<:agent}
_ = addNewMessage(a, "assistant", respond)
@show respond
elseif userintend == "wine" #WORKING
@show a.thought
if a.thought == "nothing" # new thought
a.context = conversationSummary(a)
_ = addNewMessage(a, "user", usermsg)
prompt = generatePrompt_react_mistral_openorca(a, usermsg)
@show prompt
respond = work(a, prompt)
error("wine done")
else # continue thought
error("wine done")
end
@@ -428,10 +460,13 @@ function conversation(a::T, usermsg::String) where {T<:agent}
return respond
end
"""
Repeatedly run the LLM tool-call loop, stopping only when the LLM emits an `ANS:` final answer or invokes the chatbox tool to talk to the user.
"""
function work(a::T, prompt::String) where {T<:agent}
respond = nothing
while true
@show prompt
toolname = nothing
toolinput = nothing
@@ -449,43 +484,32 @@ function work(a::T, prompt::String) where {T<:agent}
@show chunkedtext
if headers[1][:char] == "ANS:"
a.thought = "nothing" # question finished, no more thought
respond = chunkedtext[1][:body]
a.thought = "nothing"
_ = addNewMessage(a, "assistant", respond)
break
else
# check for tool being called
ActInd = findDetectedCharacter(headers, "Act:")[1]
toolname = toolNameBeingCalled(chunkedtext[ActInd][:body], a.tools) #WORKING
toolname = toolNameBeingCalled(chunkedtext[ActInd][:body], a.tools)
toolinput = chunkedtext[ActInd+1][:body]
if toolname == "chatbox" # chat with user
a.thought *= toolinput
respond = toolinput
_ = addNewMessage(a, "assistant", respond)
break
else # function call
error("function call")
f = a.tools[Symbol(toolname)][:func]
_result = f(toolinput)
result = "Obs: $_result\n"
a.thought *= result
prompt = a.thought
end
break
end
end
return respond
end