diff --git a/src/interface.jl b/src/interface.jl
index 459cf90..6c9ec50 100755
--- a/src/interface.jl
+++ b/src/interface.jl
@@ -66,6 +66,10 @@ function agentReact(
     mqttClientSpec::NamedTuple;
     role::Symbol=:assistant,
     roles::Dict=Dict(
+        :assistant =>
+        """
+        You are a helpful assistant.
+        """,
         :assistant_react =>
         """
         You are a helpful assistant. You don't know other people personal info previously.
@@ -274,8 +278,6 @@ function generatePrompt_react_mistral_openorca(messages::Dict, systemMsg::String
 
             toollines *= toolline
         end
-
-        prompt = replace(promptTemplate, "{tools}" => toollines)
         prompt = replace(promptTemplate, "{context}" => context)
 
     elseif role == "user"
@@ -290,6 +292,35 @@ function generatePrompt_react_mistral_openorca(messages::Dict, systemMsg::String
     return prompt
 end
 
+function generatePrompt_mistral_openorca(a::T, usermsg::String) where {T<:agent}
+    prompt =
+        """
+        <|im_start|>system
+        {systemMsg}
+        <|im_end|>
+        Here are the context for the question:
+        {context}
+        """
+    prompt = replace(prompt, "{systemMsg}" => a.roles[:assistant])
+
+    toolnames = ""
+    toollines = ""
+    for (toolname, v) in a.tools
+        toolline = "$toolname: $(v[:description]) $(v[:input]) $(v[:output])\n"
+        toollines *= toolline
+        toolnames *= "$toolname,"
+    end
+    prompt = replace(prompt, "{toolnames}" => toolnames)
+    prompt = replace(prompt, "{tools}" => toollines)
+
+    prompt = replace(prompt, "{context}" => a.context)
+
+    prompt *= "<|im_start|>user\n" * usermsg * "\n<|im_end|>\n"
+    prompt *= "<|im_start|>assistant\n"
+
+    return prompt
+end
+
 function generatePrompt_react_mistral_openorca(a::T, usermsg::String) where {T<:agent}
     prompt =
         """
@@ -325,12 +356,16 @@ end
 #WORKING
 function conversation(a::T, usermsg::String) where {T<:agent}
     userintend = identifyUserIntention(a, usermsg)
-    @show userintend
-    if userintend == "chat" #WORKING
+
+    respond = nothing
+    if userintend == "chat"
         summary = conversationSummary(a)
-        @show summary
-        error("conversation done")
-    elseif userintend == "wine"
+        _ = addNewMessage(a, "user", usermsg)
+        prompt = generatePrompt_mistral_openorca(a, usermsg)
+        respond = sendReceivePrompt(a, prompt)
+        respond = replace(respond, "\n<|im_end|>" => "")
+        _ = addNewMessage(a, "assistant", respond)
+    elseif userintend == "wine"
 
     #WORKING
     elseif userintend == "thought"
@@ -349,16 +384,16 @@ function conversation(a::T, usermsg::String) where {T<:agent}
 
-    if a.thought == "nothing"
-        a.context = conversationSummary(a)
-        addNewMessage(a, "user", usermsg)
-        prompt = generatePrompt_react_mistral_openorca(a, usermsg)
-        @show prompt
+    # if a.thought == "nothing"
+    #     a.context = conversationSummary(a)
+    #     addNewMessage(a, "user", usermsg)
+    #     prompt = generatePrompt_react_mistral_openorca(a, usermsg)
+    #     @show prompt
 
 
 
-    else
+    # else
 
 
 
-    end
+    # end
 
 
@@ -366,8 +401,8 @@ function conversation(a::T, usermsg::String) where {T<:agent}
 
 
-
-
+    # error("conversation done")
+    return respond
 end
 
 
 #WORKING
@@ -555,7 +590,7 @@ function identifyUserIntention(a::T, usermsg::String) where {T<:agent}
 end
 
 
-function sendReceivePrompt(a::agent, prompt::String; timeout::Int=30)
+function sendReceivePrompt(a::T, prompt::String; timeout::Int=30) where {T<:agent}
     a.msgMeta[:msgId] = "$(uuid4())" # new msg id for each msg
     msg = Dict(
         :msgMeta=> a.msgMeta,