This commit is contained in:
narawat lamaiin
2024-04-23 17:26:34 +07:00
parent d833d5d22e
commit 9f5efb2948
3 changed files with 193 additions and 25 deletions

View File

@@ -47,15 +47,15 @@ using ..type, ..utils, ..llmfunction
julia> addNewMessage(agent1, "user", "Where should I go to buy snacks")
```
"""
function addNewMessage(a::T1, role::String, content::T2) where {T1<:agent, T2<:AbstractString}
if role ∉ a.availableRole # guard against typo
error("role is not in agent.availableRole $(@__LINE__)")
function addNewMessage(a::T1, name::String, content::T2) where {T1<:agent, T2<:AbstractString}
if name ∉ a.availableRole # guard against typo
error("name is not in agent.availableRole $(@__LINE__)")
end
# check whether user messages exceed limit
userMsg = 0
for i in a.messages
if i[:role] == "user"
if i[:name] == "user"
userMsg += 1
end
end
@@ -66,7 +66,7 @@ function addNewMessage(a::T1, role::String, content::T2) where {T1<:agent, T2<:A
messageleft = a.maxUserMsg
else
userMsg += 1
d = Dict(:role=> role, :content=> content, :timestamp=> Dates.now())
d = Dict(:name=> name, :text=> content, :timestamp=> Dates.now())
push!(a.messages, d)
messageleft = a.maxUserMsg - userMsg
end
@@ -95,7 +95,66 @@ function removeLatestMsg(a::T) where {T<:agent}
end
end
function chat_mistral_openorca(a::agentReflex)
# function chat_mistral_openorca(a::agentReflex, prompttemplate="llama3")
# """
# general prompt format:
# "
# <|system|>
# {role}
# {tools}
# {thinkingFormat}
# {context}
# <|im_end|>
# <|im_start|>user
# {usermsg}
# <|im_end|>
# <|im_start|>assistant
# "
# Note:
# {context} =
# "
# {earlierConversation}
# {env state}
# {shortterm memory}
# {longterm memory}
# "
# """
# conversation = messagesToString(a.messages)
# aboutYourself =
# """
# Your name is $(a.name)
# $(a.roles[a.role])
# """
# prompt =
# """
# <|system|>
# <About yourself>
# $aboutYourself
# </About yourself>
# </s>
# $conversation
# <|assistant|>
# """
# response = sendReceivePrompt(a, prompt, a.config[:text2textchat][:mqtttopic],
# timeout=180, stopword=["<|", "</"])
# response = split(response, "<|")[1]
# response = split(response, "</")[1]
# return response
# end
function chat_mistral_openorca(a::agentReflex, prompttemplate="llama3")
"""
general prompt format:
@@ -123,23 +182,11 @@ function chat_mistral_openorca(a::agentReflex)
"
"""
conversation = messagesToString(a.messages)
aboutYourself =
"""
Your name is $(a.name)
$(a.roles[a.role])
"""
conversation = formatLLMtext(a.messages, "llama3instruct")
prompt =
"""
<|system|>
<About yourself>
$aboutYourself
</About yourself>
</s>
$conversation
<|assistant|>
"""
response = sendReceivePrompt(a, prompt, a.config[:text2textchat][:mqtttopic],
@@ -1275,7 +1322,7 @@ function work(a::agentReflex)
latestTask = shortMemLatestTask(a.memory[:shortterm])
if haskey(a.memory[:shortterm], "Act $latestTask:")
if occursin("askbox", a.memory[:shortterm]["Act $latestTask:"])
a.memory[:shortterm]["Obs $latestTask:"] = "(user response) " * a.messages[end][:content]
a.memory[:shortterm]["Obs $latestTask:"] = "(user response) " * a.messages[end][:text]
end
end
end

View File

@@ -229,6 +229,10 @@ function agentReflex(
roles = roles,
roleSpecificInstruction = roleSpecificInstruction,
)
systemChatMsg = Dict(:name=> "system",:text=> "You are a helpful, respectful and honest assistant.",)
push!(newAgent.messages, systemChatMsg)
return newAgent

View File

@@ -5,7 +5,7 @@ export sendReceivePrompt, chunktext, extractStepFromPlan, checkTotalTaskInPlan,
isUsePlans, conversationSummary, checkReasonableness, replaceHeaders,
addShortMem!, splittext, dictToString, removeHeaders, keepOnlyKeys, experience,
messagesToString, messagesToString_nomark, removeTrailingCharacters, shortMemLatestTask,
keywordMemoryUpdate!
keywordMemoryUpdate!, formatLLMtext_llama3instruct, formatLLMtext
using UUIDs, Dates, DataStructures, HTTP, MQTTClient, JSON3
using GeneralUtils
@@ -80,7 +80,7 @@ function sendReceivePrompt(a::T1, prompt::String, sendtopic::String;
# send prompt
@show outgoing_msg
publish(a.mqttClient, outgoing_msg[:msgMeta][:sendTopic],
JSON3.write(outgoing_msg))
JSON3.write(outgoing_msg))
starttime = Dates.now()
result = nothing
@@ -88,9 +88,10 @@ function sendReceivePrompt(a::T1, prompt::String, sendtopic::String;
while true
timepass = GeneralUtils.time_difference(starttime, Dates.now(), "seconds")
if isready(a.mqttMsg_internal)
payload = take!(a.mqttMsg_internal)
if payload[:msgMeta][:replyToMsgId] == outgoing_msg[:msgMeta][:msgId]
result = haskey(payload, :text) ? payload[:text] : nothing
incomingMsg = take!(a.mqttMsg_internal)
incomingPayload = incomingMsg[:payload]
if incomingMsg[:msgMeta][:replyToMsgId] == outgoing_msg[:msgMeta][:msgId]
result = haskey(incomingPayload, :text) ? incomingPayload[:text] : nothing
break
end
elseif timepass <= timeout
@@ -546,6 +547,29 @@ function messagesToString(messages::AbstractVector{T}; addressAIas="assistant")
return conversation
end
"""
    formatLLMtext_llama3(name, text, isbegintext=false)

Wrap a single chat message in the Llama 3 header/footer markers.
When `isbegintext` is `true`, the `<|begin_of_text|>` marker is prepended
(intended for the first message of a prompt).
"""
function formatLLMtext_llama3(name::T, text::T, isbegintext::Bool=false) where {T<:AbstractString}
    # Every message shares the same wrapping; only the prompt opener differs.
    wrapped = "<|start_header_id|>$name<|end_header_id|>\n$text\n<|eot_id|>\n"
    return isbegintext ? "<|begin_of_text|>\n" * wrapped : wrapped
end
# NOTE(review): unimplemented stub — the body is empty, so it always returns
# `nothing`. Presumably intended to dispatch message formatting by
# `templateName` (cf. `formatLLMtext`); confirm the intent or remove it, since
# callers expecting a String from `messagesToString` would break on this method.
function messagesToString(name::T, text::T, templateName::T) where {T<:AbstractString}
end
# function messagesToString(messages::AbstractVector{T}; addressAIas="assistant") where {T<:AbstractDict}
# conversation = ""
# if length(messages)!= 0
@@ -1063,6 +1087,99 @@ end
""" Convert a chat dictionary into LLM model instruct format.
Arguments\n
-----
name::T
message owner name e.f. "system", "user" or "assistant"
text::T
Return\n
-----
formattedtext::String
text formatted to model format
Example\n
-----
```jldoctest
julia> using Revise
julia> d = Dict(:name=> "system",:text=> "You are a helpful, respectful and honest assistant.",)
julia> formattedtext = formatLLMtext_llama3instruct(d[:name], d[:text])
```
Signature\n
-----
"""
function formatLLMtext_llama3instruct(name::T, text::T) where {T<:AbstractString}
formattedtext =
if name == "system"
"""<|begin_of_text|>
<|start_header_id|>$name<|end_header_id|>
$text
<|eot_id|>
"""
else
"""
<|start_header_id|>$name<|end_header_id|>
$text
<|eot_id|>
"""
end
return formattedtext
end
""" Convert a chat messages in vector of dictionary into LLM model instruct format.
Arguments\n
-----
messages::Vector{Dict{Symbol, T}}
message owner name e.f. "system", "user" or "assistant"
formatname::T
format name to be used
Return\n
-----
formattedtext::String
text formatted to model format
Example\n
-----
```jldoctest
julia> using Revise
julia> chatmessage = [
Dict(:name=> "system",:text=> "You are a helpful, respectful and honest assistant.",),
Dict(:name=> "user",:text=> "list me all planets in our solar system.",),
]
julia> formattedtext = formatLLMtext(chatmessage, "llama3instruct")
```
Signature\n
-----
"""
function formatLLMtext(messages::Vector{Dict{Symbol, T}},
formatname::String="llama3instruct") where {T<:Any}
f = if formatname == "llama3instruct"
formatLLMtext_llama3instruct
elseif formatname == "mistral"
# not define yet
else
error("$formatname template not define yet")
end
str = ""
for t in messages
str *= f(t[:name], t[:text])
end
return str
end