This commit is contained in:
narawat lamaiin
2024-04-28 08:54:59 +07:00
parent 739944effd
commit 5d091a0b26
2 changed files with 109 additions and 53 deletions

View File

@@ -188,7 +188,7 @@ function chat_mistral_openorca(a::agentReflex, prompttemplate="llama3")
$conversation
"""
_response = sendReceivePrompt(a, prompt, a.config[:text2textchat][:mqtttopic],
max_tokens=1024, timeout=180, stopword=["<|", "</"])
max_tokens=1024, timeout=180,)
response = _response
return response

View File

@@ -5,7 +5,8 @@ export sendReceivePrompt, chunktext, extractStepFromPlan, checkTotalTaskInPlan,
isUsePlans, conversationSummary, checkReasonableness, replaceHeaders,
addShortMem!, splittext, dictToString, removeHeaders, keepOnlyKeys, experience,
messagesToString, messagesToString_nomark, removeTrailingCharacters, shortMemLatestTask,
keywordMemoryUpdate!, formatLLMtext_llama3instruct, formatLLMtext
keywordMemoryUpdate!, formatLLMtext_llama3instruct, formatLLMtext_phi3instruct,
formatLLMtext
using UUIDs, Dates, DataStructures, HTTP, MQTTClient, JSON3
using GeneralUtils
@@ -1085,46 +1086,99 @@ function checkSimilarKey(dict::AbstractDict, key::AbstractString)
end
""" Convert a single chat dictionary into LLM model instruct format.
# Phi-3 instruct format example
<|system|>
You are a helpful AI assistant.<|end|>
<|user|>
I am going to Paris, what should I see?<|end|>
<|assistant|>
Paris, the capital of France, is known for its stunning architecture, art museums."<|end|>
<|user|>
What is so great about #1?<|end|>
<|assistant|>
# Arguments
- `name::T`
message owner name e.g. "system", "user" or "assistant"
- `text::T`
""" Convert a chat dictionary into LLM model instruct format.
# Return
- `formattedtext::String`
text formatted to model format
Arguments\n
-----
name::T
message owner name e.g. "system", "user" or "assistant"
text::T
# Example
```jldoctest
julia> using Revise
julia> using YiemAgent
julia> d = Dict(:name=> "system",:text=> "You are a helpful, respectful and honest assistant.",)
julia> formattedtext = YiemAgent.formatLLMtext_phi3instruct(d[:name], d[:text])
Return\n
-----
formattedtext::String
text formatted to model format
```
Example\n
-----
```jldoctest
julia> using Revise
julia> d = Dict(:name=> "system",:text=> "You are a helpful, respectful and honest assistant.",)
julia> formattedtext = formatLLMtext_llama3instruct(d[:name], d[:text])
```
Signature
"""
function formatLLMtext_phi3instruct(name::T, text::T) where {T<:AbstractString}
    # Wrap a single chat message in the Phi-3 instruct template:
    # <|role|>\n{text}<|end|>\n
    # `name` is the message owner, e.g. "system", "user" or "assistant".
    # The trailing explicit \n plus the line break keeps messages separated
    # by a blank line when several formatted messages are concatenated.
    formattedtext =
        """
        <|$name|>
        $text<|end|>\n
        """
    return formattedtext
end
""" Convert a single chat dictionary into LLM model instruct format.
# Llama 3 instruct format example
<|begin_of_text|>
<|start_header_id|>system<|end_header_id|>
You are a helpful assistant.
<|eot_id|>
<|start_header_id|>user<|end_header_id|>
Get me an icecream.
<|eot_id|>
<|start_header_id|>assistant<|end_header_id|>
Go buy it yourself at 7-11.
<|eot_id|>
# Arguments
- `name::T`
message owner name e.g. "system", "user" or "assistant"
- `text::T`
# Return
- `formattedtext::String`
text formatted to model format
# Example
```jldoctest
julia> using Revise
julia> using YiemAgent
julia> d = Dict(:name=> "system",:text=> "You are a helpful, respectful and honest assistant.",)
julia> formattedtext = YiemAgent.formatLLMtext_llama3instruct(d[:name], d[:text])
"<|begin_of_text|>\n <|start_header_id|>system<|end_header_id|>\n You are a helpful, respectful and honest assistant.\n <|eot_id|>\n"
```
Signature
"""
function formatLLMtext_llama3instruct(name::T, text::T) where {T<:AbstractString}
formattedtext =
if name == "system"
"""<|begin_of_text|>
<|start_header_id|>$name<|end_header_id|>
$text
<|eot_id|>
"""
<|begin_of_text|>
<|start_header_id|>$name<|end_header_id|>
$text
<|eot_id|>\n
"""
else
"""
<|start_header_id|>$name<|end_header_id|>
$text
<|eot_id|>
<|start_header_id|>$name<|end_header_id|>
$text
<|eot_id|>\n
"""
end
@@ -1135,31 +1189,30 @@ end
""" Convert a chat messages in vector of dictionary into LLM model instruct format.
Arguments\n
-----
messages::Vector{Dict{Symbol, T}}
vector of chat message dicts, each with :name and :text keys
formatname::T
format name to be used
# Arguments
- `messages::Vector{Dict{Symbol, T}}`
vector of chat message dicts, each with :name and :text keys
- `formatname::T`
format name to be used
Return\n
-----
formattedtext::String
text formatted to model format
# Return
- `formattedtext::String`
text formatted to model format
Example\n
-----
```jldoctest
julia> using Revise
julia> chatmessage = [
Dict(:name=> "system",:text=> "You are a helpful, respectful and honest assistant.",),
Dict(:name=> "user",:text=> "list me all planets in our solar system.",),
]
julia> formattedtext = formatLLMtext(chatmessage, "llama3instruct")
```
# Example
```jldoctest
julia> using Revise
julia> using YiemAgent
julia> chatmessage = [
Dict(:name=> "system",:text=> "You are a helpful, respectful and honest assistant.",),
Dict(:name=> "user",:text=> "list me all planets in our solar system.",),
Dict(:name=> "assistant",:text=> "I'm sorry. I don't know. You tell me.",),
]
julia> formattedtext = YiemAgent.formatLLMtext(chatmessage, "llama3instruct")
"<|begin_of_text|>\n <|start_header_id|>system<|end_header_id|>\n You are a helpful, respectful and honest assistant.\n <|eot_id|>\n <|start_header_id|>user<|end_header_id|>\n list me all planets in our solar system.\n <|eot_id|>\n <|start_header_id|>assistant<|end_header_id|>\n I'm sorry. I don't know. You tell me.\n <|eot_id|>\n"
```
Signature\n
-----
# Signature
"""
function formatLLMtext(messages::Vector{Dict{Symbol, T}},
formatname::String="llama3instruct") where {T<:Any}
@@ -1167,6 +1220,8 @@ function formatLLMtext(messages::Vector{Dict{Symbol, T}},
formatLLMtext_llama3instruct
elseif formatname == "mistral"
# not define yet
elseif formatname == "phi3instruct"
formatLLMtext_phi3instruct
else
error("$formatname template not define yet")
end
@@ -1176,6 +1231,10 @@ function formatLLMtext(messages::Vector{Dict{Symbol, T}},
str *= f(t[:name], t[:text])
end
if formatname == "phi3instruct"
str *= "<|assistant|>\n"
end
return str
end
@@ -1202,9 +1261,6 @@ end