This commit is contained in:
narawat lamaiin
2024-12-09 20:48:45 +07:00
parent 4f1280daa3
commit 9aef993813
47 changed files with 16623 additions and 12554 deletions

File diff suppressed because it is too large Load Diff

View File

@@ -1,47 +1,47 @@
module YiemAgent
# export agent
""" Order by dependencies of each file. The 1st included file must not depend on any other
files and each file can only depend on the file included before it.
"""
include("type.jl")
using .type
include("util.jl")
using .util
include("llmfunction.jl")
using .llmfunction
include("interface.jl")
using .interface
# ---------------------------------------------- 100 --------------------------------------------- #
end # module YiemAgent
module YiemAgent
# export agent
""" Order by dependencies of each file. The 1st included file must not depend on any other
files and each file can only depend on the file included before it.
"""
include("type.jl")
using .type
include("util.jl")
using .util
include("llmfunction.jl")
using .llmfunction
include("interface.jl")
using .interface
# ---------------------------------------------- 100 --------------------------------------------- #
end # module YiemAgent

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -1,268 +1,262 @@
module type
export agent, sommelier, companion
using Dates, UUIDs, DataStructures, JSON3
using GeneralUtils
# ---------------------------------------------- 100 --------------------------------------------- #
abstract type agent end
# A simple conversational agent: rolling chat history plus a free-form memory
# dict, with text generation delegated to an injected LLM callback.
mutable struct companion <: agent
name::String # agent name
id::String # agent id
maxHistoryMsg::Integer # e.g. 21th and earlier messages will get summarized
""" Memory
Ref: Chat prompt format https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGML/discussions/3
NO "system" message in chathistory because I want to add it at the inference time
chathistory= [
Dict(:name=>"user", :text=> "Wassup!", :timestamp=> Dates.now()),
Dict(:name=>"assistant", :text=> "Hi I'm your assistant.", :timestamp=> Dates.now()),
]
"""
chathistory::Vector{Dict{Symbol, Any}} # entries carry :name, :text, :timestamp
memory::Dict{Symbol, Any} # keys populated by the companion constructor (:chatbox, :shortmem, :events)
# communication function
text2textInstructLLM::Function # prompt text -> generated text callback (signature assumed — TODO confirm)
end
""" Keyword constructor for `companion`.

Builds a fresh default `memory` store and assembles the struct around the
injected text-to-text LLM callback.
"""
function companion(
    text2textInstructLLM::Function;
    name::String = "Assistant",
    id::String = string(uuid4()),
    maxHistoryMsg::Integer = 20,
    chathistory::Vector{Dict{Symbol, String}} = Vector{Dict{Symbol, String}}(),
)
    # Per-agent scratch memory: chatbox text, short-term memory, event log.
    initialmemory = Dict{Symbol, Any}(
        :chatbox => "",
        :shortmem => Vector{Dict{Symbol, String}}(),
        :events => Vector{Dict{Symbol, Any}}(),
    )
    return companion(name, id, maxHistoryMsg, chathistory, initialmemory, text2textInstructLLM)
end
""" A sommelier agent.
# Arguments
- `mqttClient::Client`
MQTTClient's client
- `msgMeta::Dict{Symbol, Any}`
A dict contain info about a message.
- `config::Dict{Symbol, Any}`
Config info for an agent. Contain mqtt topic for internal use and other info.
# Keyword Arguments
- `name::String`
Agent's name
- `id::String`
Agent's ID
- `tools::Dict{Symbol, Any}`
Agent's tools
- `maxHistoryMsg::Integer`
max history message
# Return
- `nothing`
# Example
```jldoctest
julia> using YiemAgent, MQTTClient, GeneralUtils
julia> msgMeta = GeneralUtils.generate_msgMeta(
"N/A",
replyTopic = "/testtopic/prompt"
)
julia> tools= Dict(
:chatbox=>Dict(
:name => "chatbox",
:description => "Useful only for when you need to ask the user for more info or context. Do not ask the user their own question.",
:input => "Input should be a text.",
:output => "" ,
:func => nothing,
),
)
julia> agentConfig = Dict(
:receiveprompt=>Dict(
:mqtttopic=> "/testtopic/prompt", # topic to receive prompt i.e. frontend send msg to this topic
),
:receiveinternal=>Dict(
:mqtttopic=> "/testtopic/internal", # receive topic for model's internal
),
:text2text=>Dict(
:mqtttopic=> "/text2text/receive",
),
)
julia> client, connection = MakeConnection("test.mosquitto.org", 1883)
julia> agent = YiemAgent.bsommelier(
client,
msgMeta,
agentConfig,
name= "assistant",
id= "555", # agent instance id
tools=tools,
)
```
# TODO
- [] update docstring
- [x] implement the function
# Signature
"""
# A wine-recommendation agent for one retailer: tool metadata, chat history,
# memory, and injected callbacks for the LLM and the SQL/vector-DB layers.
mutable struct sommelier <: agent
name::String # agent name
id::String # agent id
retailername::String # retailer this sommelier serves
tools::Dict # tool name => Dict(:description, :input, :output); defaults built by the constructor
maxHistoryMsg::Integer # e.g. 21th and earlier messages will get summarized
""" Memory
Ref: Chat prompt format https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGML/discussions/3
NO "system" message in chathistory because I want to add it at the inference time
chathistory= [
Dict(:name=>"user", :text=> "Wassup!", :timestamp=> Dates.now()),
Dict(:name=>"assistant", :text=> "Hi I'm your assistant.", :timestamp=> Dates.now()),
]
"""
chathistory::Vector{Dict{Symbol, Any}} # entries carry :name, :text, :timestamp
memory::Dict{Symbol, Any} # keys populated by the sommelier constructor (:chatbox, :shortmem, :events)
# communication function
text2textInstructLLM::Function # prompt text -> generated text callback (signature assumed — TODO confirm)
executeSQL::Function # runs SQL against the inventory — TODO confirm signature
querySQLVectorDB::Function # similarity search over the vector DB — TODO confirm signature
addSQLVectorDB::Function # inserts rows into the vector DB — TODO confirm signature
end
""" Keyword constructor for `sommelier`.

Builds the built-in tool catalogue and a fresh memory store, then assembles
the struct with the four injected communication callbacks.
"""
function sommelier(
    text2textInstructLLM::Function,
    executeSQL::Function,
    querySQLVectorDB::Function,
    addSQLVectorDB::Function;
    name::String = "Assistant",
    id::String = string(uuid4()),
    retailername::String = "retailer_name",
    maxHistoryMsg::Integer = 20,
    chathistory::Vector{Dict{Symbol, String}} = Vector{Dict{Symbol, String}}(),
)
    # Prompt-facing tool catalogue: tool name => metadata strings.
    toolcatalogue = Dict(
        "chatbox" => Dict(
            :description => "<askbox tool description>Useful for when you need to ask the user for more context. Do not ask the user their own question.</askbox tool description>",
            :input => """<input>Input is a text in JSON format.</input><input example>{\"Q1\": \"How are you doing?\", \"Q2\": \"How may I help you?\"}</input example>""",
            :output => "",
        ),
        "winestock" => Dict(
            :description => "<winestock tool description>A handy tool for searching wine in your inventory that match the user preferences.</winestock tool description>",
            :input => """<input>Input is a JSON-formatted string that contains a detailed and precise search query.</input><input example>{\"wine type\": \"rose\", \"price\": \"max 35\", \"sweetness level\": \"sweet\", \"intensity level\": \"light bodied\", \"Tannin level\": \"low\", \"Acidity level\": \"low\"}</input example>""",
            :output => """<output>Output are wines that match the search query in JSON format.""",
        ),
    )
    # Fresh per-agent memory store.
    mem = Dict{Symbol, Any}(
        :chatbox => "",
        :shortmem => Vector{Dict{Symbol, String}}(),
        :events => Vector{Dict{Symbol, Any}}(),
    )
    return sommelier(
        name, id, retailername, toolcatalogue, maxHistoryMsg, chathistory, mem,
        text2textInstructLLM, executeSQL, querySQLVectorDB, addSQLVectorDB,
    )
end
module type
export agent, sommelier, companion
using Dates, UUIDs, DataStructures, JSON3
using GeneralUtils
# ---------------------------------------------- 100 --------------------------------------------- #
abstract type agent end
# A simple conversational agent: rolling chat history plus a free-form memory
# dict, with text generation delegated to an injected LLM callback.
mutable struct companion <: agent
name::String # agent name
id::String # agent id
maxHistoryMsg::Integer # e.g. 21th and earlier messages will get summarized
""" Memory
Ref: Chat prompt format https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGML/discussions/3
NO "system" message in chathistory because I want to add it at the inference time
chathistory= [
Dict(:name=>"user", :text=> "Wassup!", :timestamp=> Dates.now()),
Dict(:name=>"assistant", :text=> "Hi I'm your assistant.", :timestamp=> Dates.now()),
]
"""
chathistory::Vector{Dict{Symbol, Any}} # entries carry :name, :text, :timestamp
memory::Dict{Symbol, Any} # keys populated by the companion constructor (:chatbox, :shortmem, :events, :state)
# communication function
text2textInstructLLM::Function # prompt text -> generated text callback (signature assumed — TODO confirm)
end
""" Keyword constructor for `companion`.

Builds a fresh default `memory` store (including an ordered short-term memory
and a `:state` dict) and assembles the struct around the injected LLM callback.
"""
function companion(
    text2textInstructLLM::Function;
    name::String = "Assistant",
    id::String = string(uuid4()),
    maxHistoryMsg::Integer = 20,
    chathistory::Vector{Dict{Symbol, String}} = Vector{Dict{Symbol, String}}(),
)
    # Per-agent scratch memory: chatbox text, ordered short-term memory,
    # event log, and mutable state.
    initialmemory = Dict{Symbol, Any}(
        :chatbox => "",
        :shortmem => OrderedDict{Symbol, Any}(),
        :events => Vector{Dict{Symbol, Any}}(),
        :state => Dict{Symbol, Any}(),
    )
    return companion(name, id, maxHistoryMsg, chathistory, initialmemory, text2textInstructLLM)
end
""" A sommelier agent.
# Arguments
- `mqttClient::Client`
MQTTClient's client
- `msgMeta::Dict{Symbol, Any}`
A dict contain info about a message.
- `config::Dict{Symbol, Any}`
Config info for an agent. Contain mqtt topic for internal use and other info.
# Keyword Arguments
- `name::String`
Agent's name
- `id::String`
Agent's ID
- `tools::Dict{Symbol, Any}`
Agent's tools
- `maxHistoryMsg::Integer`
max history message
# Return
- `nothing`
# Example
```jldoctest
julia> using YiemAgent, MQTTClient, GeneralUtils
julia> msgMeta = GeneralUtils.generate_msgMeta(
"N/A",
replyTopic = "/testtopic/prompt"
)
julia> tools= Dict(
:chatbox=>Dict(
:name => "chatbox",
:description => "Useful only for when you need to ask the user for more info or context. Do not ask the user their own question.",
:input => "Input should be a text.",
:output => "" ,
:func => nothing,
),
)
julia> agentConfig = Dict(
:receiveprompt=>Dict(
:mqtttopic=> "/testtopic/prompt", # topic to receive prompt i.e. frontend send msg to this topic
),
:receiveinternal=>Dict(
:mqtttopic=> "/testtopic/internal", # receive topic for model's internal
),
:text2text=>Dict(
:mqtttopic=> "/text2text/receive",
),
)
julia> client, connection = MakeConnection("test.mosquitto.org", 1883)
julia> agent = YiemAgent.bsommelier(
client,
msgMeta,
agentConfig,
name= "assistant",
id= "555", # agent instance id
tools=tools,
)
```
# TODO
- [] update docstring
- [x] implement the function
# Signature
"""
# A wine-recommendation agent for one retailer: tool metadata, chat history,
# memory, and a bundle of injected communication functions.
mutable struct sommelier <: agent
name::String # agent name
id::String # agent id
retailername::String # retailer this sommelier serves
tools::Dict # tool name => Dict(:description, :input, :output); defaults built by the constructor
maxHistoryMsg::Integer # e.g. 21th and earlier messages will get summarized
""" Memory
Ref: Chat prompt format https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGML/discussions/3
NO "system" message in chathistory because I want to add it at the inference time
chathistory= [
Dict(:name=>"user", :text=> "Wassup!", :timestamp=> Dates.now()),
Dict(:name=>"assistant", :text=> "Hi I'm your assistant.", :timestamp=> Dates.now()),
]
"""
chathistory::Vector{Dict{Symbol, Any}} # entries carry :name, :text, :timestamp
memory::Dict{Symbol, Any} # keys populated by the sommelier constructor (:chatbox, :shortmem, :events, :state)
func # NamedTuple of functions — untyped (Any); exact field names assumed per callers — TODO confirm
end
""" Keyword constructor for `sommelier`.

Builds the built-in tool catalogue and a fresh memory store (including the
`:state` dict pre-seeded with `:wine_presented_to_user`), then assembles the
struct with `func`, the caller-supplied NamedTuple of communication functions.
"""
function sommelier(
    func; # NamedTuple of functions
    name::String = "Assistant",
    id::String = string(uuid4()),
    retailername::String = "retailer_name",
    maxHistoryMsg::Integer = 20,
    chathistory::Vector{Dict{Symbol, String}} = Vector{Dict{Symbol, String}}(),
)
    # Prompt-facing tool catalogue: tool name => metadata strings.
    toolcatalogue = Dict(
        "chatbox" => Dict(
            :description => "<askbox tool description>Useful for when you need to ask the user for more context. Do not ask the user their own question.</askbox tool description>",
            :input => """<input>Input is a text in JSON format.</input><input example>{\"Q1\": \"How are you doing?\", \"Q2\": \"How may I help you?\"}</input example>""",
            :output => "",
        ),
        "winestock" => Dict(
            :description => "<winestock tool description>A handy tool for searching wine in your inventory that match the user preferences.</winestock tool description>",
            :input => """<input>Input is a JSON-formatted string that contains a detailed and precise search query.</input><input example>{\"wine type\": \"rose\", \"price\": \"max 35\", \"sweetness level\": \"sweet\", \"intensity level\": \"light bodied\", \"Tannin level\": \"low\", \"Acidity level\": \"low\"}</input example>""",
            :output => """<output>Output are wines that match the search query in JSON format.""",
        ),
    )
    # Fresh per-agent memory store; :state tracks what has been shown to the user.
    mem = Dict{Symbol, Any}(
        :chatbox => "",
        :shortmem => OrderedDict{Symbol, Any}(),
        :events => Vector{Dict{Symbol, Any}}(),
        :state => Dict{Symbol, Any}(
            :wine_presented_to_user => "None",
        ),
    )
    return sommelier(
        name, id, retailername, toolcatalogue, maxHistoryMsg, chathistory, mem, func,
    )
end
end # module type

View File

@@ -1,450 +1,453 @@
module util
export clearhistory, addNewMessage, vectorOfDictToText, eventdict, noises
using UUIDs, Dates, DataStructures, HTTP, MQTTClient, JSON3
using GeneralUtils
using ..type
# ---------------------------------------------- 100 --------------------------------------------- #
""" Clear agent chat history.
# Arguments
- `a::agent`
an agent
# Return
- nothing
# Example
```jldoctest
julia> using YiemAgent, MQTTClient, GeneralUtils
julia> client, connection = MakeConnection("test.mosquitto.org", 1883)
julia> connect(client, connection)
julia> msgMeta = GeneralUtils.generate_msgMeta("testtopic")
julia> agentConfig = Dict(
:receiveprompt=>Dict(
:mqtttopic=> "testtopic/receive",
),
:receiveinternal=>Dict(
:mqtttopic=> "testtopic/internal",
),
:text2text=>Dict(
:mqtttopic=> "testtopic/text2text",
),
)
julia> a = YiemAgent.sommelier(
client,
msgMeta,
agentConfig,
)
julia> YiemAgent.addNewMessage(a, "user", "hello")
julia> YiemAgent.clearhistory(a)
```
# TODO
- [PENDING] clear memory
# Signature
"""
""" Reset an agent's conversational state in place.

Empties the chat history, the short-term memory and the event log, and blanks
the chatbox text. Other `memory` entries are left untouched.
"""
function clearhistory(a::T) where {T<:agent}
    empty!(a.chathistory)
    foreach(empty!, (a.memory[:shortmem], a.memory[:events]))
    a.memory[:chatbox] = ""
end
""" Add new message to agent.
Arguments\n
-----
a::agent
an agent
role::String
message sender role i.e. system, user or assistant
text::String
message text
Return\n
-----
nothing
Example\n
-----
```jldoctest
julia> using YiemAgent, MQTTClient, GeneralUtils
julia> client, connection = MakeConnection("test.mosquitto.org", 1883)
julia> connect(client, connection)
julia> msgMeta = GeneralUtils.generate_msgMeta("testtopic")
julia> agentConfig = Dict(
:receiveprompt=>Dict(
:mqtttopic=> "testtopic/receive",
),
:receiveinternal=>Dict(
:mqtttopic=> "testtopic/internal",
),
:text2text=>Dict(
:mqtttopic=> "testtopic/text2text",
),
)
julia> a = YiemAgent.sommelier(
client,
msgMeta,
agentConfig,
)
julia> YiemAgent.addNewMessage(a, "user", "hello")
```
Signature\n
-----
"""
""" Append a chat message to the agent's history.

# Arguments
- `a::agent`: the agent whose `chathistory` is extended.
- `name::String`: sender role; must be one of "system", "user" or "assistant".
- `text::AbstractString`: the message body.

# Keywords
- `maximumMsg::Integer=20`: once the history is longer than this, the history
  is handed to `summarize` instead of growing further.

# Throws
- `ErrorException` when `name` is not an allowed role.
"""
function addNewMessage(a::T1, name::String, text::T2;
maximumMsg::Integer=20) where {T1<:agent, T2<:AbstractString}
    # Guard against typo. Fix: the membership operator `∉` was dropped from
    # this condition by an encoding round-trip, leaving a broken expression.
    if name ∉ ["system", "user", "assistant"]
        error("name is not in agent.availableRole $(@__LINE__)")
    end
    #[] summarize the oldest 10 message
    if length(a.chathistory) > maximumMsg
        # NOTE(review): `summarize` is not defined in this module, and the new
        # message is dropped on this branch — confirm both are intentional.
        summarize(a.chathistory)
    else
        d = Dict(:name=> name, :text=> text, :timestamp=> Dates.now())
        push!(a.chathistory, d)
    end
end
"""
# Arguments
- `v::Integer`
dummy variable
# Return
# Example
```jldoctest
julia>
```
# TODO
- [] update docstring
- [x] implement the function
# Signature
"""
"""
    vectorOfDictToText(vecd::Vector; withkey=true)

Render a vector of chat-message dicts as plain text.

With `withkey=true` each entry contributes `"<name>> <text> \\n"` from its
`:name` and `:text` fields; otherwise every value of every dict is emitted as
`"<value> \\n"` (value iteration order follows the dict).
"""
function vectorOfDictToText(vecd::Vector; withkey=true)
    buf = IOBuffer()
    for entry in vecd
        if withkey
            print(buf, "$(entry[:name])> $(entry[:text]) \n")
        else
            for (_, val) in entry
                print(buf, "$val \n")
            end
        end
    end
    return String(take!(buf))
end
"""
    eventdict(; kwargs...)

Build an event record as a `Dict{Symbol, Any}` with a fixed nine-key schema
(`:event_description`, `:timestamp`, `:subject`, `:action_or_dialogue`,
`:location`, `:equipment_used`, `:material_used`, `:outcome`, `:note`).
Every field defaults to `nothing`.
"""
function eventdict(;
event_description::Union{String, Nothing}=nothing,
timestamp::Union{DateTime, Nothing}=nothing,
subject::Union{String, Nothing}=nothing,
action_or_dialogue::Union{String, Nothing}=nothing,
location::Union{String, Nothing}=nothing,
equipment_used::Union{String, Nothing}=nothing,
material_used::Union{String, Nothing}=nothing,
outcome::Union{String, Nothing}=nothing,
note::Union{String, Nothing}=nothing,
)
    # Collect the keywords into a NamedTuple, then materialize as a Dict so
    # the returned schema always carries all nine keys.
    fields = (
        event_description = event_description,
        timestamp = timestamp,
        subject = subject,
        action_or_dialogue = action_or_dialogue,
        location = location,
        equipment_used = equipment_used,
        material_used = material_used,
        outcome = outcome,
        note = note,
    )
    return Dict{Symbol, Any}(pairs(fields))
end
# Generate one pseudo-word of `n` random lowercase letters.
noise(n::Integer) = String(rand('a':'z', n))

"""
    noises(totalword, wordlength)

Return `totalword` random lowercase words of `wordlength` letters each,
separated by single spaces (no leading/trailing whitespace).
"""
function noises(totalword::Integer, wordlength::Integer)
    return strip(join([noise(wordlength) for _ in 1:totalword], " "))
end
# """ Convert a single chat dictionary into LLM model instruct format.
# # Llama 3 instruct format example
# <|system|>
# You are a helpful AI assistant.<|end|>
# <|user|>
# I am going to Paris, what should I see?<|end|>
# <|assistant|>
# Paris, the capital of France, is known for its stunning architecture, art museums."<|end|>
# <|user|>
# What is so great about #1?<|end|>
# <|assistant|>
# # Arguments
# - `name::T`
# message owner name e.f. "system", "user" or "assistant"
# - `text::T`
# # Return
# - `formattedtext::String`
# text formatted to model format
# # Example
# ```jldoctest
# julia> using Revise
# julia> using YiemAgent
# julia> d = Dict(:name=> "system",:text=> "You are a helpful, respectful and honest assistant.",)
# julia> formattedtext = YiemAgent.formatLLMtext_phi3instruct(d[:name], d[:text])
# ```
# Signature
# """
# function formatLLMtext_phi3instruct(name::T, text::T) where {T<:AbstractString}
# formattedtext =
# """
# <|$name|>
# $text<|end|>\n
# """
# return formattedtext
# end
# """ Convert a single chat dictionary into LLM model instruct format.
# # Llama 3 instruct format example
# <|begin_of_text|>
# <|start_header_id|>system<|end_header_id|>
# You are a helpful assistant.
# <|eot_id|>
# <|start_header_id|>user<|end_header_id|>
# Get me an icecream.
# <|eot_id|>
# <|start_header_id|>assistant<|end_header_id|>
# Go buy it yourself at 7-11.
# <|eot_id|>
# # Arguments
# - `name::T`
# message owner name e.f. "system", "user" or "assistant"
# - `text::T`
# # Return
# - `formattedtext::String`
# text formatted to model format
# # Example
# ```jldoctest
# julia> using Revise
# julia> using YiemAgent
# julia> d = Dict(:name=> "system",:text=> "You are a helpful, respectful and honest assistant.",)
# julia> formattedtext = YiemAgent.formatLLMtext_llama3instruct(d[:name], d[:text])
# "<|begin_of_text|>\n <|start_header_id|>system<|end_header_id|>\n You are a helpful, respectful and honest assistant.\n <|eot_id|>\n"
# ```
# Signature
# """
# function formatLLMtext_llama3instruct(name::T, text::T) where {T<:AbstractString}
# formattedtext =
# if name == "system"
# """
# <|begin_of_text|>
# <|start_header_id|>$name<|end_header_id|>
# $text
# <|eot_id|>
# """
# else
# """
# <|start_header_id|>$name<|end_header_id|>
# $text
# <|eot_id|>
# """
# end
# return formattedtext
# end
# """ Convert a chat messages in vector of dictionary into LLM model instruct format.
# # Arguments
# - `messages::Vector{Dict{Symbol, T}}`
# message owner name e.f. "system", "user" or "assistant"
# - `formatname::T`
# format name to be used
# # Return
# - `formattedtext::String`
# text formatted to model format
# # Example
# ```jldoctest
# julia> using Revise
# julia> using YiemAgent
# julia> chatmessage = [
# Dict(:name=> "system",:text=> "You are a helpful, respectful and honest assistant.",),
# Dict(:name=> "user",:text=> "list me all planets in our solar system.",),
# Dict(:name=> "assistant",:text=> "I'm sorry. I don't know. You tell me.",),
# ]
# julia> formattedtext = YiemAgent.formatLLMtext(chatmessage, "llama3instruct")
# "<|begin_of_text|>\n <|start_header_id|>system<|end_header_id|>\n You are a helpful, respectful and honest assistant.\n <|eot_id|>\n <|start_header_id|>user<|end_header_id|>\n list me all planets in our solar system.\n <|eot_id|>\n <|start_header_id|>assistant<|end_header_id|>\n I'm sorry. I don't know. You tell me.\n <|eot_id|>\n"
# ```
# # Signature
# """
# function formatLLMtext(messages::Vector{Dict{Symbol, T}},
# formatname::String="llama3instruct") where {T<:Any}
# f = if formatname == "llama3instruct"
# formatLLMtext_llama3instruct
# elseif formatname == "mistral"
# # not define yet
# elseif formatname == "phi3instruct"
# formatLLMtext_phi3instruct
# else
# error("$formatname template not define yet")
# end
# str = ""
# for t in messages
# str *= f(t[:name], t[:text])
# end
# # add <|assistant|> so that the model don't generate it and I don't need to clean it up later
# if formatname == "phi3instruct"
# str *= "<|assistant|>\n"
# end
# return str
# end
# """
# Arguments\n
# -----
# Return\n
# -----
# Example\n
# -----
# ```jldoctest
# julia>
# ```
# TODO\n
# -----
# [] update docstring
# [PENDING] implement the function
# Signature\n
# -----
# """
# function iterativeprompting(a::T, prompt::String, verification::Function) where {T<:agent}
# msgMeta = GeneralUtils.generate_msgMeta(
# a.config[:externalService][:text2textinstruct],
# senderName= "iterativeprompting",
# senderId= a.id,
# receiverName= "text2textinstruct",
# )
# outgoingMsg = Dict(
# :msgMeta=> msgMeta,
# :payload=> Dict(
# :text=> prompt,
# )
# )
# success = nothing
# result = nothing
# critique = ""
# # iteration loop
# while true
# # send prompt to LLM
# response = GeneralUtils.sendReceiveMqttMsg(outgoingMsg)
# error("--> iterativeprompting")
# # check for correctness and get feedback
# success, _critique = verification(response)
# if success
# result = response
# break
# else
# # add critique to prompt
# critique *= _critique * "\n"
# replace!(prompt, "Critique: ..." => "Critique: $critique")
# end
# end
# return (success=success, result=result)
# end
module util
export clearhistory, addNewMessage, vectorOfDictToText, eventdict, noises
using UUIDs, Dates, DataStructures, HTTP, MQTTClient, JSON3
using GeneralUtils
using ..type
# ---------------------------------------------- 100 --------------------------------------------- #
""" Clear agent chat history.
# Arguments
- `a::agent`
an agent
# Return
- nothing
# Example
```jldoctest
julia> using YiemAgent, MQTTClient, GeneralUtils
julia> client, connection = MakeConnection("test.mosquitto.org", 1883)
julia> connect(client, connection)
julia> msgMeta = GeneralUtils.generate_msgMeta("testtopic")
julia> agentConfig = Dict(
:receiveprompt=>Dict(
:mqtttopic=> "testtopic/receive",
),
:receiveinternal=>Dict(
:mqtttopic=> "testtopic/internal",
),
:text2text=>Dict(
:mqtttopic=> "testtopic/text2text",
),
)
julia> a = YiemAgent.sommelier(
client,
msgMeta,
agentConfig,
)
julia> YiemAgent.addNewMessage(a, "user", "hello")
julia> YiemAgent.clearhistory(a)
```
# TODO
- [PENDING] clear memory
# Signature
"""
""" Reset an agent's conversational state in place.

Empties the chat history, the short-term memory and the event log, and blanks
the chatbox text. Other `memory` entries (e.g. `:state`) are left untouched.
"""
function clearhistory(a::T) where {T<:agent}
    empty!(a.chathistory)
    foreach(empty!, (a.memory[:shortmem], a.memory[:events]))
    a.memory[:chatbox] = ""
end
""" Add new message to agent.
Arguments\n
-----
a::agent
an agent
role::String
message sender role i.e. system, user or assistant
text::String
message text
Return\n
-----
nothing
Example\n
-----
```jldoctest
julia> using YiemAgent, MQTTClient, GeneralUtils
julia> client, connection = MakeConnection("test.mosquitto.org", 1883)
julia> connect(client, connection)
julia> msgMeta = GeneralUtils.generate_msgMeta("testtopic")
julia> agentConfig = Dict(
:receiveprompt=>Dict(
:mqtttopic=> "testtopic/receive",
),
:receiveinternal=>Dict(
:mqtttopic=> "testtopic/internal",
),
:text2text=>Dict(
:mqtttopic=> "testtopic/text2text",
),
)
julia> a = YiemAgent.sommelier(
client,
msgMeta,
agentConfig,
)
julia> YiemAgent.addNewMessage(a, "user", "hello")
```
Signature\n
-----
"""
""" Append a chat message to the agent's history.

# Arguments
- `a::agent`: the agent whose `chathistory` is extended.
- `name::String`: sender role; must be one of "system", "user" or "assistant".
- `text::AbstractString`: the message body.

# Keywords
- `maximumMsg::Integer=20`: once the history is longer than this, the history
  is handed to `summarize` instead of growing further.

# Throws
- `ErrorException` when `name` is not an allowed role.
"""
function addNewMessage(a::T1, name::String, text::T2;
maximumMsg::Integer=20) where {T1<:agent, T2<:AbstractString}
    # Guard against typo. Fix: the membership operator `∉` was dropped from
    # this condition by an encoding round-trip, leaving a broken expression.
    if name ∉ ["system", "user", "assistant"]
        error("name is not in agent.availableRole $(@__LINE__)")
    end
    #[] summarize the oldest 10 message
    if length(a.chathistory) > maximumMsg
        # NOTE(review): `summarize` is not defined in this module, and the new
        # message is dropped on this branch — confirm both are intentional.
        summarize(a.chathistory)
    else
        d = Dict(:name=> name, :text=> text, :timestamp=> Dates.now())
        push!(a.chathistory, d)
    end
end
""" Converts a vector of dictionaries to a formatted string.
This function takes in a vector of dictionaries and outputs a single string where each dictionary's keys are prefixed by their values.
# Arguments
- `vecd::Vector`
a vector of dictionaries
- `withkey::Bool`
whether to include the key in the output text. Default is true
# Return
a string with the formatted dictionaries
# Example
```jldoctest
julia> using Revise
julia> using GeneralUtils
julia> vecd = [Dict(:name => "John", :text => "Hello"), Dict(:name => "Jane", :text => "Goodbye")]
julia> GeneralUtils.vectorOfDictToText(vecd, withkey=true)
"John> Hello\nJane> Goodbye\n"
```
# Signature
"""
"""
    vectorOfDictToText(vecd::Vector; withkey=true) -> String

Render a vector of chat-message dicts as plain text.

With `withkey=true` each entry contributes `"<name>> <text> \\n"` from its
`:name` and `:text` fields; otherwise every value of every dict is emitted as
`"<value> \\n"` (value iteration order follows the dict).
"""
function vectorOfDictToText(vecd::Vector; withkey=true)::String
    out = IOBuffer()
    for msg in vecd
        if withkey
            # Chat-style line: "sender> body \n"
            print(out, "$(msg[:name])> $(msg[:text]) \n")
        else
            # Key-less mode: one line per value.
            for (_, value) in msg
                print(out, "$value \n")
            end
        end
    end
    return String(take!(out))
end
"""
    eventdict(; kwargs...)

Build an event record as a `Dict{Symbol, Any}` with a fixed nine-key schema
(`:event_description`, `:timestamp`, `:subject`, `:action_or_dialogue`,
`:location`, `:equipment_used`, `:material_used`, `:outcome`, `:note`).
Every field defaults to `nothing`.
"""
function eventdict(;
event_description::Union{String, Nothing}=nothing,
timestamp::Union{DateTime, Nothing}=nothing,
subject::Union{String, Nothing}=nothing,
action_or_dialogue::Union{String, Nothing}=nothing,
location::Union{String, Nothing}=nothing,
equipment_used::Union{String, Nothing}=nothing,
material_used::Union{String, Nothing}=nothing,
outcome::Union{String, Nothing}=nothing,
note::Union{String, Nothing}=nothing,
)
    # Collect the keywords into a NamedTuple, then materialize as a Dict so
    # the returned schema always carries all nine keys.
    fields = (
        event_description = event_description,
        timestamp = timestamp,
        subject = subject,
        action_or_dialogue = action_or_dialogue,
        location = location,
        equipment_used = equipment_used,
        material_used = material_used,
        outcome = outcome,
        note = note,
    )
    return Dict{Symbol, Any}(pairs(fields))
end
# """ Convert a single chat dictionary into LLM model instruct format.
# # Llama 3 instruct format example
# <|system|>
# You are a helpful AI assistant.<|end|>
# <|user|>
# I am going to Paris, what should I see?<|end|>
# <|assistant|>
# Paris, the capital of France, is known for its stunning architecture, art museums."<|end|>
# <|user|>
# What is so great about #1?<|end|>
# <|assistant|>
# # Arguments
# - `name::T`
# message owner name e.f. "system", "user" or "assistant"
# - `text::T`
# # Return
# - `formattedtext::String`
# text formatted to model format
# # Example
# ```jldoctest
# julia> using Revise
# julia> using YiemAgent
# julia> d = Dict(:name=> "system",:text=> "You are a helpful, respectful and honest assistant.",)
# julia> formattedtext = YiemAgent.formatLLMtext_phi3instruct(d[:name], d[:text])
# ```
# Signature
# """
# function formatLLMtext_phi3instruct(name::T, text::T) where {T<:AbstractString}
# formattedtext =
# """
# <|$name|>
# $text<|end|>\n
# """
# return formattedtext
# end
# """ Convert a single chat dictionary into LLM model instruct format.
# # Llama 3 instruct format example
# <|begin_of_text|>
# <|start_header_id|>system<|end_header_id|>
# You are a helpful assistant.
# <|eot_id|>
# <|start_header_id|>user<|end_header_id|>
# Get me an icecream.
# <|eot_id|>
# <|start_header_id|>assistant<|end_header_id|>
# Go buy it yourself at 7-11.
# <|eot_id|>
# # Arguments
# - `name::T`
# message owner name e.f. "system", "user" or "assistant"
# - `text::T`
# # Return
# - `formattedtext::String`
# text formatted to model format
# # Example
# ```jldoctest
# julia> using Revise
# julia> using YiemAgent
# julia> d = Dict(:name=> "system",:text=> "You are a helpful, respectful and honest assistant.",)
# julia> formattedtext = YiemAgent.formatLLMtext_llama3instruct(d[:name], d[:text])
# "<|begin_of_text|>\n <|start_header_id|>system<|end_header_id|>\n You are a helpful, respectful and honest assistant.\n <|eot_id|>\n"
# ```
# Signature
# """
# function formatLLMtext_llama3instruct(name::T, text::T) where {T<:AbstractString}
# formattedtext =
# if name == "system"
# """
# <|begin_of_text|>
# <|start_header_id|>$name<|end_header_id|>
# $text
# <|eot_id|>
# """
# else
# """
# <|start_header_id|>$name<|end_header_id|>
# $text
# <|eot_id|>
# """
# end
# return formattedtext
# end
# """ Convert a chat messages in vector of dictionary into LLM model instruct format.
# # Arguments
# - `messages::Vector{Dict{Symbol, T}}`
# message owner name e.f. "system", "user" or "assistant"
# - `formatname::T`
# format name to be used
# # Return
# - `formattedtext::String`
# text formatted to model format
# # Example
# ```jldoctest
# julia> using Revise
# julia> using YiemAgent
# julia> chatmessage = [
# Dict(:name=> "system",:text=> "You are a helpful, respectful and honest assistant.",),
# Dict(:name=> "user",:text=> "list me all planets in our solar system.",),
# Dict(:name=> "assistant",:text=> "I'm sorry. I don't know. You tell me.",),
# ]
# julia> formattedtext = YiemAgent.formatLLMtext(chatmessage, "llama3instruct")
# "<|begin_of_text|>\n <|start_header_id|>system<|end_header_id|>\n You are a helpful, respectful and honest assistant.\n <|eot_id|>\n <|start_header_id|>user<|end_header_id|>\n list me all planets in our solar system.\n <|eot_id|>\n <|start_header_id|>assistant<|end_header_id|>\n I'm sorry. I don't know. You tell me.\n <|eot_id|>\n"
# ```
# # Signature
# """
# function formatLLMtext(messages::Vector{Dict{Symbol, T}},
# formatname::String="llama3instruct") where {T<:Any}
# f = if formatname == "llama3instruct"
# formatLLMtext_llama3instruct
# elseif formatname == "mistral"
# # not define yet
# elseif formatname == "phi3instruct"
# formatLLMtext_phi3instruct
# else
# error("$formatname template not define yet")
# end
# str = ""
# for t in messages
# str *= f(t[:name], t[:text])
# end
# # add <|assistant|> so that the model don't generate it and I don't need to clean it up later
# if formatname == "phi3instruct"
# str *= "<|assistant|>\n"
# end
# return str
# end
# """
# Arguments\n
# -----
# Return\n
# -----
# Example\n
# -----
# ```jldoctest
# julia>
# ```
# TODO\n
# -----
# [] update docstring
# [PENDING] implement the function
# Signature\n
# -----
# """
# function iterativeprompting(a::T, prompt::String, verification::Function) where {T<:agent}
# msgMeta = GeneralUtils.generate_msgMeta(
# a.config[:externalService][:text2textinstruct],
# senderName= "iterativeprompting",
# senderId= a.id,
# receiverName= "text2textinstruct",
# )
# outgoingMsg = Dict(
# :msgMeta=> msgMeta,
# :payload=> Dict(
# :text=> prompt,
# )
# )
# success = nothing
# result = nothing
# critique = ""
# # iteration loop
# while true
# # send prompt to LLM
# response = GeneralUtils.sendReceiveMqttMsg(outgoingMsg)
# error("--> iterativeprompting")
# # check for correctness and get feedback
# success, _critique = verification(response)
# if success
# result = response
# break
# else
# # add critique to prompt
# critique *= _critique * "\n"
# replace!(prompt, "Critique: ..." => "Critique: $critique")
# end
# end
# return (success=success, result=result)
# end
end # module util