diff --git a/src/interface.jl b/src/interface.jl
index bde60f7..57b0453 100755
--- a/src/interface.jl
+++ b/src/interface.jl
@@ -1200,7 +1200,7 @@ end
)
julia> response = ChatAgent.conversation(newAgent, "Hi! how are you?")
```
-# """
+"""
function conversation(a::agentReflex, usermsg::String; attemptlimit::Int=3)
a.attemptlimit = attemptlimit
workstate = nothing
@@ -1234,6 +1234,62 @@ function conversation(a::agentReflex, usermsg::String; attemptlimit::Int=3)
return response
end
+
+
+""" Chat with the LLM.
+
+ Arguments\n
+ -----
+ a::agent
+ an agent
+
+ Return\n
+ -----
+    The LLM response (the function returns `response`).
+
+ Example\n
+ -----
+ ```jldoctest
+ julia> using JSON3, UUIDs, Dates, FileIO, MQTTClient, ChatAgent
+ julia> const mqttBroker = "mqtt.yiem.cc"
+ julia> mqttclient, connection = MakeConnection(mqttBroker, 1883)
+ julia> tools=Dict( # update input format
+ "askbox"=>Dict(
+ :description => "Useful for when you need to ask the user for more context. Do not ask the user their own question.",
+ :input => "Input is a text in JSON format.{\"Q1\": \"How are you doing?\", \"Q2\": \"How may I help you?\"}",
+ :output => "" ,
+ :func => nothing,
+ ),
+ )
+ julia> msgMeta = Dict(
+ :msgPurpose=> "updateStatus",
+ :from=> "agent",
+ :to=> "llmAI",
+ :requestresponse=> "request",
+ :sendto=> "", # destination topic
+        :replyTo=> "agent/api/v0.1.0/txt/response", # requester asks responder to send reply to this topic
+        :repondToMsgId=> "", # responder is responding to this msg id
+ :taskstatus=> "", # "complete", "fail", "waiting" or other status
+ :timestamp=> Dates.now(),
+ :msgId=> "$(uuid4())",
+ )
+ julia> a = ChatAgent.agentReflex(
+ "Jene",
+ mqttclient,
+ msgMeta,
+ agentConfigTopic, # I need a function to send msg to config topic to get load balancer
+ role=:sommelier,
+ tools=tools
+ )
+    julia> newAgent = ChatAgent.agentReact(a)
+ julia> response = ChatAgent.conversation(newAgent, "Hi! how are you?")
+ ```
+
+ Signature\n
+ -----
+"""
+
+
"""
Continuously run llm functions except when llm is getting Answer: or askbox.
There are many work() depend on thinking mode.
diff --git a/src/type.jl b/src/type.jl
index 21553e2..a5e3dbc 100644
--- a/src/type.jl
+++ b/src/type.jl
@@ -79,11 +79,9 @@ julia> agent = ChatAgent.agentReflex(
```
"""
@kwdef mutable struct agentReflex <: agent
- availableRole::AbstractVector = ["system", "user", "assistant"]
agentName::String = "Jene" # ex. Jene
- maxUserMsg::Int = 30
- earlierConversation::String = "N/A" # summary of earlier conversation
+ availableRole::AbstractVector = ["system", "user", "assistant"]
""" Dict(Role=> Content) ; Role can be system, user, assistant
Example:
messages=[
@@ -94,37 +92,42 @@ julia> agent = ChatAgent.agentReflex(
"""
role::Symbol = :assistant
roles::Dict = Dict(:assistant => "You are a helpful assistant.",)
+ roleSpecificInstruction::Union{Dict, Nothing} = nothing
+ thinkingFormat::Union{Dict, Nothing} = nothing
- # Ref: Chat prompt format https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGML/discussions/3
- # messages= [Dict(:role=>"system", :content=> "", :timestamp=> Dates.now()),]
- messages = Vector{Dict{Symbol, Any}}()
tools::Union{Dict, Nothing} = nothing
newplan::Bool = false # if true, new plan will be generated
attemptlimit::Int = 5 # thinking round limit
attempt::Int = 1 # attempted number
task::Int = 1 # task number
- env::AbstractString = "N/A"
- thinkingFormat::Union{Dict, Nothing} = nothing
- roleSpecificInstruction::Union{Dict, Nothing} = nothing
- memory = newAgentMemory()
+
+ memory = newAgentMemory() # store various memory
+
+ # Ref: Chat prompt format https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGML/discussions/3
+ # messages= [Dict(:role=>"system", :content=> "", :timestamp=> Dates.now()),]
+ messages = Vector{Dict{Symbol, Any}}() # store messages history in the format :name=>"message"
+    maxUserMsg::Int = 30 # 31st and earlier messages will get summarized
+ earlierConversation::String = "N/A" # summary of earlier conversation
+
+ # communication
+ configTopic::String="" # store mqtt topic where an agent can get configuration
+ mqttClient::Any=nothing # store mqtt client for use in various internal functions
+ msgMeta::Union{Dict, Nothing} = nothing # a template for msgMeta
+ mqttMsgList_chat::Vector{Dict} = Vector{Dict}() # store incoming mqtt chat msg
+ mqttMsgList_internal::Vector{Dict} = Vector{Dict}() # store incoming mqtt internal use msg
# LLM function related
winestockResult = ""
end
function agentReflex(
- agentName::String;
- mqttClientSpec::NamedTuple=(
- clientName= "someclient", # name of this client
- clientID= "$(uuid4())",
- broker= "mqtt.yiem.ai",
- pubtopic= (imgAI="img/api/v0.0.1/gpu/request",
- txtAI="txt/api/v0.1.0/gpu/request"),
- subtopic= (imgAI="agent/api/v0.1.0/img/respond",
- txtAI="agent/api/v0.1.0/txt/respond"),
- keepalive= 30,),
- role::Symbol=:assistant,
- roles::Dict=Dict(
+ agentName::String,
+ mqttClient,
+ msgMeta::Dict,
+ configTopic::String,
+ ;
+ role::Symbol=:assistant,
+ roles::Dict=Dict(
:assistant =>
"""
You are a helpful assistant.
@@ -240,22 +243,15 @@ function agentReflex(
# :func => nothing,
# ),
),
- msgMeta::Dict=Dict(
- :msgPurpose=> "updateStatus",
- :from=> "chatbothub",
- :to=> "llmAI",
- :requestrespond=> "request",
- :sendto=> "", # destination topic
- :replyTo=> "agent/api/v0.1.0/txt/respond", # requester ask responder to send reply to this topic
- :repondToMsgId=> "", # responder is responding to this msg id
- :taskstatus=> "", # "complete", "fail", "waiting" or other status
- :timestamp=> Dates.now(),
- :msgId=> "$(uuid4())",
- ),
availableRole::AbstractArray=["system", "user", "assistant"],
- maxUserMsg::Int=10,)
+ maxUserMsg::Int=10,
+ )
newAgent = agentReflex()
+ newAgent.agentName = agentName
+ newAgent.mqttClient = mqttClient
+ newAgent.msgMeta = msgMeta
+ newAgent.configTopic = configTopic
newAgent.availableRole = availableRole
newAgent.maxUserMsg = maxUserMsg
newAgent.msgMeta = msgMeta
@@ -264,7 +260,7 @@ function agentReflex(
newAgent.roles = roles
newAgent.thinkingFormat = thinkingFormat
newAgent.roleSpecificInstruction = roleSpecificInstruction
- newAgent.agentName = agentName
+
return newAgent
end