From dea3f0260e965f61d7a56015c3074c604dd34c7d Mon Sep 17 00:00:00 2001 From: narawat lamaiin Date: Sat, 4 May 2024 18:05:46 +0700 Subject: [PATCH] select instruct format from configured LLM and add generation kwargs --- src/interface.jl | 30 +++++++++++++++++++++++++----- src/llmfunction.jl | 3 ++- src/util.jl | 4 ---- 3 files changed, 27 insertions(+), 10 deletions(-) diff --git a/src/interface.jl b/src/interface.jl index e03e89f..76b9e02 100644 --- a/src/interface.jl +++ b/src/interface.jl @@ -154,10 +154,18 @@ function decisionMaker(a::T1, state::T2)::Dict{Symbol, Any} where {T1<:agent, T2 {Thought """ - prompt = formatLLMtext_llama3instruct("system", _prompt) + # apply LLM specific instruct format + externalService = a.config[:externalservice][:text2textinstruct] + llminfo = externalService[:llminfo] + prompt = + if llminfo[:name] == "llama3instruct" + formatLLMtext_llama3instruct("system", _prompt) + else + error("llm model name is not defined yet $(@__LINE__)") + end msgMeta = GeneralUtils.generate_msgMeta( - a.config[:externalservice][:text2textinstruct][:mqtttopic], + externalService[:mqtttopic], senderName= "decisionMaker", senderId= a.id, receiverName= "text2textinstruct", @@ -172,9 +180,9 @@ function decisionMaker(a::T1, state::T2)::Dict{Symbol, Any} where {T1<:agent, T2 :kwargs=> Dict( :max_tokens=> 512, :stop=> ["<|eot_id|>"], + ) ) ) - ) _response = GeneralUtils.sendReceiveMqttMsg(outgoingMsg) _thoughtJsonStr = _response[:response][:text] @@ -252,7 +260,15 @@ function progressValueEstimator(a::T1, state::T2)::Tuple{String, Integer} where $(JSON3.write(state[:thoughtHistory])) """ - prompt = formatLLMtext_llama3instruct("system", _prompt) + # apply LLM specific instruct format + externalService = a.config[:externalservice][:text2textinstruct] + llminfo = externalService[:llminfo] + prompt = + if llminfo[:name] == "llama3instruct" + formatLLMtext_llama3instruct("system", _prompt) + else + error("llm model name is not defined yet $(@__LINE__)") + end msgMeta = GeneralUtils.generate_msgMeta( 
a.config[:externalservice][:text2textinstruct][:mqtttopic], @@ -267,8 +283,12 @@ function progressValueEstimator(a::T1, state::T2)::Tuple{String, Integer} where :msgMeta=> msgMeta, :payload=> Dict( :text=> prompt, + :kwargs=> Dict( + :max_tokens=> 512, + :stop=> ["<|eot_id|>"], + ) + ) ) - ) _response = GeneralUtils.sendReceiveMqttMsg(outgoingMsg) _thoughtJsonStr = _response[:response][:text] diff --git a/src/llmfunction.jl b/src/llmfunction.jl index 505a347..2a54bc0 100644 --- a/src/llmfunction.jl +++ b/src/llmfunction.jl @@ -411,6 +411,7 @@ function jsoncorrection(a::T1, input::T2, Corrention: """ + # apply LLM specific instruct format externalService = a.config[:externalservice][:text2textinstruct] llminfo = externalService[:llminfo] prompt = @@ -437,9 +438,9 @@ function jsoncorrection(a::T1, input::T2, :kwargs=> Dict( :max_tokens=> 512, :stop=> ["<|eot_id|>"], + ) ) ) - ) result = GeneralUtils.sendReceiveMqttMsg(outgoingMsg) incorrectjson = result[:response][:text] end diff --git a/src/util.jl b/src/util.jl index 2fa99d2..0c2aebf 100644 --- a/src/util.jl +++ b/src/util.jl @@ -407,10 +407,6 @@ end - - - -