This commit is contained in:
narawat lamaiin
2025-07-14 19:33:12 +07:00
parent bad2ca35ed
commit 8a9c9606c7
3 changed files with 38 additions and 29 deletions

View File

@@ -158,7 +158,7 @@ function decisionMaker(a::T; recentevents::Integer=20, maxattempt=10
# end
# recentrecap = GeneralUtils.dictToString_noKey(_recentrecap)
# similarDecision = a.func[:similarSommelierDecision](recentrecap)
# similarDecision = a.context[:similarSommelierDecision](recentrecap)
similarDecision = nothing #CHANGE
if similarDecision !== nothing
@@ -234,7 +234,7 @@ function decisionMaker(a::T; recentevents::Integer=20, maxattempt=10
println("\nYiemAgent decisionMaker() attempt $attempt/$maxattempt ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
end
# QandA = generatequestion(a, a.func[:text2textInstructLLM], timeline)
# QandA = generatequestion(a, a.context[:text2textInstructLLM], timeline)
context =
"""
@@ -285,7 +285,7 @@ function decisionMaker(a::T; recentevents::Integer=20, maxattempt=10
# add info
prompt = prompt * context
response = a.func[:text2textInstructLLM](prompt; senderId=a.id)
response = a.context.text2textInstructLLM(prompt; senderId=a.id)
response = GeneralUtils.deFormatLLMtext(response, a.llmFormatName)
response = GeneralUtils.remove_french_accents(response)
think, response = GeneralUtils.extractthink(response)
@@ -468,7 +468,7 @@ end
# end
# recentrecap = GeneralUtils.dictToString_noKey(_recentrecap)
# # similarDecision = a.func[:similarSommelierDecision](recentrecap)
# # similarDecision = a.context[:similarSommelierDecision](recentrecap)
# similarDecision = nothing #CHANGE
# if similarDecision !== nothing
@@ -500,7 +500,7 @@ end
# println("\nYiemAgent decisionMaker() attempt $attempt/10 ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
# end
# QandA = generatequestion(a, a.func[:text2textInstructLLM]; recent=3)
# QandA = generatequestion(a, a.context[:text2textInstructLLM]; recent=3)
# systemmsg =
# """
# Your name is $(a.name). You are a helpful English-speaking assistant, acting as a polite, website-based sommelier for $(a.retailername)'s wine store.
@@ -601,7 +601,7 @@ end
# # change qwen format put in model format
# prompt = GeneralUtils.formatLLMtext(unformatPrompt, "qwen3")
# response = a.func[:text2textInstructLLM](prompt)
# response = a.context[:text2textInstructLLM](prompt)
# response = GeneralUtils.remove_french_accents(response)
# response = replace(response, "**"=>"")
# response = replace(response, "***"=>"")
@@ -838,7 +838,7 @@ function evaluator(a::T1, timeline, decisiondict, evaluateecontext
println("")
println(prompt)
response = a.func[:text2textInstructLLM](prompt; senderId=a.id)
response = a.context.text2textInstructLLM(prompt; senderId=a.id)
response = GeneralUtils.deFormatLLMtext(response, a.llmFormatName)
response = GeneralUtils.remove_french_accents(response)
# response = replace(response, '$'=>"USD")
@@ -1067,7 +1067,7 @@ julia>
# Signature
"""
function think(a::T)::NamedTuple{(:actionname, :result),Tuple{String,String}} where {T<:agent}
# a.memory[:recap] = generateSituationReport(a, a.func[:text2textInstructLLM]; skiprecent=0)
# a.memory[:recap] = generateSituationReport(a, a.context[:text2textInstructLLM]; skiprecent=0)
thoughtDict = decisionMaker(a)
actionname = thoughtDict[:actionname]
@@ -1272,7 +1272,7 @@ function presentbox(a::sommelier, thoughtDict; maxtattempt::Integer=10, recentev
# add info
prompt = prompt * context
response = a.func[:text2textInstructLLM](prompt; senderId=a.id)
response = a.context.text2textInstructLLM(prompt; senderId=a.id)
response = GeneralUtils.deFormatLLMtext(response, a.llmFormatName)
response = GeneralUtils.remove_french_accents(response)
# response = replace(response, '$'=>"USD")
@@ -1453,7 +1453,7 @@ end
# # add info
# prompt = prompt * context
# response = a.func[:text2textInstructLLM](prompt; senderId=a.id)
# response = a.context[:text2textInstructLLM](prompt; senderId=a.id)
# response = GeneralUtils.deFormatLLMtext(response, a.llmFormatName)
# response = GeneralUtils.remove_french_accents(response)
# # response = replace(response, '$'=>"USD")
@@ -1634,7 +1634,7 @@ function generatechat(a::sommelier, thoughtDict; maxattempt::Integer=10)
for attempt in 1:maxattempt
# if attempt > 1 # use to prevent LLM generate the same respond over and over
# println("\nYiemAgent generatechat() attempt $attempt/10 ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
# yourthought1 = paraphrase(a.func[:text2textInstructLLM], yourthought)
# yourthought1 = paraphrase(a.context[:text2textInstructLLM], yourthought)
# else
# yourthought1 = yourthought
# end
@@ -1658,7 +1658,7 @@ function generatechat(a::sommelier, thoughtDict; maxattempt::Integer=10)
# add info
prompt = prompt * context
response = a.func[:text2textInstructLLM](prompt; senderId=a.id)
response = a.context.text2textInstructLLM(prompt; senderId=a.id)
response = GeneralUtils.deFormatLLMtext(response, a.llmFormatName)
response = GeneralUtils.remove_french_accents(response)
# response = replace(response, '$'=>"USD")
@@ -1807,7 +1807,7 @@ function generatechat(a::companion; recentevents::Integer=10,
prompt = replace(_prompt, "|>user"=>"|>$(converPartnerName)")
prompt = replace(prompt, "|>assistant"=>"|>$(a.name)")
response = a.func[:text2textInstructLLM](prompt; llmkwargs=llmkwargs, senderId=a.id)
response = a.context.text2textInstructLLM(prompt; llmkwargs=llmkwargs, senderId=a.id)
response = replace(response, "<|im_start|>"=> "")
response = GeneralUtils.deFormatLLMtext(response, a.llmFormatName)
think, response = GeneralUtils.extractthink(response)
@@ -1863,7 +1863,7 @@ function generatechat(a::virtualcustomer;
# add info
prompt = prompt * context
response = a.func[:text2textInstructLLM](prompt; llmkwargs=llmkwargs, senderId=a.id)
response = a.context.text2textInstructLLM(prompt; llmkwargs=llmkwargs, senderId=a.id)
response = replace(response, "<|im_start|>"=> "")
response = GeneralUtils.deFormatLLMtext(response, a.llmFormatName)
think, response = GeneralUtils.extractthink(response)
@@ -2291,7 +2291,7 @@ function detectWineryName(a, text)
# put in model format
prompt = GeneralUtils.formatLLMtext(_prompt, a.llmFormatName)
response = a.func[:text2textInstructLLM](prompt; senderId=a.id)
response = a.context.text2textInstructLLM(prompt; senderId=a.id)
response = GeneralUtils.deFormatLLMtext(response, a.llmFormatName)
think, response = GeneralUtils.extractthink(response)
println("\ndetectWineryName() ", @__FILE__, ":", @__LINE__, " $(Dates.now())")

View File

@@ -309,10 +309,11 @@ function checkwine(a::T1, input::T2; maxattempt::Int=3
inventoryquery = "Retrieves $retrieve_attributes of wines that match the following criteria - {$_inventoryquery}"
println("\ncheckinventory input: $inventoryquery ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
# add support for similarSQLVectorDB
textresult, rawresponse = SQLLLM.query(inventoryquery, a.func[:executeSQL],
a.func[:text2textInstructLLM];
insertSQLVectorDB=a.func[:insertSQLVectorDB],
similarSQLVectorDB=a.func[:similarSQLVectorDB],
textresult, rawresponse = SQLLLM.query(inventoryquery,
a.context.executeSQL,
a.context.text2textInstructLLM;
insertSQLVectorDB=a.context.insertSQLVectorDB,
similarSQLVectorDB=a.context.similarSQLVectorDB,
llmFormatName="qwen3")
# check if all of retrieve_attributes appears in textresult
isin = [occursin(x, textresult) for x in retrieve_attributes]
@@ -463,7 +464,7 @@ function extractWineAttributes_1(a::T1, input::T2; maxattempt=10
# add info
prompt = prompt * context
response = a.func[:text2textInstructLLM](prompt; modelsize="medium", senderId=a.id)
response = a.context.text2textInstructLLM(prompt; modelsize="medium", senderId=a.id)
response = GeneralUtils.deFormatLLMtext(response, a.llmFormatName)
response = GeneralUtils.remove_french_accents(response)
think, response = GeneralUtils.extractthink(response)
@@ -742,7 +743,7 @@ function extractWineAttributes_2(a::T1, input::T2)::String where {T1<:agent, T2<
# add info
prompt = prompt * context
response = a.func[:text2textInstructLLM](prompt; modelsize="medium", senderId=a.id)
response = a.context.text2textInstructLLM(prompt; modelsize="medium", senderId=a.id)
response = GeneralUtils.deFormatLLMtext(response, a.llmFormatName)
response = GeneralUtils.remove_french_accents(response)
think, response = GeneralUtils.extractthink(response)

View File

@@ -1,6 +1,6 @@
module type
export agent, sommelier, companion, virtualcustomer
export agent, sommelier, companion, virtualcustomer, appcontext
using Dates, UUIDs, DataStructures, JSON3, NATS
using GeneralUtils
@@ -8,9 +8,17 @@ using GeneralUtils
# ---------------------------------------------- 100 --------------------------------------------- #
#[WORKING]
# struct appcontext
# connection::
# end
# Shared application context passed into each agent constructor (replaces the
# former `func::NamedTuple` of callbacks). Bundles the messaging connection
# with the service functions the agents invoke at runtime.
mutable struct appcontext
    const connection::NATS.Connection  # live NATS connection used by the services below
    const text2textInstructLLMServiceSubject::String  # NATS subject for the instruct-LLM service
    getTextEmbedding::Function  # text -> embedding vector; presumably for vector-DB lookups — TODO confirm against callers
    text2textInstructLLM::Function  # (prompt; kwargs...) -> LLM response text; called as a.context.text2textInstructLLM(prompt; senderId=...)
    executeSQL::Function  # runs a SQL query; handed to SQLLLM.query in checkwine()
    similarSQLVectorDB::Function  # similarity lookup of previously seen SQL; used by SQLLLM.query
    insertSQLVectorDB::Function  # stores SQL into the vector DB; used by SQLLLM.query
    similarSommelierDecision::Function  # similarity lookup of past sommelier decisions (currently bypassed in decisionMaker)
    insertSommelierDecision::Function  # stores a sommelier decision; counterpart of the lookup above — usage not visible here
end
abstract type agent end
@@ -27,7 +35,7 @@ mutable struct companion <: agent
end
function companion(
func::NamedTuple # NamedTuple of functions
context::appcontext # application context (connection + service callbacks)
;
name::String= "Assistant",
id::String= GeneralUtils.uuid4snakecase(),
@@ -75,7 +83,7 @@ function companion(
maxHistoryMsg,
chathistory,
memory,
func,
context,
llmFormatName
)
@@ -165,7 +173,7 @@ mutable struct sommelier <: agent
end
function sommelier(
func, # NamedTuple of functions
context::appcontext, # application context (connection + service callbacks)
;
name::String= "Assistant",
id::String= string(uuid4()),
@@ -216,7 +224,7 @@ function sommelier(
maxHistoryMsg,
chathistory,
memory,
func,
context,
llmFormatName
)