using Revise
using JSON, JSON3, Dates, UUIDs, PrettyPrinting, LibPQ, Base64, DataFrames, DataStructures
using YiemAgent, GeneralUtils
using Base.Threads

# ---------------------------------------------- 100 --------------------------------------------- #

# Load service configuration (database credentials, MQTT broker/topic info).
config = JSON3.read("/appfolder/app/dev/YiemAgent/test/config.json")
# config = copy(JSON3.read("../mountvolume/config.json"))

# Open a connection to the database described by config[:externalservice][dbkey],
# run `sql`, and return the LibPQ.Result. The connection is closed in a
# `finally` block so a failing query no longer leaks the connection
# (previously `close` was only reached on the success path).
function _executeOnDB(dbkey::Symbol, sql::AbstractString)
    db = config[:externalservice][dbkey]
    host = db[:host]
    port = db[:port]
    dbname = db[:dbname]
    user = db[:user]
    password = db[:password]
    conn = LibPQ.Connection("host=$host port=$port dbname=$dbname user=$user password=$password")
    try
        return LibPQ.execute(conn, sql)
    finally
        close(conn)
    end
end

"""
    executeSQL(sql)

Execute `sql` against the wine database (`config[:externalservice][:wineDB]`)
and return the `LibPQ.Result`. A fresh connection is opened per call and is
always closed, even when the query throws.
"""
function executeSQL(sql::T) where {T<:AbstractString}
    return _executeOnDB(:wineDB, sql)
end

"""
    executeSQLVectorDB(sql)

Execute `sql` against the vector database
(`config[:externalservice][:SQLVectorDB]`) and return the `LibPQ.Result`.
A fresh connection is opened per call and is always closed, even when the
query throws.
"""
function executeSQLVectorDB(sql)
    return _executeOnDB(:SQLVectorDB, sql)
end

"""
    text2textInstructLLM(prompt; maxattempt=10, modelsize="medium",
                         senderId=GeneralUtils.uuid4snakecase(), timeout=90,
                         llmkwargs=Dict(:num_ctx => 32768, :temperature => 0.5))

Send `prompt` to the text2text-instruct LLM service over MQTT (through the
load-balancer topic) and return the response text, or `nothing` when all
`maxattempt` attempts fail.

# Keywords
- `maxattempt`: number of send/receive retries before giving up.
- `modelsize`: selects the receiver service, e.g. `"medium"` → `"text2textinstruct_medium"`.
- `senderId`: sender identity placed in the message metadata.
- `timeout`: seconds to wait for each MQTT response.
- `llmkwargs`: inference options forwarded to the LLM service.
"""
function text2textInstructLLM(prompt::String;
    maxattempt::Integer=10,
    modelsize::String="medium",
    senderId=GeneralUtils.uuid4snakecase(),
    timeout=90,
    llmkwargs=Dict(
        :num_ctx => 32768,
        :temperature => 0.5,
    )
)
    msgMeta = GeneralUtils.generate_msgMeta(
        config[:externalservice][:loadbalancer][:mqtttopic];
        msgPurpose="inference",
        senderName="yiemagent",
        senderId=senderId,
        receiverName="text2textinstruct_$modelsize",
        mqttBrokerAddress=config[:mqttServerInfo][:broker],
        mqttBrokerPort=config[:mqttServerInfo][:port],
    )
    outgoingMsg = Dict(
        :msgMeta => msgMeta,
        :payload => Dict(
            :text => prompt,
            :kwargs => llmkwargs
        )
    )
    response = nothing
    for attempts in 1:maxattempt
        _response = GeneralUtils.sendReceiveMqttMsg(outgoingMsg; responsetimeout=timeout, responsemaxattempt=maxattempt)
        payload = _response[:response]
        # Short-circuit `&&` guarantees payload[:text] is only read on success.
        if _response[:success] && payload[:text] !== nothing
            response = _response[:response][:text]
            break
        else
            println("\n attempt $attempts/$maxattempt failed ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
            pprintln(outgoingMsg)
            println(" attempt $attempts/$maxattempt failed ", @__FILE__, ":", @__LINE__, " $(Dates.now())\n")
            sleep(3)  # brief back-off before retrying
        end
    end
    return response
end

# get text embedding from a LLM service
"""
    getEmbedding(text)

Request the embedding of `text` from the embedding service over MQTT and
return the `:embeddings` payload (the single input is wrapped in a
one-element vector, as the service requires).

NOTE(review): reads the module-level `sessionId` (assigned further down this
file) as `senderId`; this works because Julia resolves globals at call time,
but the function must not be called before `sessionId` is defined.
"""
function getEmbedding(text::T) where {T<:AbstractString}
    msgMeta = GeneralUtils.generate_msgMeta(
        config[:externalservice][:loadbalancer][:mqtttopic];
        msgPurpose="embedding",
        senderName="yiemagent",
        senderId=sessionId,
        receiverName="textembedding",
        mqttBrokerAddress=config[:mqttServerInfo][:broker],
        mqttBrokerPort=config[:mqttServerInfo][:port],
    )
    outgoingMsg = Dict(
        :msgMeta => msgMeta,
        :payload => Dict(
            :text => [text] # must be a vector of string
        )
    )
    response = GeneralUtils.sendReceiveMqttMsg(outgoingMsg; responsetimeout=120, responsemaxattempt=3)
    embedding = response[:response][:embeddings]
    return embedding
end

"""
    findSimilarTextFromVectorDB(text, tablename, embeddingColumnName, vectorDB; limit=1)

Embed `text` and return the `limit` nearest rows of `tablename` — ordered by
the `<->` distance on `embeddingColumnName` — as a `DataFrame` that includes
a `distance` column.

SECURITY(review): `tablename`, `embeddingColumnName`, and the embedding are
interpolated directly into the SQL string. Callers currently pass only
internal constants, but this is not safe for untrusted input — prefer
parameterized queries (`LibPQ.execute(conn, sql, params)`).
"""
function findSimilarTextFromVectorDB(text::T1,
    tablename::T2,
    embeddingColumnName::T3,
    vectorDB::Function;
    limit::Integer=1
)::DataFrame where {T1<:AbstractString, T2<:AbstractString, T3<:AbstractString}
    # get embedding from LLM service
    embedding = getEmbedding(text)[1]
    sql = """
        SELECT *, $embeddingColumnName <-> '$embedding' as distance
        FROM $tablename
        ORDER BY distance
        LIMIT $limit;
    """
    response = vectorDB(sql)
    df = DataFrame(response)
    return df
end

"""
    similarSQLVectorDB(query; maxdistance=100)

Look up a previously cached SQL answer for `query` in the
`sqlllm_decision_repository` vector table.

Returns a named tuple `(dict=..., distance=...)`: the base64-decoded cached
output string and its distance when the nearest match is closer than
`maxdistance`, otherwise `(dict=nothing, distance=nothing)`.
"""
function similarSQLVectorDB(query; maxdistance::Integer=100)
    tablename = "sqlllm_decision_repository"
    # get embedding of the query
    df = findSimilarTextFromVectorDB(query, tablename, "function_input_embedding", executeSQLVectorDB)
    # println(df[1, [:id, :function_output]])
    row, col = size(df)
    distance = row == 0 ? Inf : df[1, :distance]
    # distance = 100 # CHANGE this is for testing only
    if row != 0 && distance < maxdistance # if there is usable SQL, return it.
        output_b64 = df[1, :function_output_base64] # pick the closest match
        output_str = String(base64decode(output_b64))
        rowid = df[1, :id]
        println("\n~~~ found similar sql. row id $rowid, distance $distance ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
        return (dict=output_str, distance=distance)
    else
        println("\n~~~ similar sql not found, max distance $maxdistance ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
        return (dict=nothing, distance=nothing)
    end
end

"""
    insertSQLVectorDB(query, SQL; maxdistance=3)

Cache `SQL` as the answer for `query` in `sqlllm_decision_repository`, unless
an entry closer than `maxdistance` already exists.

SECURITY(review): single quotes are stripped from the interpolated values
rather than properly escaped — adequate only for trusted internal callers;
parameterized queries would be safer.
"""
function insertSQLVectorDB(query::T1, SQL::T2; maxdistance::Integer=3) where {T1<:AbstractString, T2<:AbstractString}
    tablename = "sqlllm_decision_repository"
    # get embedding of the query
    # query = state[:thoughtHistory][:question]
    df = findSimilarTextFromVectorDB(query, tablename, "function_input_embedding", executeSQLVectorDB)
    row, col = size(df)
    distance = row == 0 ? Inf : df[1, :distance]
    if row == 0 || distance > maxdistance # no close enough SQL stored in the database
        # NOTE(review): this re-embeds `query` even though
        # findSimilarTextFromVectorDB already computed the same embedding.
        query_embedding = getEmbedding(query)[1]
        query = replace(query, "'" => "")
        sql_base64 = base64encode(SQL)
        sql_ = replace(SQL, "'" => "")
        sql = """
            INSERT INTO $tablename (function_input, function_output, function_output_base64, function_input_embedding)
            VALUES ('$query', '$sql_', '$sql_base64', '$query_embedding');
        """
        # println("\n~~~ added new decision to vectorDB ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
        # println(sql)
        _ = executeSQLVectorDB(sql)
    end
end

"""
    similarSommelierDecision(recentevents; maxdistance=3)

Return a previously cached sommelier decision (decoded from its base64 JSON
copy into a `Dict`) whose input is within `maxdistance` of `recentevents` in
`sommelier_decision_repository`, or `nothing` when no close enough entry
exists.
"""
function similarSommelierDecision(recentevents::T1;
    maxdistance::Integer=3
)::Union{AbstractDict, Nothing} where {T1<:AbstractString}
    tablename = "sommelier_decision_repository"
    # find similar
    println("\n~~~ search vectorDB for this: $recentevents ", @__FILE__, " ", @__LINE__)
    df = findSimilarTextFromVectorDB(recentevents, tablename, "function_input_embedding", executeSQLVectorDB)
    row, col = size(df)
    distance = row == 0 ? Inf : df[1, :distance]
    if row != 0 && distance < maxdistance # if there is usable decision, return it.
        rowid = df[1, :id]
        println("\n~~~ found similar decision. row id $rowid, distance $distance ", @__FILE__, " ", @__LINE__)
        output_b64 = df[1, :function_output_base64] # pick the closest match
        _output_str = String(base64decode(output_b64))
        output = copy(JSON3.read(_output_str))
        return output
    else
        println("\n~~~ similar decision not found, max distance $maxdistance ", @__FILE__, " ", @__LINE__)
        return nothing
    end
end

"""
    insertSommelierDecision(recentevents, decision; maxdistance=5)

Cache `decision` (serialized to JSON, plus a base64 copy) for `recentevents`
in `sommelier_decision_repository`, unless an entry closer than `maxdistance`
already exists.

SECURITY(review): single quotes are stripped rather than escaped before SQL
interpolation — adequate only for trusted internal callers.
"""
function insertSommelierDecision(recentevents::T1, decision::T2;
    maxdistance::Integer=5
) where {T1<:AbstractString, T2<:AbstractDict}
    tablename = "sommelier_decision_repository"
    # find similar
    df = findSimilarTextFromVectorDB(recentevents, tablename, "function_input_embedding", executeSQLVectorDB)
    row, col = size(df)
    distance = row == 0 ? Inf : df[1, :distance]
    if row == 0 || distance > maxdistance # no close enough decision stored in the database
        recentevents_embedding = getEmbedding(recentevents)[1]
        recentevents = replace(recentevents, "'" => "")
        decision_json = JSON3.write(decision)
        decision_base64 = base64encode(decision_json)
        decision = replace(decision_json, "'" => "")
        sql = """
            INSERT INTO $tablename (function_input, function_output, function_output_base64, function_input_embedding)
            VALUES ('$recentevents', '$decision', '$decision_base64', '$recentevents_embedding');
        """
        println("\n~~~ added new decision to vectorDB ", @__FILE__, " ", @__LINE__)
        println(sql)
        _ = executeSQLVectorDB(sql)
    else
        println("~~~ similar decision previously cached, distance $distance ", @__FILE__, " ", @__LINE__)
    end
end

# Per-process session identity shared by the agents below (and read by getEmbedding).
sessionId = GeneralUtils.uuid4snakecase()

# External capabilities handed to the sommelier agent.
externalFunction = (
    getEmbedding=getEmbedding,
    text2textInstructLLM=text2textInstructLLM,
    executeSQL=executeSQL,
    similarSQLVectorDB=similarSQLVectorDB,
    insertSQLVectorDB=insertSQLVectorDB,
    similarSommelierDecision=similarSommelierDecision,
    insertSommelierDecision=insertSommelierDecision,
)

# s = "full-bodied red wine, budget 1500 USD"
# r = YiemAgent.extractWineAttributes_1(agent, s)
# println(r)

# --------------------------- generating scenario and customer profile --------------------------- #

"""
    rolegenerator()

Ask the LLM to invent one random wine-store customer persona, parse the
response into a `Dict` keyed `:name`, `:situation`, `:mission`, `:profile`,
`:additional_information`, and attach a generated `:id` plus a `:systemmsg`
for role-playing that customer. Retries up to 10 times when the response is
missing or duplicates required headers; calls `error(...)` if all attempts
fail.

NOTE(review): `errornote` records why an attempt failed but is never fed
back into the next prompt — the retry does not tell the LLM what to correct.
"""
function rolegenerator()
    rolegenerator_systemmsg = """
    Your role:
    - You are a helpful assistant
    Your mission:
    - Create one random role of a potential customer of an internet wine store.
    You must follow the following guidelines:
    - the user only need the role, do not add your own words.
    - the role should be detailed and realistic.
    You should then respond to the user with:
    Name: a name of the potential customer
    Situation: a situation that the potential customer may be facing
    Mission: a mission of the potential customer
    Profile: a profile of the potential customer, including their age, gender, occupation, and other relevant information
    You should only respond in format as described below:
    Name: ...
    Situation: ...
    Mission: ...
    Profile: ...
    Additional_information: ...
    Here are some examples:
    Name: Jimmy
    Situation:
    - Your relationship with your boss is not that good. You need to improve your relationship with your boss.
    - Your boss's wedding anniversary is coming up.
    - You are at a wine store and start talking with the store's sommelier.
    Mission:
    - Ask the sommelier to provide multiple wine options, and subsequently choose one option from the presented list.
    Profile:
    - You are a young professional in a big company.
    - You are avid party goer
    - You like beer.
    - You know nothing about wine.
    - You have a budget of 1500usd.
    Additional_information:
    - your boss like spicy food.
    - your boss is a middle-aged man.
    - your boss likes Australian wine.
    Name: Kate
    Situation:
    - Your husband asked you to get him a bottle of wine. He will gift the wine to his business client while dining at a German restaurant.
    - Your husband is a business client and he will gift the wine to his business
    - You are at a wine store and start talking with the store's sommelier.
    Mission:
    - Ask the sommelier to provide multiple wine options, and subsequently choose one option from the presented list.
    Profile:
    - You are a CEO in a startup company.
    - You are a nerd
    - You don't like alcohol.
    - You have a budget of 150usd.
    - You don't care about organic, sulfite, gluten-free, or sustainability certified wines
    Additional_information:
    - your husband like spicy food.
    - your husband is a middle-aged man.
    Name: John
    Situation:
    - A local newspaper club wants to have a scoop about wine with local food in the U.S.
    - You are at a wine store and start talking with the store's sommelier.
    Mission:
    - Ask the sommelier to provide multiple wine options, and subsequently choose one option from the presented list.
    Profile:
    - I'm a young guy.
    - I prefer to express my ideas in a succinct and clear manner.
    Additional_information:
    - N/A
    Name: Jane
    Situation:
    - You have catering a dinner party with French cuisine.
    - You want to serve wine with your guests.
    - You are at a wine store and start talking with the store's sommelier.
    Mission:
    - Ask the sommelier to provide multiple wine options, and subsequently choose one option from the presented list.
    Profile:
    - You are a young French restaurant owner.
    - You like dry, full-bodied red wine with high tannin
    - You don't care about organic, sulfite, gluten-free, or sustainability certified wines.
    - You have a budget of 200 usd.
    Additional_information:
    - N/A
    Let's begin!
    """
    header = ["Name:", "Situation:", "Mission:", "Profile:", "Additional_information:"]
    dictkey = ["name", "situation", "mission", "profile", "additional_information"]
    errornote = "N/A"
    for attempt in 1:10
        _prompt = [
            Dict(:name => "system", :text => rolegenerator_systemmsg),
        ]
        prompt = GeneralUtils.formatLLMtext(_prompt, "qwen3")
        response = text2textInstructLLM(prompt) # generated role
        response = GeneralUtils.deFormatLLMtext(response, "qwen3")
        think, response = GeneralUtils.extractthink(response)
        # check whether response has all header
        detected_kw = GeneralUtils.detect_keyword(header, response)
        kwvalue = [i for i in values(detected_kw)]
        zeroind = findall(x -> x == 0, kwvalue)
        missingkeys = [header[i] for i in zeroind]
        if 0 ∈ values(detected_kw)
            # at least one required header absent → retry
            errornote = "$missingkeys are missing from your previous response"
            println("\nERROR YiemAgent rolegenerator() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
            continue
        elseif sum(values(detected_kw)) > length(header)
            # some header appeared more than once → retry
            errornote = "\nYour previous attempt has duplicated points according to the required response format"
            println("\nERROR YiemAgent rolegenerator() $errornote ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
            continue
        end
        responsedict = GeneralUtils.textToDict(response, header; dictKey=dictkey, symbolkey=true)
        responsedict[:id] = GeneralUtils.uuid4snakecase()
        responsedict[:systemmsg] = """
        You are role playing as a CUSTOMER of a wine store and you are currently talking with a sommelier of a wine store.
        Your profile is as follows:
        Situation: $(responsedict[:situation])
        Mission: $(responsedict[:mission])
        Profile: $(responsedict[:profile])
        Additional_information: $(responsedict[:additional_information])
        You should follow the following guidelines:
        - Focus on the lastest conversation
        - Your like to be short and concise
        - If you don't know an answer to sommelier's question, you should say: I don't know.
        - If you think the store can't provide what you seek, you can leave.
        You should then respond to the user with:
        Dialogue: what you want to say to the user
        Role: Verify that the dialogue is intended for the customer of a wine store. Can be "yes" or "no"
        You should only respond in format as described below:
        Dialogue: ...
        Role: ...
        Let's begin!
        """
        println("\nrolegenerator() ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
        println(responsedict)
        return responsedict
    end
    error("ERROR rolegenerator() failed to generate customer role: ", @__FILE__, ":", @__LINE__, " $(Dates.now())")
end

# Define the external functions for the customer agent in named tuple format
customer_externalFunction = (
    text2textInstructLLM=text2textInstructLLM,
)

"""
    main()

Run one full simulated conversation between the Yiem sommelier agent and a
freshly generated virtual customer. The loop ends when the agent's last
thought action is "ENDCONVERSATION", at which point the agent's decision log
is written as pretty-printed JSON under /appfolder/mountvolume/appdata/log/.
"""
function main()
    agent = YiemAgent.sommelier(
        externalFunction;
        name="Jane",
        id=sessionId, # agent instance id
        retailername="Yiem",
        llmFormatName="qwen3"
    )
    customerDict = rolegenerator()
    customer = YiemAgent.virtualcustomer(
        customer_externalFunction;
        systemmsg=customerDict[:systemmsg],
        name=customerDict[:name],
        id=sessionId, # agent instance id
        llmFormatName="qwen3"
    )
    # customer_chat = "hello"
    # YiemAgent.addNewMessage(customer, "assistant", customer_chat)
    # # add user activity to events memory
    # push!(customer.memory[:events],
    #     YiemAgent.eventdict(;
    #         event_description="the assistant talks to the user.",
    #         timestamp=Dates.now(),
    #         subject="assistant",
    #         actionname="CHATBOX",
    #         action_input=customer_chat,
    #     )
    # )
    # println("\ncustomer respond:\n $customer_chat")
    agent_response = YiemAgent.conversation(agent; maximumMsg=50)
    println("\nagent respond:\n $agent_response")
    while true
        customer_chat = nothing
        while customer_chat === nothing
            customer_response = YiemAgent.conversation(customer, Dict(:text=> agent_response); converPartnerName=agent.name, maximumMsg=50)
            customer_response = GeneralUtils.deFormatLLMtext(customer_response, customer.llmFormatName)
            customer_chat = customer_response #[WORKING] check whether customer response the same before
        end
        println("\ncustomer respond:\n $customer_chat")
        agent_response = YiemAgent.conversation(agent; userinput=Dict(:text=> customer_chat), maximumMsg=50)
        println("\nagent respond:\n $agent_response")
        if haskey(agent.memory[:events][end], :thought)
            lastAssistantAction = agent.memory[:events][end][:thought][:actionname]
            if lastAssistantAction == "ENDCONVERSATION" # store thoughtDict
                # save agent.memory[:shortmem][:decisionlog] to disk using JSON3
                println("\nsaving agent.memory[:shortmem][:decisionlog] to disk")
                date = "$(Dates.now())"
                date = replace(date, ':'=>'.') # ':' is unsafe in filenames on some systems
                filename = "agent_decision_log_$(date)_$(agent.id).json"
                filepath = "/appfolder/mountvolume/appdata/log/$filename"
                open(filepath, "w") do io
                    JSON3.pretty(io, agent.memory[:shortmem][:decisionlog])
                end
                # check how many file in /appfolder/mountvolume/appdata/log/ folder now
                logfilesnumber = length(readdir("/appfolder/mountvolume/appdata/log/"))
                println("\nCaching conversation process done. Total $logfilesnumber files in /appfolder/mountvolume/appdata/log/ folder now.\n")
                break
            end
        end
    end
end

# Run 100 simulated conversations back-to-back.
for i in 1:100
    main()
    println("\n Round $i/100 done.")
end
println("done")

# prompt =
# """
# <|im_start|>system
# You are a role playing agent acting as:
# Name: Emily
# Situation: - Emily is planning her upcoming birthday party and wants to make it extra special. She has invited close friends and family, and she's looking for a unique wine that will impress them.
# Mission: - Emily needs to find a rare and high-quality wine that matches the theme of her party, which is a mix of classic and modern flavors. She also wants to ensure that the wine is not too expensive so that it won't break her budget.
# Profile: - Emily is in her late 20s, works as a marketing executive for a tech company, and has a passion for trying new things. She's organized and detail-oriented but can be spontaneous when it comes to planning events.
# Additional_information: - Emily loves experimenting with different types of food and wine pairings.
# Your are currently talking with a sommelier.
# You should follow the following guidelines:
# - Focus on the lastest conversation
# - If you satisfy with the sommelier's recommendation for bottle of wine(s), you should say: Thanks for you help. I will buy the wine you recommended.
# - If you don't satisfy with the sommelier's questions or can't get a good wine recommendation, you can continue the conversation. # Let's begin! # <|im_end|> # <|im_start|>Jane # Hello! Welcome to Yiem's Wine Store. I'm Jane, your friendly sommelier. How can I assist you today? What type of wine are you in the mood for, and is there a special occasion or event on your mind? # <|im_end|> # <|im_start|>Emily # Hi Jane! Thank you so much for welcoming me. For my birthday party, I'm looking for something that combines classic and modern flavors. It's a mix of guests who enjoy both traditional tastes and more contemporary ones. Also, I want to make sure it won't break the bank. Any suggestions? # <|im_end|> # <|im_start|>Jane # Thank you for sharing your preferences, Jane! To better assist you, could you please let me know if there are any specific characteristics of wine you're looking for, such as tannin, sweetness, intensity, or acidity? Additionally, do you have any food items in mind that this wine should pair well with? # <|im_end|> # <|im_start|>Emily # """ # llmkwargs=Dict( # :num_ctx => 32768, # :temperature => 0.3, # ) # r = text2textInstructLLM(prompt, llmkwargs=llmkwargs) # println(r) # println(555) # response = YiemAgent.conversation(agent, Dict(:text=> "I want to get a French red wine under 100.")) # while true # println("your respond: ") # user_answer = readline() # response = YiemAgent.conversation(agent, Dict(:text=> user_answer)) # println("\n$response") # end # """ # Hello # I would like to get a bottle of wine for my boss but I don't know much about wine. Can you help me? # well actually, my boss is going to offer the wine to his client as a gift in a business meeting. All I know is his client like spicy food and French wine. I have a budget about 1000. 
# """ # input = "French wine, bordeaux, under USD100, pairs with spicy food" # r = YiemAgent.extractWineAttributes_1(a, input) # inventory_order = "French Syrah, Viognier, full bodied, under 100" # r = YiemAgent.extractWineAttributes_2(a, inventory_order) # pprintln(r) # cron job # @reboot sleep 50 && nvidia-smi -pm 1 # @reboot sleep 51 && nvidia-smi -i 0 -pl 150 # @reboot sleep 52 && nvidia-smi -i 1 -pl 150 # @reboot sleep 53 && nvidia-smi -i 2 -pl 150 # @reboot sleep 54 && nvidia-smi -i 3 -pl 150 # @reboot sleep 55 && julia -t 2 /home/ton/work/restartContainer/main.jl # using GeneralUtils # msgMeta = GeneralUtils.generate_msgMeta( # "/tonpc_containerServices", # senderName= "somename", # senderId= "1230", # mqttBrokerAddress= "mqtt.yiem.cc", # mqttBrokerPort= 1883, # ) # outgoingMsg = Dict( # :msgMeta=> msgMeta, # :payload=> "docker container restart playground-app", # ) # GeneralUtils.sendMqttMsg(outgoingMsg)