diff --git a/src/interface.jl b/src/interface.jl index b0e9617..e437091 100644 --- a/src/interface.jl +++ b/src/interface.jl @@ -320,21 +320,57 @@ function reflector() """ You are an advanced reasoning agent that can improve based on self refection. You will be given the information related to the previous help you've done for a user. - - - - - - - You were unsuccessful in helping the user either because you guessed the wrong answer with Finish[], or you used up your set number of reasoning steps. - - - + You were unsuccessful in helping the user either because you guessed the wrong answer with Finish[answer], or you used up your set number of reasoning steps. In a few sentences, Diagnose a possible reason for failure and devise a new, concise, high level plan that aims to mitigate the same failure. Use complete sentences. Here are some examples: Previous Trial: + { + "question": "Hello, I would like a get a bottle of wine", + "thought_1": "A customer wants to buy a bottle of wine. Before making a recommendation, I need to know more about their preferences.", + "action_1": {"name": "chatbox", "input": "What is the occasion for which you're buying this wine?"}, + "observation_1": "We are holding a wedding party", + + "thought_2": "A wedding party, that's a great occasion! The customer might be looking for a celebratory drink. Let me ask some more questions to narrow down the options.", + "action_2": {"name": "chatbox", "input": "What type of food will you be serving at the wedding?"}, + "observation_2": "It will be Thai dishes.", + + "thought_3": "With Thai food, I should recommend a wine that complements its spicy and savory flavors. 
And since it's a celebratory occasion, the customer might prefer a full-bodied wine.", + "action_3": {"name": "chatbox", "input": "What is your budget for this bottle of wine?"}, + "observation_3": "I would spend up to 50 bucks.", + + "thought_4": "Now that I have some more information, it's time to narrow down the options.", + "action_4": {"name": "winestock", "input": "red wine with full body, pairs well with spicy food, budget \$50"}, + "observation_4": "I found the following wines in our stock: \n{\n 1: El Enemigo Cabernet Franc 2019\n2: Tantara Chardonnay 2017\n\n}\n", + + "thought_5": "Now that I have a list of potential wines, I need to know more about the customer's taste preferences.", + "action_5": {"name": "chatbox", "input": "What type of wine characteristics are you looking for? (e.g. tannin level, sweetness, intensity, acidity)"}, + "observation_5": "I like full-bodied Red wine with low tannin.", + + "thought_6": "Now that I have more information about the customer's preferences, it's time to make a recommendation.", + "action_6": {"name": "recommendbox", "input": "El Enemigo Cabernet Franc 2019"}, + "observation_6": "I don't like the one you recommend. I want dry wine." + } + Reflection: I searched one of the prime ministers involved in the signing, then attempted to answer right away. I should have searched each of the prime ministers, then looked up 'death' on each of their pages in order to get more information before answering. + + + + + + + + + + + + + + + + + + Question: The Rome Protocols were signed by three Prime Ministers one of which was assassinated as part of what? Thought 1: I need to search Rome Protocols, find the three Prime Ministers, then find what they were assassinated as part of. 
Action 1: Search[Rome Protocols] @@ -386,6 +422,52 @@ function reflector() Previous trial: {trajectory}Reflection: """ + + # apply LLM specific instruct format + externalService = a.config[:externalservice][:text2textinstruct] + llminfo = externalService[:llminfo] + prompt = + if llminfo[:name] == "llama3instruct" + formatLLMtext_llama3instruct("system", _prompt) + else + error("llm model name is not defined yet $(@__LINE__)") + end + + msgMeta = GeneralUtils.generate_msgMeta( + a.config[:externalservice][:text2textinstruct][:mqtttopic], + senderName= "progressValueEstimator", + senderId= a.id, + receiverName= "text2textinstruct", + mqttBroker= a.config[:mqttServerInfo][:broker], + mqttBrokerPort= a.config[:mqttServerInfo][:port], + ) + + outgoingMsg = Dict( + :msgMeta=> msgMeta, + :payload=> Dict( + :text=> prompt, + :kwargs=> Dict( + :max_tokens=> 512, + :stop=> ["<|eot_id|>"], + ) + ) + ) + + _response = GeneralUtils.sendReceiveMqttMsg(outgoingMsg) + _thoughtJsonStr = _response[:response][:text] + + # expectedJsonExample = + # """ + # Here is an expected JSON format: + # {"evaluation": "...", "score": "..."} + # """ + # resultJsonStr = jsoncorrection(a, _thoughtJsonStr, expectedJsonExample) + # resultDict = copy(JSON3.read(resultJsonStr)) + + # return resultDict[:evaluation], resultDict[:score] + + + return _thoughtJsonStr end diff --git a/test/test_1.jl b/test/test_1.jl index e93650d..ce9e443 100644 --- a/test/test_1.jl +++ b/test/test_1.jl @@ -137,7 +137,7 @@ result = GeneralUtils.sendMqttMsg(outgoingMsg) outgoingMsg = Dict( :msgMeta=> msgMeta, :payload=> Dict( - :text=> "Yep.", + :text=> "I don't have anything specific", :select=> nothing, :reward=> 0, :isterminal=> false, diff --git a/test/test_2.jl b/test/test_2.jl new file mode 100644 index 0000000..36c9f39 --- /dev/null +++ b/test/test_2.jl @@ -0,0 +1,130 @@ +using Revise # remove when this package is completed +using YiemAgent, GeneralUtils, JSON3, MQTTClient, Dates, UUIDs, DataStructures +using 
Base.Threads + +# ---------------------------------------------- 100 --------------------------------------------- # + +config = copy(JSON3.read("config.json")) + +instanceInternalTopic = config[:serviceInternalTopic][:mqtttopic] * "/1" + +client, connection = MakeConnection(config[:mqttServerInfo][:broker], + config[:mqttServerInfo][:port]) + +receiveUserMsgChannel = Channel{Dict}(4) +receiveInternalMsgChannel = Channel{Dict}(4) + +msgMeta = GeneralUtils.generate_msgMeta( + "N/A", + replyTopic = config[:servicetopic][:mqtttopic] # ask frontend reply to this instance_chat_topic + ) + +agentConfig = Dict( + :mqttServerInfo=> config[:mqttServerInfo], + :receivemsg=> Dict( + :prompt=> config[:servicetopic][:mqtttopic], # topic to receive prompt i.e. frontend send msg to this topic + :internal=> instanceInternalTopic, + ), + :externalservice=> config[:externalservice], +) + +# Instantiate an agent +tools=Dict( # update input format + "askbox"=> Dict( + :description => "Useful for when you need to ask the user for more context. 
Do not ask the user their own question.", + :input => """Input is a text in JSON format.{\"Q1\": \"How are you doing?\", \"Q2\": \"How may I help you?\"}""", + :output => "" , + :func => nothing, + ), + # "winestock"=> Dict( + # :description => "A handy tool for searching wine in your inventory that match the user preferences.", + # :input => """Input is a JSON-formatted string that contains a detailed and precise search query.{\"wine type\": \"rose\", \"price\": \"max 35\", \"sweetness level\": \"sweet\", \"intensity level\": \"light bodied\", \"Tannin level\": \"low\", \"Acidity level\": \"low\"}""", + # :output => """Output are wines that match the search query in JSON format.""", + # :func => ChatAgent.winestock, + # ), + "finalanswer"=> Dict( + :description => "Useful for when you are ready to recommend wines to the user.", + :input => """{\"finalanswer\": \"some text\"}.{\"finalanswer\": \"I recommend Zena Crown Vista\"}""", + :output => "" , + :func => nothing, + ), + ) + + a = YiemAgent.sommelier( + receiveUserMsgChannel, + receiveInternalMsgChannel, + agentConfig, + name= "assistant", + id= "testingSessionID", # agent instance id + tools=tools, + ) + + +input = +OrderedDict{Symbol, Any}( + :question => "Hello, I would like a get a bottle of wine", + :thought_1 => "A customer wants to buy a bottle of wine. Before making a recommendation, I need to know more about their preferences.", + :action_1 => Dict{Symbol, Any}( + :name => "chatbox", + :input => "What is the occasion for which you're buying this wine?" + ), + :observation_1 => "We are holding a wedding party", + + :thought_2 => "A wedding party, that's a great occasion! The customer might be looking for a celebratory drink. Let me ask some more questions to narrow down the options.", + :action_2 => Dict{Symbol, Any}( + :name => "chatbox", + :input => "What type of food will you be serving at the wedding?" 
+ ), + :observation_2 => "It will be Thai dishes.", + + :thought_3 => "With Thai food, I should recommend a wine that complements its spicy and savory flavors. And since it's a celebratory occasion, the customer might prefer a full-bodied wine.", + :action_3 => Dict{Symbol, Any}( + :name => "chatbox", + :input => "What is your budget for this bottle of wine?" + ), + :observation_3 => "I would spend up to 50 bucks.", + + :thought_4 => "Now that I have some more information, it's time to narrow down the options.", + :action_4 => Dict{Symbol, Any}( + :name => "winestock", + :input => "red wine with full body, pairs well with spicy food, budget \$50"), + :observation_4 => "I found the following wines in our stock: \n{\n 1: El Enemigo Cabernet Franc 2019\n2: Tantara Chardonnay 2017\n\n}\n", + + :thought_5 => "Now that I have a list of potential wines, I need to know more about the customer's taste preferences.", + :action_5 => Dict{Symbol, Any}( + :name => "chatbox", + :input => "What type of wine characteristics are you looking for? (e.g. t.e.g. tannin level, sweetness, intensity, acidity)" + ), + :observation_5 => "I like full-bodied Red wine with low tannin.", + + :thought_6 => "Now that I have more information about the customer's preferences, it's time to make a recommendation.", + :action_6 => Dict{Symbol, Any}( + :name => "recommendbox", + :input => "El Enemigo Cabernet Franc 2019" + ), + :observation_6 => "I don't like the one you recommend. I want dry wine." + ) + + + + +result = YiemAgent.reflector(a, input) + + + + + + + + + + + + + + + + + + +