update
This commit is contained in:
@@ -258,6 +258,10 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol, Any} where {T<:age
|
||||
|
||||
You should follow the following guidelines:
|
||||
- Identifying at least four preferences before checking inventory significantly improves search results
|
||||
- Sometimes, the item a user desires might not be available in your inventory. In such cases, inform the user that the item is unavailable and suggest an alternative instead.
|
||||
|
||||
For your information:
|
||||
- vintage 0 means non-vintage.
|
||||
|
||||
You should then respond to the user with interleaving Understanding, Reasoning, Plan, Action:
|
||||
1) Understanding:
|
||||
@@ -269,7 +273,7 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol, Any} where {T<:age
|
||||
- CHATBOX which you can use to talk with the user. The input is your intentions for the dialogue. Be specific.
|
||||
- CHECKINVENTORY which you can use to check info about wine in your inventory. The input is a search term in verbal English.
|
||||
Good query example: black car, a stereo, 200 mile range, electric motor.
|
||||
- PRESENTBOX which you can use to introduce / suggest / recommend wines you just found in the database to the user.
|
||||
- PRESENTBOX which you can use to introduce / suggest / recommend wine label you just found in the inventory to the user. Not for general conversation nor follow up conversation.
|
||||
The input is instructions on how you want the presentation to be conducted.
|
||||
Here are some input examples,
|
||||
"First, provide detailed introductions of Zena Crown, Schrader Cabernet Sauvignon.
|
||||
@@ -277,7 +281,7 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol, Any} where {T<:age
|
||||
Third, explain the potential impact each option could bring to the user."
|
||||
- ENDCONVERSATION which you can use when you want to finish the conversation with the user. The input is "NA".
|
||||
4) Action_input: input of the action
|
||||
5) Mentioning_wine: Are you mentioning specific wine name to the user? Can be "Yes" or "No"
|
||||
5) Mentioning_wine: Are you mentioning specific wine label or winery to the user? Can be "Yes" or "No"
|
||||
|
||||
You should only respond in format as described below:
|
||||
Understanding: ...
|
||||
@@ -317,8 +321,7 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol, Any} where {T<:age
|
||||
response = nothing # placeholder for show when error msg show up
|
||||
|
||||
for attempt in 1:10
|
||||
usermsg =
|
||||
"""
|
||||
usermsg = """
|
||||
Recap: $(a.memory[:recap])
|
||||
Your recent events: $timeline
|
||||
Your Q&A: $(a.memory[:QandA])
|
||||
@@ -333,8 +336,7 @@ function decisionMaker(a::T; recent::Integer=5)::Dict{Symbol, Any} where {T<:age
|
||||
|
||||
# put in model format
|
||||
prompt = GeneralUtils.formatLLMtext(_prompt; formatname="llama3instruct")
|
||||
prompt *=
|
||||
"""
|
||||
prompt *= """
|
||||
<|start_header_id|>assistant<|end_header_id|>
|
||||
"""
|
||||
|
||||
@@ -421,8 +423,7 @@ julia>
|
||||
function evaluator(config::T1, state::T2
|
||||
)::Tuple{String,Integer} where {T1<:AbstractDict,T2<:AbstractDict}
|
||||
|
||||
systemmsg =
|
||||
"""
|
||||
systemmsg = """
|
||||
Analyze the trajectories of a solution to a question answering task. The trajectories are
|
||||
labeled by environmental observations about the situation, thoughts that can reason about
|
||||
the current situation and actions that can be three types:
|
||||
@@ -475,8 +476,7 @@ function evaluator(config::T1, state::T2
|
||||
Let's begin!
|
||||
"""
|
||||
|
||||
usermsg =
|
||||
"""
|
||||
usermsg = """
|
||||
$(JSON3.write(state[:thoughtHistory]))
|
||||
"""
|
||||
|
||||
@@ -488,8 +488,7 @@ function evaluator(config::T1, state::T2
|
||||
|
||||
# put in model format
|
||||
prompt = formatLLMtext(chathistory, "llama3instruct")
|
||||
prompt *=
|
||||
"""
|
||||
prompt *= """
|
||||
<|start_header_id|>assistant<|end_header_id|>
|
||||
{
|
||||
"""
|
||||
@@ -525,8 +524,7 @@ function evaluator(config::T1, state::T2
|
||||
try
|
||||
response = GeneralUtils.sendReceiveMqttMsg(outgoingMsg)
|
||||
_responseJsonStr = response[:response][:text]
|
||||
expectedJsonExample =
|
||||
"""
|
||||
expectedJsonExample = """
|
||||
Here is an expected JSON format:
|
||||
{"evaluation": "...", "score": "..."}
|
||||
"""
|
||||
@@ -571,8 +569,7 @@ julia>
|
||||
function reflector(config::T1, state::T2)::String where {T1<:AbstractDict,T2<:AbstractDict}
|
||||
# https://github.com/andyz245/LanguageAgentTreeSearch/blob/main/hotpot/hotpot.py
|
||||
|
||||
_prompt =
|
||||
"""
|
||||
_prompt = """
|
||||
You are a helpful sommelier working for a wine store.
|
||||
Your goal is to recommend the best wine from your inventory that matches the user's preferences.
|
||||
You will be given a question and a trajectory of the previous help you've done for a user.
|
||||
@@ -657,8 +654,7 @@ function reflector(config::T1, state::T2)::String where {T1<:AbstractDict, T2<:A
|
||||
try
|
||||
response = GeneralUtils.sendReceiveMqttMsg(outgoingMsg)
|
||||
_responseJsonStr = response[:response][:text]
|
||||
expectedJsonExample =
|
||||
"""
|
||||
expectedJsonExample = """
|
||||
Here is an expected JSON format:
|
||||
{"reflection": "..."}
|
||||
"""
|
||||
@@ -1048,8 +1044,7 @@ julia>
|
||||
# Signature
|
||||
"""
|
||||
function generatechat(a::sommelier)
|
||||
systemmsg =
|
||||
"""
|
||||
systemmsg = """
|
||||
Your name is $(a.name). You are a helpful assistant acting as a polite, website-based sommelier for an online wine store.
|
||||
You are currently talking with the user.
|
||||
Your goal includes:
|
||||
@@ -1075,7 +1070,7 @@ function generatechat(a::sommelier)
|
||||
- If the user interrupts, prioritize the user
|
||||
|
||||
You should then respond to the user with:
|
||||
1) Mentioning_wine: Are you going to mentioning specific wine name to the user? Can be "Yes" or "No"
|
||||
1) Mentioning_wine: Are you going to mention a specific wine label or winery to the user? Can be "Yes" or "No"
|
||||
2) Chat: Given the situation, what would you say to the user?
|
||||
|
||||
You should only respond in format as described below:
|
||||
@@ -1097,8 +1092,7 @@ function generatechat(a::sommelier)
|
||||
response = nothing # placeholder for show when error msg show up
|
||||
|
||||
for attempt in 1:10
|
||||
usermsg =
|
||||
"""
|
||||
usermsg = """
|
||||
Your ongoing conversation with the user: $chathistory
|
||||
$context
|
||||
Your thoughts: $(a.memory[:CHATBOX])
|
||||
@@ -1113,8 +1107,7 @@ function generatechat(a::sommelier)
|
||||
|
||||
# put in model format
|
||||
prompt = GeneralUtils.formatLLMtext(_prompt; formatname="llama3instruct")
|
||||
prompt *=
|
||||
"""
|
||||
prompt *= """
|
||||
<|start_header_id|>assistant<|end_header_id|>
|
||||
"""
|
||||
|
||||
@@ -1176,8 +1169,7 @@ function generatechat(a::sommelier)
|
||||
end
|
||||
|
||||
function generatechat(a::companion)
|
||||
systemmsg =
|
||||
"""
|
||||
systemmsg = """
|
||||
Your name is $(a.name). You are a helpful assistant.
|
||||
You are currently talking with the user.
|
||||
Your goal includes:
|
||||
@@ -1204,8 +1196,7 @@ function generatechat(a::companion)
|
||||
noise = ""
|
||||
|
||||
for attempt in 1:10
|
||||
usermsg =
|
||||
"""
|
||||
usermsg = """
|
||||
Your ongoing conversation with the user: $chathistory
|
||||
$noise
|
||||
"""
|
||||
@@ -1218,8 +1209,7 @@ function generatechat(a::companion)
|
||||
|
||||
# put in model format
|
||||
prompt = GeneralUtils.formatLLMtext(_prompt; formatname="llama3instruct")
|
||||
prompt *=
|
||||
"""
|
||||
prompt *= """
|
||||
<|start_header_id|>assistant<|end_header_id|>
|
||||
"""
|
||||
|
||||
@@ -1293,8 +1283,7 @@ function generatequestion(a, text2textInstructLLM::Function; recent=nothing)::St
|
||||
# Let's begin!
|
||||
# """
|
||||
|
||||
systemmsg =
|
||||
"""
|
||||
systemmsg = """
|
||||
Your name is $(a.name). You are a helpful assistant acting as a polite, website-based sommelier for $(a.retailername)'s online store.
|
||||
Your goal includes:
|
||||
1) Help the user select the best wines from your inventory that align with the user's preferences
|
||||
@@ -1342,6 +1331,9 @@ function generatequestion(a, text2textInstructLLM::Function; recent=nothing)::St
|
||||
...
|
||||
|
||||
Here are some examples:
|
||||
Q: Why is the user saying this?
|
||||
A: According to the situation, ...
|
||||
|
||||
Q: The user is asking for a cappuccino. Do I have it at my cafe?
|
||||
A: No I don't.
|
||||
|
||||
@@ -1351,17 +1343,26 @@ function generatequestion(a, text2textInstructLLM::Function; recent=nothing)::St
|
||||
Q: Are they allergic to milk?
|
||||
A: According to the situation, since they mentioned a cappuccino before, it seems they are not allergic to milk.
|
||||
|
||||
Q: Have I searched the database yet?
|
||||
Q: Have I searched the inventory yet?
|
||||
A: According to the situation, no. I need more information.
|
||||
|
||||
Q: Did I found something in the database?
|
||||
Q: Should I check the inventory now?
|
||||
A: According to the situation, ...
|
||||
|
||||
Q: What do I have in the inventory?
|
||||
A: According to the situation, ...
|
||||
|
||||
Q: Which items are within the user price range? And which items are out of the user price range?
|
||||
A: According to the situation, ...
|
||||
|
||||
Q: Do I have what the user wants in stock?
|
||||
A: According to the situation, ...
|
||||
|
||||
Q: Did I introduce the coffee blend varieties to the user yet?
|
||||
A: According to the situation, no, I didn't because I have not searched the database yet.
|
||||
A: According to the situation, no, I didn't because I have not searched the inventory yet.
|
||||
|
||||
Q: Am I certain about the information I'm going to share with the user, or should I verify the information first?
|
||||
A: According to the situation, ...
|
||||
|
||||
Let's begin!
|
||||
"""
|
||||
@@ -1387,8 +1388,7 @@ function generatequestion(a, text2textInstructLLM::Function; recent=nothing)::St
|
||||
response = nothing # store for show when error msg show up
|
||||
|
||||
for attempt in 1:10
|
||||
usermsg =
|
||||
"""
|
||||
usermsg = """
|
||||
Recap: $(a.memory[:recap])
|
||||
Your recent events: $timeline
|
||||
$errornote
|
||||
@@ -1402,8 +1402,7 @@ function generatequestion(a, text2textInstructLLM::Function; recent=nothing)::St
|
||||
|
||||
# put in model format
|
||||
prompt = GeneralUtils.formatLLMtext(_prompt; formatname="llama3instruct")
|
||||
prompt *=
|
||||
"""
|
||||
prompt *= """
|
||||
<|start_header_id|>assistant<|end_header_id|>
|
||||
"""
|
||||
|
||||
@@ -1413,10 +1412,10 @@ function generatequestion(a, text2textInstructLLM::Function; recent=nothing)::St
|
||||
if q_number < 3
|
||||
error("too few questions only $q_number questions are generated ", @__FILE__, " ", @__LINE__)
|
||||
# check whether "A1" is in the response, if not error.
|
||||
elseif !occursin("A1", response)
|
||||
elseif !occursin("A1:", response)
|
||||
error("no answer found in the response ", @__FILE__, " ", @__LINE__)
|
||||
end
|
||||
# response = string(split(response, "Please")[1]) # LLM usually add comments which is no need.
|
||||
|
||||
responsedict = GeneralUtils.textToDict(response,
|
||||
["Understanding", "Q1"],
|
||||
rightmarker=":", symbolkey=true, lowercasekey=true)
|
||||
@@ -1619,8 +1618,7 @@ function generateSituationReport(a, text2textInstructLLM::Function; skiprecent::
|
||||
# Let's begin!
|
||||
# """
|
||||
|
||||
systemmsg =
|
||||
"""
|
||||
systemmsg = """
|
||||
You are the assistant being in the given events.
|
||||
Your task is to write a summary for each event in an ongoing, interleaving series.
|
||||
|
||||
@@ -1663,8 +1661,7 @@ function generateSituationReport(a, text2textInstructLLM::Function; skiprecent::
|
||||
response = nothing # store for show when error msg show up
|
||||
|
||||
for attempt in 1:10
|
||||
usermsg =
|
||||
"""
|
||||
usermsg = """
|
||||
Total events: $(length(events))
|
||||
Events timeline: $timeline
|
||||
$errornote
|
||||
@@ -1678,8 +1675,7 @@ function generateSituationReport(a, text2textInstructLLM::Function; skiprecent::
|
||||
|
||||
# put in model format
|
||||
prompt = GeneralUtils.formatLLMtext(_prompt; formatname="llama3instruct")
|
||||
prompt *=
|
||||
"""
|
||||
prompt *= """
|
||||
<|start_header_id|>assistant<|end_header_id|>
|
||||
"""
|
||||
|
||||
@@ -1687,9 +1683,10 @@ function generateSituationReport(a, text2textInstructLLM::Function; skiprecent::
|
||||
# responsedict = GeneralUtils.textToDict(response,
|
||||
# ["summary", "presented", "selected"],
|
||||
# rightmarker=":", symbolkey=true)
|
||||
# println("--> generateSituationReport ", @__FILE__, " ", @__LINE__)
|
||||
println("\n~~~ generateSituationReport() ", @__FILE__, " ", @__LINE__)
|
||||
pprintln(response)
|
||||
|
||||
|
||||
eventcount = count("Event_", response)
|
||||
|
||||
# if eventcount < (length(events))
|
||||
|
||||
Reference in New Issue
Block a user