update
This commit is contained in:
@@ -2,7 +2,7 @@ module llmfunction
|
||||
|
||||
export virtualWineUserChatbox, jsoncorrection, checkinventory, # recommendbox,
|
||||
virtualWineUserRecommendbox, userChatbox, userRecommendbox, extractWineAttributes_1,
|
||||
extractWineAttributes_2
|
||||
extractWineAttributes_2, paraphrase
|
||||
|
||||
using HTTP, JSON3, URIs, Random, PrettyPrinting, UUIDs, Dates
|
||||
using GeneralUtils, SQLLLM
|
||||
@@ -307,8 +307,6 @@ function checkinventory(a::T1, input::T2
|
||||
println("\n~~~ checkinventory result ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
|
||||
println(textresult)
|
||||
|
||||
#[WORKING] when rawresponse is nothing, AI get errors
|
||||
|
||||
return (result=textresult, rawresponse=rawresponse, success=true, errormsg=nothing)
|
||||
end
|
||||
|
||||
@@ -684,168 +682,110 @@ function extractWineAttributes_2(a::T1, input::T2)::String where {T1<:agent, T2<
|
||||
end
|
||||
|
||||
|
||||
# function concept(a::sommelier, thoughtDict)
|
||||
# systemmsg =
|
||||
# """
|
||||
# Your name: N/A
|
||||
# Situation:
|
||||
# - You are a helpful assistant
|
||||
# Your vision:
|
||||
# - This is a good opportunity to help the user
|
||||
# Your mission:
|
||||
# - To describe the concept of a conversation
|
||||
# Mission's objective includes:
|
||||
# - To
|
||||
# Your responsibility includes:
|
||||
# 1) Given the situation, convey your thoughts to the user.
|
||||
# Your responsibility excludes:
|
||||
# 1) Asking or guiding the user to make a purchase
|
||||
# 2) Processing sales orders or engaging in any other sales-related activities
|
||||
# 3) Answering questions and offering additional services beyond just recommendations, such as delivery, box, gift wrapping, personalized messages. Customers can reach out to our sales at the store.
|
||||
# Your profile:
|
||||
# - You are a young professional in a big company.
|
||||
# - You are avid party goer
|
||||
# - You like beer.
|
||||
# - You know nothing about wine.
|
||||
# - You have a budget of 1500usd.
|
||||
# Additional information:
|
||||
# - your boss like spicy food.
|
||||
# - your boss is a middle-aged man.
|
||||
"""
    paraphrase(text2textInstructLLM::Function, text::String)

Paraphrase `text` by prompting an instruction-tuned LLM.

Builds a system/user prompt, renders it in the "llama3instruct" wire format
via `GeneralUtils.formatLLMtext`, calls `text2textInstructLLM(prompt)`, and
parses the reply for a `Paraphrase:` field. Retries up to 10 times on any
failure (prefixed response, empty field, duplicate keys), feeding an error
note back into the next user message.

# Arguments
- `text2textInstructLLM::Function`: callable taking a formatted prompt
  `String` and returning the model's raw completion as a `String`.
- `text::String`: the text to paraphrase.

# Returns
The paraphrased text (value of the `:paraphrase` key parsed from the model
response).

# Throws
An error when all 10 attempts fail.
"""
function paraphrase(text2textInstructLLM::Function, text::String)
    systemmsg =
        """
        Your name: N/A
        Your vision:
        - You are a helpful assistant who help the user to paraphrase their text.
        Your mission:
        - To help paraphrase the user's text
        Mission's objective includes:
        - To help paraphrase the user's text
        Your responsibility includes:
        1) To help paraphrase the user's text
        Your responsibility does NOT includes:
        1) N/A
        Your profile:
        - N/A
        Additional information:
        - N/A

        At each round of conversation, you will be given the following information:
        Text: The user's given text

        You MUST follow the following guidelines:
        - N/A

        You should follow the following guidelines:
        - N/A

        You should then respond to the user with:
        1) Paraphrase: Paraphrased text

        You should only respond in format as described below:
        Paraphrase: ...

        Let's begin!
        """

    errornote = ""
    response = nothing  # placeholder so error reporting can reference it

    for attempt in 1:10
        # The error note from the previous failed attempt (if any) is appended
        # so the model can correct itself on retry.
        usermsg = """
            Text: $text
            $errornote
            """

        _prompt =
            [
                Dict(:name => "system", :text => systemmsg),
                Dict(:name => "user", :text => usermsg)
            ]

        # Put in model format and open the assistant turn for completion.
        prompt = GeneralUtils.formatLLMtext(_prompt; formatname="llama3instruct")
        prompt *= """
            <|start_header_id|>assistant<|end_header_id|>
            """

        try
            response = text2textInstructLLM(prompt)

            # Sometimes the model intros its answer, e.g.
            # "here's how I would respond: ..." — reject and retry.
            if occursin("respond:", response)
                errornote = "You don't need to intro your response"
                error("\n~~~ paraphrase() response contain : ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
            end

            # Normalize characters that break downstream parsing or display.
            # (Originally remove_french_accents was applied twice; once suffices.)
            response = GeneralUtils.remove_french_accents(response)
            response = replace(response, '*' => "")
            response = replace(response, '$' => "USD")
            response = replace(response, '`' => "")

            responsedict = GeneralUtils.textToDict(response, ["Paraphrase"],
                rightmarker=":", symbolkey=true, lowercasekey=true)

            # Reject empty fields so the retry loop asks again.
            for i ∈ [:paraphrase]
                if length(JSON3.write(responsedict[i])) == 0
                    error("$i is empty ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
                end
            end

            # Check if there are more than 1 key per categories.
            for i ∈ [:paraphrase]
                matchkeys = GeneralUtils.findMatchingDictKey(responsedict, i)
                if length(matchkeys) > 1
                    error("paraphrase() has more than one key per categories")
                end
            end

            println("\n~~~ paraphrase() ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
            pprintln(Dict(responsedict))

            result = responsedict[:paraphrase]

            return result
        catch e
            # Log the failure with its backtrace, then let the loop retry.
            io = IOBuffer()
            showerror(io, e)
            errorMsg = String(take!(io))
            st = sprint((io, v) -> show(io, "text/plain", v), stacktrace(catch_backtrace()))
            println("\nAttempt $attempt. Error occurred: $errorMsg\n$st ", Dates.now(), " ", @__FILE__, " ", @__LINE__)
        end
    end
    # Fixed copy-paste leftover: previously reported "generatechat failed".
    error("paraphrase failed to generate a response")
end
|
||||
|
||||
|
||||
|
||||
|
||||
Reference in New Issue
Block a user