This commit is contained in:
narawat lamaiin
2025-04-01 21:17:15 +07:00
parent b8fd772a28
commit c21f943b12
4 changed files with 64 additions and 40 deletions

View File

@@ -268,7 +268,7 @@ end
# Returns
- `timeline::String`
A formatted string representing the events with their subjects, actions, and optional outcomes
Format: "{subject}> {actioninput} {outcome}\n" for each event
Format: "{index}) {subject}> {actioninput} {outcome}\n" for each event
# Example
@@ -277,24 +277,32 @@ events = [
Dict(:subject => "Assistant", :actioninput => "Hi there!", :outcome => "with a smile")
]
timeline = createTimeline(events)
# User> Hello
# Assistant> Hi there! with a smile
# 1) User> Hello
# 2) Assistant> Hi there! with a smile
"""
function createTimeline(events::T1) where {T1<:AbstractVector}
    # Build each formatted event line, then join them into one string.
    # Output format (matches the docstring example):
    #   "{index}) {subject}> {actioninput}\n"            when there is no outcome
    #   "{index}) {subject}> {actioninput} {outcome}\n"  when an outcome exists
    # NOTE: the previous revision emitted "Event_{index})", contradicting the
    # documented "{index})" format and example output; fixed here.
    lines = String[]
    for (i, event) in enumerate(events)
        # Shared prefix for both branches — avoids duplicating the format string.
        entry = "$i) $(event[:subject])> $(event[:actioninput])"
        # Use get(...) so a missing :outcome key behaves like an explicit `nothing`
        # instead of raising a KeyError (backward-compatible robustness).
        outcome = get(event, :outcome, nothing)
        if outcome !== nothing
            entry *= " $outcome"
        end
        push!(lines, entry * "\n")
    end
    # Returns "" for an empty events vector.
    return join(lines)
end
# """ Convert a single chat dictionary into LLM model instruct format.
# # Llama 3 instruct format example