This commit is contained in:
2026-02-19 12:27:15 +07:00
parent 51e494c48b
commit 7386f8ed0b
11 changed files with 200468 additions and 6 deletions

View File

@@ -2,7 +2,7 @@
julia_version = "1.12.5"
manifest_format = "2.0"
project_hash = "24baf0eb17859281acbf0208c4164e7fb92fabbe"
project_hash = "8a7a8b88d777403234a6816e699fb0ab1e991aac"
[[deps.AliasTables]]
deps = ["PtrArrays", "Random"]

View File

@@ -1,5 +1,6 @@
[deps]
Arrow = "69666777-d1a9-59fb-9406-91d4454c9d45"
DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0"
Dates = "ade2ca70-3891-5945-98fb-dc099432e06a"
GeneralUtils = "c6c72f09-b708-4ac8-ac7c-2084d70108fe"
HTTP = "cd3eb016-35fb-5094-929b-558a96fad6f3"

200014
received_large_dict.json Normal file

File diff suppressed because it is too large Load Diff

13
received_small_dict.json Normal file
View File

@@ -0,0 +1,13 @@
{
"name": "Alice",
"scores": [
95,
88,
92
],
"metadata": {
"height": 155,
"weight": 55
},
"age": 30
}

View File

@@ -0,0 +1,82 @@
#!/usr/bin/env julia
# Test script for Dictionary transport testing
# Tests receiving 1 large and 1 small Dictionaries via direct and link transport
# Uses NATSBridge.jl smartreceive with "dictionary" type
using NATS, JSON, UUIDs, Dates, PrettyPrinting, DataFrames, Arrow, HTTP
# Include the bridge module
include("../src/NATSBridge.jl")
using .NATSBridge
# Configuration
const SUBJECT = "/NATSBridge_dict_test"                 # NATS subject this receiver subscribes to
const NATS_URL = "nats.yiem.cc"                         # NATS broker host
const FILESERVER_URL = "http://192.168.88.104:8080"     # plik fileserver; NOTE(review): not referenced below — kept for parity with the sender script, confirm
# ------------------------------------------------------------------------------------------------ #
#                                    test dictionary transfer                                      #
# ------------------------------------------------------------------------------------------------ #
# Helper: Log with correlation ID
# Print `message` to stdout prefixed with the current wall-clock time.
function log_trace(message)
    return println(string("[", Dates.now(), "] ", message))
end
# Receiver: Listen for messages and verify Dictionary handling
# Receiver: connect to NATS, subscribe to SUBJECT, and handle every incoming
# NATSBridge envelope. Each decoded Dictionary payload is printed and saved to
# ./received_<dataname>.json; any other payload type is logged and skipped.
# Blocks for 120 seconds, then drains the connection.
function test_dict_receive()
    conn = NATS.connect(NATS_URL)
    NATS.subscribe(conn, SUBJECT) do msg
        log_trace("Received message on $(msg.subject)")
        # Use NATSBridge.smartreceive to handle the data
        # API: smartreceive(msg, download_handler; max_retries, base_delay, max_delay)
        result = NATSBridge.smartreceive(
            msg;
            max_retries = 5,
            base_delay = 100,
            max_delay = 5000
        )
        # Result is a list of (dataname, data, data_type) tuples
        for (dataname, data, data_type) in result
            # NOTE(review): assumes smartreceive decodes "dictionary" payloads to
            # exactly JSON.Object{String, Any}; a different value-type parameter
            # would silently fall into the else branch — confirm against NATSBridge.
            if isa(data, JSON.Object{String, Any})
                log_trace("Received Dictionary '$dataname' of type $data_type")
                # Display dictionary contents
                println("  Contents:")
                for (key, value) in data
                    println("    $key => $value")
                end
                # Save to JSON file (2-space indented)
                output_path = "./received_$dataname.json"
                json_str = JSON.json(data, 2)
                write(output_path, json_str)
                log_trace("Saved Dictionary to $output_path")
            else
                log_trace("Received unexpected data type for '$dataname': $(typeof(data))")
            end
        end
    end
    # Keep listening for 120 seconds so the sender script has time to run
    sleep(120)
    NATS.drain(conn)
end
# --- Script entry point: announce the test, run the receiver, report completion. ---
foreach(println, (
    "Starting Dictionary transport test...",
    "Note: This receiver will wait for messages from the sender.",
    "Run test_julia_to_julia_dict_sender.jl first to send test data.",
    "testing smartreceive",
))
test_dict_receive()
println("Test completed.")

View File

@@ -0,0 +1,136 @@
#!/usr/bin/env julia
# Test script for Dictionary transport testing
# Tests sending 1 large and 1 small Dictionaries via direct and link transport
# Uses NATSBridge.jl smartsend with "dictionary" type
using NATS, JSON, UUIDs, Dates, PrettyPrinting, DataFrames, Arrow, HTTP
# Include the bridge module
include("../src/NATSBridge.jl")
using .NATSBridge
# Configuration
const SUBJECT = "/NATSBridge_dict_test"                 # NATS subject the sender publishes to
const NATS_URL = "nats.yiem.cc"                         # NATS broker host
const FILESERVER_URL = "http://192.168.88.104:8080"     # plik fileserver used for link transport
# Create correlation ID for tracing: a fresh UUID per run, stamped on every log line.
# NOTE(review): non-const global — acceptable in a script, would be flagged in library code.
correlation_id = string(uuid4())
# ------------------------------------------------------------------------------------------------ #
#                                    test dictionary transfer                                      #
# ------------------------------------------------------------------------------------------------ #
# Helper: Log with correlation ID
# Print `message` with a timestamp and this run's correlation ID for tracing.
function log_trace(message)
    return println(string("[", Dates.now(), "] [Correlation: ", correlation_id, "] ", message))
end
# File upload handler for plik server
# Upload `data` as a one-shot file named `dataname` to a plik fileserver.
# Two-step protocol: first POST /upload to obtain an upload ID and token,
# then POST the bytes as multipart form data under that upload ID.
# Returns a Dict with the HTTP status, upload/file IDs, and the download URL.
function plik_upload_handler(fileserver_url::String, dataname::String, data::Vector{UInt8})::Dict{String, Any}
    # Step 1: request a new one-shot upload ID.
    create_resp = HTTP.request(
        "POST",
        "$fileserver_url/upload",
        ["Content-Type" => "application/json"],
        """{ "OneShot" : true }""";
        # NOTE(review): `body_is_form` is not a documented HTTP.jl keyword — confirm it is intentional.
        body_is_form = false
    )
    create_json = JSON.parse(String(create_resp.body))
    upload_id = create_json["id"]
    token = create_json["uploadToken"]
    # Step 2: upload the payload as multipart form data.
    part = HTTP.Multipart(dataname, IOBuffer(data), "application/octet-stream")
    upload_resp = HTTP.post(
        "$fileserver_url/file/$upload_id",
        ["X-UploadToken" => token],
        HTTP.Form(Dict("file" => part))
    )
    upload_json = JSON.parse(String(upload_resp.body))
    file_id = upload_json["id"]
    return Dict(
        "status" => upload_resp.status,
        "uploadid" => upload_id,
        "fileid" => file_id,
        "url" => "$fileserver_url/file/$upload_id/$file_id/$dataname"
    )
end
# Sender: Send Dictionaries via smartsend
# Sender: build one small and one large Dict and ship both in a single
# NATSBridge.smartsend call. The small dict fits under `size_threshold` and
# should travel inline (direct transport, JSON encoded); the large one should
# be uploaded via `plik_upload_handler` and sent as a link.
function test_dict_send()
    # Small Dictionary — comfortably under the 1 MB threshold.
    small_dict = Dict(
        "name" => "Alice",
        "age" => 30,
        "scores" => [95, 88, 92],
        "metadata" => Dict(
            "height" => 155,
            "weight" => 55
        )
    )
    # Large Dictionary (~2 MB of generated data) to force link transport.
    large_dict = Dict(
        "ids" => collect(1:50000),
        "names" => ["User_$i" for i in 1:50000],
        "scores" => rand(1:100, 50000),
        "categories" => ["Category_$(rand(1:10))" for _ in 1:50000],  # loop var was unused
        "metadata" => Dict(
            "source" => "test_generator",
            "timestamp" => string(Dates.now())
        )
    )
    # (dataname, data, type) tuples understood by smartsend.
    data1 = ("small_dict", small_dict, "dictionary")
    data2 = ("large_dict", large_dict, "dictionary")
    # Send both payloads in one envelope; the upload handler is only invoked
    # for payloads exceeding `size_threshold`.
    env = NATSBridge.smartsend(
        SUBJECT,
        [data1, data2], # List of (dataname, data, type) tuples
        nats_url = NATS_URL,
        fileserver_url = FILESERVER_URL,
        fileserverUploadHandler = plik_upload_handler,
        size_threshold = 1_000_000, # 1MB threshold
        correlation_id = correlation_id,
        msg_purpose = "chat",
        sender_name = "dict_sender",
        receiver_name = "",
        receiver_id = "",
        reply_to = "",
        reply_to_msg_id = ""
    )
    log_trace("Sent message with $(length(env.payloads)) payloads")
    # Log how each payload was transported.
    for (i, payload) in enumerate(env.payloads)
        # BUGFIX: was '$payload.dataname', which interpolates `payload` and then
        # appends the literal text ".dataname"; field access needs $(...).
        log_trace("Payload $i ('$(payload.dataname)'):")
        log_trace("  Transport: $(payload.transport)")
        log_trace("  Type: $(payload.type)")
        log_trace("  Size: $(payload.size) bytes")
        log_trace("  Encoding: $(payload.encoding)")
        if payload.transport == "link"
            # For link transport the payload data is the download URL.
            log_trace("  URL: $(payload.data)")
        end
    end
end
# --- Script entry point: announce the test, run the sender, report completion. ---
println("Starting Dictionary transport test...")
println(string("Correlation ID: ", correlation_id))
println("start smartsend for dictionaries")
test_dict_send()
println("Test completed.")

View File

@@ -3,8 +3,7 @@
# Tests sending a large file (> 1MB) via smartsend with binary type
# Updated to match NATSBridge.jl API
using NATS, JSON, UUIDs, Dates
using HTTP
using NATS, JSON, UUIDs, Dates, PrettyPrinting, DataFrames, Arrow, HTTP
# workdir =

View File

@@ -3,8 +3,7 @@
# Tests sending a large file (> 1MB) via smartsend with binary type
# Updated to match NATSBridge.jl API
using NATS, JSON, UUIDs, Dates
using HTTP
using NATS, JSON, UUIDs, Dates, PrettyPrinting, DataFrames, Arrow, HTTP
# workdir =

View File

@@ -0,0 +1,84 @@
#!/usr/bin/env julia
# Test script for DataFrame table transport testing
# Tests receiving 1 large and 1 small DataFrames via direct and link transport
# Uses NATSBridge.jl smartreceive with "table" type
using NATS, JSON, UUIDs, Dates, PrettyPrinting, DataFrames, Arrow, HTTP
# Include the bridge module
include("../src/NATSBridge.jl")
using .NATSBridge
# Configuration
const SUBJECT = "/NATSBridge_table_test"                # NATS subject this receiver subscribes to
const NATS_URL = "nats.yiem.cc"                         # NATS broker host
const FILESERVER_URL = "http://192.168.88.104:8080"     # plik fileserver; NOTE(review): not referenced below — kept for parity with the sender script, confirm
# ------------------------------------------------------------------------------------------------ #
#                                       test table transfer                                        #
# ------------------------------------------------------------------------------------------------ #
# Helper: Log with correlation ID
# Print `message` to stdout prefixed with the current wall-clock time.
function log_trace(message)
    return println(string("[", Dates.now(), "] ", message))
end
# Receiver: Listen for messages and verify DataFrame table handling
# Receiver: connect to NATS, subscribe to SUBJECT, and handle every incoming
# NATSBridge envelope. Each payload that converts to a DataFrame is summarized
# and saved as Arrow IPC to ./received_<dataname>.arrow; anything else is
# logged and skipped. Blocks for 120 seconds, then drains the connection.
function test_table_receive()
    conn = NATS.connect(NATS_URL)
    NATS.subscribe(conn, SUBJECT) do msg
        log_trace("Received message on $(msg.subject)")
        # smartreceive(msg; max_retries, base_delay, max_delay) downloads and
        # decodes every payload carried by the message.
        result = NATSBridge.smartreceive(
            msg;
            max_retries = 5,
            base_delay = 100,
            max_delay = 5000
        )
        # Result is a list of (dataname, data, data_type) tuples
        for (dataname, data, data_type) in result
            # BUGFIX: previously `data = DataFrame(data)` ran *before* the
            # isa(data, DataFrame) check, so the else branch was unreachable and
            # a non-table payload threw inside the subscribe handler. Guard the
            # conversion so unexpected payloads are logged instead of crashing.
            df = try
                DataFrame(data)
            catch
                nothing
            end
            if df === nothing
                log_trace("Received unexpected data type for '$dataname': $(typeof(data))")
            else
                log_trace("Received DataFrame '$dataname' of type $data_type")
                log_trace("  Dimensions: $(size(df, 1)) rows x $(size(df, 2)) columns")
                log_trace("  Column names: $(names(df))")
                # Display first few rows
                println("  First 5 rows:")
                display(df[1:min(5, size(df, 1)), :])
                # Serialize to Arrow IPC in memory, then write to disk.
                output_path = "./received_$dataname.arrow"
                io = IOBuffer()
                Arrow.write(io, df)
                write(output_path, take!(io))
                log_trace("Saved DataFrame to $output_path")
            end
        end
    end
    # Keep listening for 120 seconds so the sender script has time to run
    sleep(120)
    NATS.drain(conn)
end
# --- Script entry point: announce the test, run the receiver, report completion. ---
foreach(println, (
    "Starting DataFrame table transport test...",
    "Note: This receiver will wait for messages from the sender.",
    "Run test_julia_to_julia_table_sender.jl first to send test data.",
    "testing smartreceive",
))
test_table_receive()
println("Test completed.")

View File

@@ -0,0 +1,134 @@
#!/usr/bin/env julia
# Test script for DataFrame table transport testing
# Tests sending 1 large and 1 small DataFrames via direct and link transport
# Uses NATSBridge.jl smartsend with "table" type
using NATS, JSON, UUIDs, Dates, PrettyPrinting, DataFrames, Arrow, HTTP
# Include the bridge module
include("../src/NATSBridge.jl")
using .NATSBridge
# Configuration
const SUBJECT = "/NATSBridge_table_test"                # NATS subject the sender publishes to
const NATS_URL = "nats.yiem.cc"                         # NATS broker host
const FILESERVER_URL = "http://192.168.88.104:8080"     # plik fileserver used for link transport
# Create correlation ID for tracing: a fresh UUID per run, stamped on every log line.
# NOTE(review): non-const global — acceptable in a script, would be flagged in library code.
correlation_id = string(uuid4())
# ------------------------------------------------------------------------------------------------ #
#                                       test table transfer                                        #
# ------------------------------------------------------------------------------------------------ #
# Helper: Log with correlation ID
# Print `message` with a timestamp and this run's correlation ID for tracing.
function log_trace(message)
    return println(string("[", Dates.now(), "] [Correlation: ", correlation_id, "] ", message))
end
# File upload handler for plik server
# Upload `data` as a one-shot file named `dataname` to a plik fileserver.
# Two-step protocol: first POST /upload to obtain an upload ID and token,
# then POST the bytes as multipart form data under that upload ID.
# Returns a Dict with the HTTP status, upload/file IDs, and the download URL.
function plik_upload_handler(fileserver_url::String, dataname::String, data::Vector{UInt8})::Dict{String, Any}
    # Step 1: request a new one-shot upload ID.
    create_resp = HTTP.request(
        "POST",
        "$fileserver_url/upload",
        ["Content-Type" => "application/json"],
        """{ "OneShot" : true }""";
        # NOTE(review): `body_is_form` is not a documented HTTP.jl keyword — confirm it is intentional.
        body_is_form = false
    )
    create_json = JSON.parse(String(create_resp.body))
    upload_id = create_json["id"]
    token = create_json["uploadToken"]
    # Step 2: upload the payload as multipart form data.
    part = HTTP.Multipart(dataname, IOBuffer(data), "application/octet-stream")
    upload_resp = HTTP.post(
        "$fileserver_url/file/$upload_id",
        ["X-UploadToken" => token],
        HTTP.Form(Dict("file" => part))
    )
    upload_json = JSON.parse(String(upload_resp.body))
    file_id = upload_json["id"]
    return Dict(
        "status" => upload_resp.status,
        "uploadid" => upload_id,
        "fileid" => file_id,
        "url" => "$fileserver_url/file/$upload_id/$file_id/$dataname"
    )
end
# Sender: Send DataFrame tables via smartsend
# Sender: build one small and one large DataFrame and ship both in a single
# NATSBridge.smartsend call. The small table fits under `size_threshold` and
# should travel inline (direct transport, Base64-encoded Arrow IPC); the large
# one should be uploaded via `plik_upload_handler` and sent as a link.
function test_table_send()
    # Small DataFrame — 10 rows, comfortably under the 1 MB threshold.
    small_df = DataFrame(
        id = 1:10,
        name = ["Alice", "Bob", "Charlie", "Diana", "Eve", "Frank", "Grace", "Henry", "Ivy", "Jack"],
        score = [95, 88, 92, 85, 90, 78, 95, 88, 92, 85],
        category = ["A", "B", "A", "B", "A", "B", "A", "B", "A", "B"]
    )
    # Large DataFrame (~2 MB of generated data) to force link transport.
    large_ids = 1:50000
    large_names = ["User_$i" for i in 1:50000]
    large_scores = rand(1:100, 50000)
    large_categories = ["Category_$(rand(1:10))" for _ in 1:50000]  # loop var was unused
    large_df = DataFrame(
        id = large_ids,
        name = large_names,
        score = large_scores,
        category = large_categories
    )
    # (dataname, data, type) tuples understood by smartsend.
    data1 = ("small_table", small_df, "table")
    data2 = ("large_table", large_df, "table")
    # Send both payloads in one envelope; the upload handler is only invoked
    # for payloads exceeding `size_threshold`.
    env = NATSBridge.smartsend(
        SUBJECT,
        [data1, data2], # List of (dataname, data, type) tuples
        nats_url = NATS_URL,
        fileserver_url = FILESERVER_URL,
        fileserverUploadHandler = plik_upload_handler,
        size_threshold = 1_000_000, # 1MB threshold
        correlation_id = correlation_id,
        msg_purpose = "chat",
        sender_name = "table_sender",
        receiver_name = "",
        receiver_id = "",
        reply_to = "",
        reply_to_msg_id = ""
    )
    log_trace("Sent message with $(length(env.payloads)) payloads")
    # Log how each payload was transported.
    for (i, payload) in enumerate(env.payloads)
        # BUGFIX: was '$payload.dataname', which interpolates `payload` and then
        # appends the literal text ".dataname"; field access needs $(...).
        log_trace("Payload $i ('$(payload.dataname)'):")
        log_trace("  Transport: $(payload.transport)")
        log_trace("  Type: $(payload.type)")
        log_trace("  Size: $(payload.size) bytes")
        log_trace("  Encoding: $(payload.encoding)")
        if payload.transport == "link"
            # For link transport the payload data is the download URL.
            log_trace("  URL: $(payload.data)")
        end
    end
end
# --- Script entry point: announce the test, run the sender, report completion. ---
println("Starting DataFrame table transport test...")
println(string("Correlation ID: ", correlation_id))
println("start smartsend for tables")
test_table_send()
println("Test completed.")