This commit is contained in:
2026-02-11 07:14:21 +07:00
parent 615c537552
commit 68c0aa42ee
3 changed files with 3 additions and 217 deletions

View File

@@ -1,66 +0,0 @@
#!/usr/bin/env julia
# Scenario 3: Julia-to-Julia Service Communication
# Tests bi-directional communication between two Julia services
using Arrow
using DataFrames
using Dates
using JSON3
using NATS
using UUIDs
# Include the bridge module
include("../src/julia_bridge.jl")
using .BiDirectionalBridge
# Configuration
const SUBJECT1 = "julia_to_js"
const SUBJECT2 = "js_to_julia"
const RESPONSE_SUBJECT = "response"
const NATS_URL = "nats://localhost:4222"
# Create correlation ID for tracing
correlation_id = string(uuid4())
# Julia-to-Julia Test: Large Arrow Table
"""
    test_julia_to_julia_large_table()

Subscribe on `SUBJECT2`, smart-receive each incoming message, log whether the
payload arrived inline ("direct" transport) or as a fileserver URL ("link"
transport), and publish a JSON acknowledgment on `RESPONSE_SUBJECT`.

Listens for 5 seconds, then closes the NATS connection. The connection is
always released via `finally`, even if subscription setup throws.
"""
function test_julia_to_julia_large_table()
    conn = NATS.Connection(NATS_URL)
    try
        # Subscriber on SUBJECT2 to receive data from the Julia sender.
        NATS.subscribe(conn, SUBJECT2) do msg
            log_trace("[$(Dates.now())] Received on $SUBJECT2")
            # SmartReceive unwraps the envelope; the transport field tells us
            # whether result.data holds the bytes or a link to fetch them.
            result = SmartReceive(msg)
            if result.envelope.transport == "direct"
                log_trace("Received direct transport with $(length(result.data)) bytes")
            else
                # For link transport, result.data is the URL
                log_trace("Received link transport at $(result.data)")
            end
            # Acknowledge back to the sender, echoing the correlation id so the
            # round trip can be traced end to end.
            response = Dict(
                "status" => "Processed",
                "correlation_id" => result.envelope.correlation_id,
                "timestamp" => Dates.now(),
            )
            # BUG FIX: JSON3.jl has no `stringify`; serialization is JSON3.write.
            NATS.publish(conn, RESPONSE_SUBJECT, JSON3.write(response))
        end
        # Keep the subscription alive long enough to receive test traffic.
        sleep(5)
    finally
        NATS.close(conn)
    end
end
# Print `message` to stdout, prefixed with the current timestamp and the
# script-wide correlation id so log lines can be traced across services.
function log_trace(message)
    println("[$(Dates.now())] [Correlation: $correlation_id] $message")
end
# Run the test
test_julia_to_julia_large_table()

View File

@@ -1,148 +0,0 @@
# Test Scenarios for Bi-Directional Data Bridge
## Scenario 1: Command & Control (Small JSON)
Tests small JSON payloads (< 1MB) sent directly via NATS.
### Julia (Receiver)
```julia
using NATS
using JSON3
# Subscribe to control subject
subscribe(nats, "control") do msg
env = MessageEnvelope(String(msg.data))
# Parse JSON payload
config = JSON3.read(env.payload)
# Execute simulation with parameters
step_size = config.step_size
iterations = config.iterations
# Send acknowledgment
response = Dict("status" => "Running", "correlation_id" => env.correlation_id)
publish(nats, "control_response", JSON3.write(response))
end
```
### JavaScript (Sender)
```javascript
const { SmartSend } = require('./js_bridge');
// Create small JSON config
const config = {
step_size: 0.01,
iterations: 1000
};
// Send via SmartSend with type="json"
await SmartSend("control", config, "json");
```
## Scenario 2: Deep Dive Analysis (Large Arrow Table)
Tests large Arrow tables (> 1MB) sent via HTTP fileserver.
### Julia (Sender)
```julia
using Arrow
using DataFrames
# Create large DataFrame (500MB, 10 million rows)
df = DataFrame(
id = 1:10_000_000,
value = rand(10_000_000),
category = rand(["A", "B", "C"], 10_000_000)
)
# Convert to Arrow IPC stream and send (SmartSend is a plain Julia call; `await` is JS-only)
SmartSend("analysis_results", df, "table")
```
### JavaScript (Receiver)
```javascript
const { SmartReceive } = require('./js_bridge');
// Receive message with URL
const result = await SmartReceive(msg);
// Fetch data from HTTP server
const table = result.data;
// Load into Perspective.js or D3
// Use table data for visualization
```
## Scenario 3: Live Binary Processing
Tests binary audio data sent from JS to Julia for FFT/AI transcription.
### JavaScript (Sender)
```javascript
const { SmartSend } = require('./js_bridge');
// Capture an audio chunk (2 seconds, 44.1kHz, 1 channel)
const binaryData = await navigator.mediaDevices.getUserMedia({ audio: true });
// Send as binary with metadata headers
await SmartSend("binary_input", binaryData, "binary", {
metadata: {
sample_rate: 44100,
channels: 1
}
});
```
### Julia (Receiver)
```julia
using WAV
using DSP
# Receive binary data
function process_binary(data)
# Perform FFT or AI transcription
spectrum = fft(data)
# Send results back (JSON + Arrow table); SmartSend is a plain Julia call, no `await`
results = Dict("transcription" => "sample text", "spectrum" => spectrum)
SmartSend("binary_output", results, "json")
end
```
## Scenario 4: Catch-Up (JetStream)
Tests temporal decoupling with NATS JetStream.
### Julia (Producer)
```julia
# Publish to JetStream
using NATS
function publish_health_status(nats)
jetstream = JetStream(nats, "health_updates")
while true
status = Dict("cpu" => rand(), "memory" => rand())
publish(jetstream, "health", status)
sleep(5) # Every 5 seconds
end
end
```
### JavaScript (Consumer)
```javascript
const { connect } = require('nats');
const nc = await connect({ servers: ['nats://localhost:4222'] });
const js = nc.jetstream();
// Request replay from last 10 minutes
const consumer = await js.pullSubscribe("health", {
durable_name: "catchup",
max_batch: 100,
max_ack_wait: 30000
});
// Process historical and real-time messages
for await (const msg of consumer) {
const result = await SmartReceive(msg);
// Process the data
msg.ack();
}

View File

@@ -94,10 +94,10 @@ function test_large_binary_receive()
# Verify file size
original_size = length(read(LARGE_FILE_PATH))
if file_size == original_size
log_trace("SUCCESS: File size matches! Original: $original_size bytes")
if file_size == result.envelope.metadata["content_length"]
log_trace("SUCCESS: File size matches! Original: $(result.envelope.metadata["content_length"]) bytes")
else
log_trace("WARNING: File size mismatch! Original: $original_size, Received: $file_size")
log_trace("WARNING: File size mismatch! Original: $(result.envelope.metadata["content_length"]), Received: $file_size")
end
end
end