update
This commit is contained in:
79
test/test_js_to_js_dict_receiver.js
Normal file
79
test/test_js_to_js_dict_receiver.js
Normal file
@@ -0,0 +1,79 @@
|
||||
#!/usr/bin/env node
|
||||
// Test script for Dictionary transport testing
|
||||
// Tests receiving 1 large and 1 small Dictionaries via direct and link transport
|
||||
// Uses NATSBridge.js smartreceive with "dictionary" type
|
||||
|
||||
// NATSBridge helpers. FIX: `log_trace` is deliberately NOT imported here —
// this script declares its own `function log_trace` below, and destructuring
// the same name into a `const` would be a redeclaration SyntaxError.
const { smartreceive } = require('./src/NATSBridge');

// Configuration
const SUBJECT = "/NATSBridge_dict_test";   // NATS subject for the dict test
const NATS_URL = "nats.yiem.cc";           // NATS server host
|
||||
|
||||
// Helper: timestamped console logger (no correlation ID in this receiver).
function log_trace(message) {
  const ts = new Date().toISOString();
  console.log(`[${ts}] ${message}`);
}
|
||||
|
||||
// Receiver: listen for messages and verify Dictionary handling.
// Connects to NATS, subscribes to SUBJECT, decodes each message with
// smartreceive, prints any dictionary payloads and saves each one to
// ./received_<dataname>.json.
async function test_dict_receive() {
  // Connect to NATS
  const { connect } = require('nats');
  const nc = await connect({ servers: [NATS_URL] });

  // Subscribe to the subject
  const sub = nc.subscribe(SUBJECT);

  // Stop listening after 2 minutes. FIX: this must be scheduled BEFORE the
  // `for await` loop below — the loop never returns on its own, so a timer
  // placed after it (as in the original) is never registered and the script
  // never exits.
  setTimeout(() => {
    nc.close();
    process.exit(0);
  }, 120000);

  for await (const msg of sub) {
    log_trace(`Received message on ${msg.subject}`);

    // Use NATSBridge.smartreceive to handle the data (retries with
    // backoff between baseDelay and maxDelay milliseconds)
    const result = await smartreceive(msg, {
      maxRetries: 5,
      baseDelay: 100,
      maxDelay: 5000
    });

    // Result is a list of {dataname, data, type} objects
    for (const { dataname, data, type } of result) {
      // A plain (non-array) object is treated as a Dictionary
      if (typeof data === 'object' && data !== null && !Array.isArray(data)) {
        log_trace(`Received Dictionary '${dataname}' of type ${type}`);

        // Display dictionary contents
        console.log("  Contents:");
        for (const [key, value] of Object.entries(data)) {
          console.log(`    ${key} => ${value}`);
        }

        // Save to JSON file
        const fs = require('fs');
        const output_path = `./received_${dataname}.json`;
        const json_str = JSON.stringify(data, null, 2);
        fs.writeFileSync(output_path, json_str);
        log_trace(`Saved Dictionary to ${output_path}`);
      } else {
        log_trace(`Received unexpected data type for '${dataname}': ${typeof data}`);
      }
    }
  }
}
|
||||
|
||||
// Run the test
console.log("Starting Dictionary transport test...");
console.log("Note: This receiver will wait for messages from the sender.");
console.log("Run test_js_to_js_dict_sender.js first to send test data.");

// Run receiver. FIX: test_dict_receive is async — without a rejection
// handler a connect/receive failure becomes an unhandled promise rejection.
console.log("testing smartreceive");
test_dict_receive().catch((err) => {
  console.error("Receiver failed:", err);
  process.exitCode = 1;
});

// NOTE: logged immediately — the receiver above is still running.
console.log("Test completed.");
|
||||
164
test/test_js_to_js_dict_sender.js
Normal file
164
test/test_js_to_js_dict_sender.js
Normal file
@@ -0,0 +1,164 @@
|
||||
#!/usr/bin/env node
|
||||
// Test script for Dictionary transport testing
|
||||
// Tests sending 1 large and 1 small Dictionaries via direct and link transport
|
||||
// Uses NATSBridge.js smartsend with "dictionary" type
|
||||
|
||||
// NATSBridge helpers. FIX: `log_trace` is deliberately NOT imported here —
// this script declares its own `function log_trace` below, and destructuring
// the same name into a `const` would be a redeclaration SyntaxError.
const { smartsend, uuid4 } = require('./src/NATSBridge');

// Configuration
const SUBJECT = "/NATSBridge_dict_test";          // NATS subject for the dict test
const NATS_URL = "nats.yiem.cc";                  // NATS server host
const FILESERVER_URL = "http://192.168.88.104:8080";  // plik file server for link transport

// Create correlation ID for tracing
const correlation_id = uuid4();
|
||||
|
||||
// Helper: timestamped logger tagged with this run's correlation ID.
function log_trace(message) {
  const ts = new Date().toISOString();
  console.log(`[${ts}] [Correlation: ${correlation_id}] ${message}`);
}
|
||||
|
||||
// File upload handler for plik server.
// Uploads `data` (passed to Blob, so Buffer/TypedArray/string all work)
// under the name `dataname` to the plik server at `fileserver_url` and
// returns { status, uploadid, fileid, url }.
// Uses OneShot mode, so the returned link is valid for a single download.
// `correlation_id` is accepted for handler-interface compatibility but is
// unused in this variant.
// Throws Error on any non-OK HTTP response.
async function plik_upload_handler(fileserver_url, dataname, data, correlation_id) {
  // Step 1: ask the server for an upload ID + token (OneShot upload)
  const url_getUploadID = `${fileserver_url}/upload`;
  const headers = {
    "Content-Type": "application/json"
  };
  const body = JSON.stringify({ OneShot: true });

  let response = await fetch(url_getUploadID, {
    method: "POST",
    headers: headers,
    body: body
  });

  if (!response.ok) {
    throw new Error(`Failed to get upload ID: ${response.status} ${response.statusText}`);
  }

  const responseJson = await response.json();
  const uploadid = responseJson.id;
  const uploadtoken = responseJson.uploadToken;

  // Step 2: upload the file bytes as multipart form data
  const formData = new FormData();
  const blob = new Blob([data], { type: "application/octet-stream" });
  formData.append("file", blob, dataname);

  response = await fetch(`${fileserver_url}/file/${uploadid}`, {
    method: "POST",
    headers: {
      "X-UploadToken": uploadtoken
    },
    body: formData
  });

  if (!response.ok) {
    throw new Error(`Failed to upload file: ${response.status} ${response.statusText}`);
  }

  const fileResponseJson = await response.json();
  const fileid = fileResponseJson.id;

  // Build the public download URL for the uploaded file
  const url = `${fileserver_url}/file/${uploadid}/${fileid}/${encodeURIComponent(dataname)}`;

  return {
    status: response.status,
    uploadid: uploadid,
    fileid: fileid,
    url: url
  };
}
|
||||
|
||||
// Sender: Send Dictionaries via smartsend.
// Builds one small dict (stays under the 1MB threshold -> direct transport)
// and one large dict of 50k synthetic rows (exceeds the threshold -> link
// transport via plik_upload_handler), sends both in a single envelope, then
// logs per-payload transport details from the returned envelope.
async function test_dict_send() {
  // Create a small Dictionary (will use direct transport)
  const small_dict = {
    name: "Alice",
    age: 30,
    scores: [95, 88, 92],
    metadata: {
      height: 155,
      weight: 55
    }
  };

  // Create a large Dictionary (will use link transport if > 1MB)
  const large_dict_ids = [];
  const large_dict_names = [];
  const large_dict_scores = [];
  const large_dict_categories = [];

  // 50k rows of synthetic user data — enough to exceed the 1MB threshold
  for (let i = 0; i < 50000; i++) {
    large_dict_ids.push(i + 1);
    large_dict_names.push(`User_${i}`);
    large_dict_scores.push(Math.floor(Math.random() * 100) + 1);
    large_dict_categories.push(`Category_${Math.floor(Math.random() * 10) + 1}`);
  }

  const large_dict = {
    ids: large_dict_ids,
    names: large_dict_names,
    scores: large_dict_scores,
    categories: large_dict_categories,
    metadata: {
      source: "test_generator",
      timestamp: new Date().toISOString()
    }
  };

  // Test data 1: small Dictionary
  const data1 = { dataname: "small_dict", data: small_dict, type: "dictionary" };

  // Test data 2: large Dictionary
  const data2 = { dataname: "large_dict", data: large_dict, type: "dictionary" };

  // Use smartsend with dictionary type
  // For small Dictionary: will use direct transport (JSON encoded)
  // For large Dictionary: will use link transport (uploaded to fileserver)
  const env = await smartsend(
    SUBJECT,
    [data1, data2],
    {
      natsUrl: NATS_URL,
      fileserverUrl: FILESERVER_URL,
      fileserverUploadHandler: plik_upload_handler,
      sizeThreshold: 1_000_000,
      correlationId: correlation_id,
      msgPurpose: "chat",
      senderName: "dict_sender",
      receiverName: "",
      receiverId: "",
      replyTo: "",
      replyToMsgId: ""
    }
  );

  log_trace(`Sent message with ${env.payloads.length} payloads`);

  // Log transport type for each payload
  for (let i = 0; i < env.payloads.length; i++) {
    const payload = env.payloads[i];
    log_trace(`Payload ${i + 1} ('${payload.dataname}'):`);
    log_trace(`  Transport: ${payload.transport}`);
    log_trace(`  Type: ${payload.type}`);
    log_trace(`  Size: ${payload.size} bytes`);
    log_trace(`  Encoding: ${payload.encoding}`);

    // Link-transport payloads carry the download URL in `data`
    if (payload.transport === "link") {
      log_trace(`  URL: ${payload.data}`);
    }
  }
}
|
||||
|
||||
// Run the test
console.log("Starting Dictionary transport test...");
console.log(`Correlation ID: ${correlation_id}`);

// Run sender. FIX: test_dict_send is async — attach a rejection handler so
// a send failure doesn't become an unhandled promise rejection.
console.log("start smartsend for dictionaries");
test_dict_send().catch((err) => {
  console.error("Sender failed:", err);
  process.exitCode = 1;
});

// NOTE: logged immediately — the async sender above may still be running.
console.log("Test completed.");
|
||||
70
test/test_js_to_js_file_receiver.js
Normal file
70
test/test_js_to_js_file_receiver.js
Normal file
@@ -0,0 +1,70 @@
|
||||
#!/usr/bin/env node
|
||||
// Test script for large payload testing using binary transport
|
||||
// Tests receiving a large file (> 1MB) via smartsend with binary type
|
||||
|
||||
// NATSBridge helpers. FIX: `log_trace` is deliberately NOT imported here —
// this script declares its own `function log_trace` below, and destructuring
// the same name into a `const` would be a redeclaration SyntaxError.
const { smartreceive } = require('./src/NATSBridge');

// Configuration
const SUBJECT = "/NATSBridge_test";   // NATS subject for the binary file test
const NATS_URL = "nats.yiem.cc";      // NATS server host
|
||||
|
||||
// Helper: timestamped console logger (no correlation ID in this receiver).
function log_trace(message) {
  const ts = new Date().toISOString();
  console.log(`[${ts}] ${message}`);
}
|
||||
|
||||
// Receiver: listen for messages and verify large binary payload handling.
// Connects to NATS, subscribes to SUBJECT, decodes each message with
// smartreceive, and writes every binary payload to ./new_<dataname>.
async function test_large_binary_receive() {
  // Connect to NATS
  const { connect } = require('nats');
  const nc = await connect({ servers: [NATS_URL] });

  // Subscribe to the subject
  const sub = nc.subscribe(SUBJECT);

  // Stop listening after 2 minutes. FIX: this must be scheduled BEFORE the
  // `for await` loop below — the loop never returns on its own, so a timer
  // placed after it (as in the original) is never registered and the script
  // never exits.
  setTimeout(() => {
    nc.close();
    process.exit(0);
  }, 120000);

  for await (const msg of sub) {
    log_trace(`Received message on ${msg.subject}`);

    // Use NATSBridge.smartreceive to handle the data
    const result = await smartreceive(msg, {
      maxRetries: 5,
      baseDelay: 100,
      maxDelay: 5000
    });

    // Result is a list of {dataname, data, type} objects
    for (const { dataname, data, type } of result) {
      if (data instanceof Uint8Array || Array.isArray(data)) {
        const file_size = data.length;
        log_trace(`Received ${file_size} bytes of binary data for '${dataname}' of type ${type}`);

        // Save received data to a test file
        const fs = require('fs');
        const output_path = `./new_${dataname}`;
        fs.writeFileSync(output_path, Buffer.from(data));
        log_trace(`Saved received data to ${output_path}`);
      } else {
        log_trace(`Received unexpected data type for '${dataname}': ${typeof data}`);
      }
    }
  }
}
|
||||
|
||||
// Run the test
console.log("Starting large binary payload test...");

// Run receiver. FIX: test_large_binary_receive is async — attach a rejection
// handler so a failure doesn't become an unhandled promise rejection.
console.log("testing smartreceive");
test_large_binary_receive().catch((err) => {
  console.error("Receiver failed:", err);
  process.exitCode = 1;
});

// NOTE: logged immediately — the receiver above is still running.
console.log("Test completed.");
|
||||
143
test/test_js_to_js_file_sender.js
Normal file
143
test/test_js_to_js_file_sender.js
Normal file
@@ -0,0 +1,143 @@
|
||||
#!/usr/bin/env node
|
||||
// Test script for large payload testing using binary transport
|
||||
// Tests sending a large file (> 1MB) via smartsend with binary type
|
||||
|
||||
// NATSBridge helpers. FIX: `log_trace` is deliberately NOT imported here —
// this script declares its own `function log_trace` below, and destructuring
// the same name into a `const` would be a redeclaration SyntaxError.
const { smartsend, uuid4 } = require('./src/NATSBridge');

// Configuration
const SUBJECT = "/NATSBridge_test";                   // NATS subject for the binary file test
const NATS_URL = "nats.yiem.cc";                      // NATS server host
const FILESERVER_URL = "http://192.168.88.104:8080";  // plik file server for link transport

// Create correlation ID for tracing
const correlation_id = uuid4();
|
||||
|
||||
// Helper: timestamped logger tagged with this run's correlation ID.
function log_trace(message) {
  const ts = new Date().toISOString();
  console.log(`[${ts}] [Correlation: ${correlation_id}] ${message}`);
}
|
||||
|
||||
// File upload handler for plik server.
// Uploads `data` under the name `dataname` to the plik server at
// `fileserver_url` (OneShot mode: the link is valid for a single download)
// and returns { status, uploadid, fileid, url }.
// Throws Error on any non-OK HTTP response.
// FIX: the original called log_trace(correlation_id, `...`) with two
// arguments, but the local log_trace(message) takes one — the real message
// was silently dropped and only the correlation ID was printed. log_trace
// already embeds the correlation ID, so pass only the message.
async function plik_upload_handler(fileserver_url, dataname, data, correlation_id) {
  log_trace(`Uploading ${dataname} to fileserver: ${fileserver_url}`);

  // Step 1: Get upload ID and token
  const url_getUploadID = `${fileserver_url}/upload`;
  const headers = {
    "Content-Type": "application/json"
  };
  const body = JSON.stringify({ OneShot: true });

  let response = await fetch(url_getUploadID, {
    method: "POST",
    headers: headers,
    body: body
  });

  if (!response.ok) {
    throw new Error(`Failed to get upload ID: ${response.status} ${response.statusText}`);
  }

  const responseJson = await response.json();
  const uploadid = responseJson.id;
  const uploadtoken = responseJson.uploadToken;

  // Step 2: Upload file data
  const url_upload = `${fileserver_url}/file/${uploadid}`;

  // Create multipart form data
  const formData = new FormData();
  const blob = new Blob([data], { type: "application/octet-stream" });
  formData.append("file", blob, dataname);

  response = await fetch(url_upload, {
    method: "POST",
    headers: {
      "X-UploadToken": uploadtoken
    },
    body: formData
  });

  if (!response.ok) {
    throw new Error(`Failed to upload file: ${response.status} ${response.statusText}`);
  }

  const fileResponseJson = await response.json();
  const fileid = fileResponseJson.id;

  // Build the download URL
  const url = `${fileserver_url}/file/${uploadid}/${fileid}/${encodeURIComponent(dataname)}`;

  log_trace(`Uploaded to URL: ${url}`);

  return {
    status: response.status,
    uploadid: uploadid,
    fileid: fileid,
    url: url
  };
}
|
||||
|
||||
// Sender: Send large binary file via smartsend.
// Reads two local zip files (one expected > 1MB, one smaller), sends both
// as "binary" payloads in a single envelope, then logs which transport the
// first payload used. Requires ./testFile_large.zip and ./testFile_small.zip
// to exist in the working directory — fs.readFileSync throws otherwise.
async function test_large_binary_send() {
  // Read the large file as binary data
  const fs = require('fs');

  // Test data 1
  const file_path1 = './testFile_large.zip';
  const file_data1 = fs.readFileSync(file_path1);
  const filename1 = 'testFile_large.zip';
  const data1 = { dataname: filename1, data: file_data1, type: "binary" };

  // Test data 2
  const file_path2 = './testFile_small.zip';
  const file_data2 = fs.readFileSync(file_path2);
  const filename2 = 'testFile_small.zip';
  const data2 = { dataname: filename2, data: file_data2, type: "binary" };

  // Use smartsend with binary type - will automatically use link transport
  // if file size exceeds the threshold (1MB by default)
  const env = await smartsend(
    SUBJECT,
    [data1, data2],
    {
      natsUrl: NATS_URL,
      fileserverUrl: FILESERVER_URL,
      fileserverUploadHandler: plik_upload_handler,
      sizeThreshold: 1_000_000,
      correlationId: correlation_id,
      msgPurpose: "chat",
      senderName: "sender",
      receiverName: "",
      receiverId: "",
      replyTo: "",
      replyToMsgId: ""
    }
  );

  // NOTE(review): only payloads[0] is inspected below; payloads[1]'s
  // transport is not logged — confirm that is intentional.
  log_trace(`Sent message with transport: ${env.payloads[0].transport}`);
  log_trace(`Envelope type: ${env.payloads[0].type}`);

  // Check if link transport was used
  if (env.payloads[0].transport === "link") {
    log_trace("Using link transport - file uploaded to HTTP server");
    log_trace(`URL: ${env.payloads[0].data}`);
  } else {
    log_trace("Using direct transport - payload sent via NATS");
  }
}
|
||||
|
||||
// Run the test
console.log("Starting large binary payload test...");
console.log(`Correlation ID: ${correlation_id}`);

// Run sender. FIX: test_large_binary_send is async — attach a rejection
// handler so a failure doesn't become an unhandled promise rejection.
console.log("start smartsend");
test_large_binary_send().catch((err) => {
  console.error("Sender failed:", err);
  process.exitCode = 1;
});

// The receiver lives in test_js_to_js_file_receiver.js; run it separately.

// NOTE: logged immediately — the async sender above may still be running.
console.log("Test completed.");
|
||||
276
test/test_js_to_js_mix_payload_sender.js
Normal file
276
test/test_js_to_js_mix_payload_sender.js
Normal file
@@ -0,0 +1,276 @@
|
||||
#!/usr/bin/env node
|
||||
// Test script for mixed-content message testing
|
||||
// Tests sending a mix of text, json, table, image, audio, video, and binary data
|
||||
// from JavaScript serviceA to JavaScript serviceB using NATSBridge.js smartsend
|
||||
//
|
||||
// This test demonstrates that any combination and any number of mixed content
|
||||
// can be sent and received correctly.
|
||||
|
||||
// NATSBridge helpers. FIX: `log_trace` is deliberately NOT imported here —
// this script declares its own `function log_trace` below, and destructuring
// the same name into a `const` would be a redeclaration SyntaxError.
// NOTE(review): _serialize_data is imported but never used in this script;
// kept for now — confirm before removing.
const { smartsend, uuid4, _serialize_data } = require('./src/NATSBridge');

// Configuration
const SUBJECT = "/NATSBridge_mix_test";               // NATS subject for the mixed-content test
const NATS_URL = "nats.yiem.cc";                      // NATS server host
const FILESERVER_URL = "http://192.168.88.104:8080";  // plik file server for link transport

// Create correlation ID for tracing
const correlation_id = uuid4();
|
||||
|
||||
// Helper: timestamped logger tagged with this run's correlation ID.
function log_trace(message) {
  const ts = new Date().toISOString();
  console.log(`[${ts}] [Correlation: ${correlation_id}] ${message}`);
}
|
||||
|
||||
// File upload handler for plik server.
// Uploads `data` under the name `dataname` to the plik server at
// `fileserver_url` (OneShot mode: the link is valid for a single download)
// and returns { status, uploadid, fileid, url }.
// Throws Error on any non-OK HTTP response.
// FIX: the original called log_trace(correlation_id, `...`) with two
// arguments, but the local log_trace(message) takes one — the real message
// was silently dropped and only the correlation ID was printed. log_trace
// already embeds the correlation ID, so pass only the message.
async function plik_upload_handler(fileserver_url, dataname, data, correlation_id) {
  log_trace(`Uploading ${dataname} to fileserver: ${fileserver_url}`);

  // Step 1: Get upload ID and token
  const url_getUploadID = `${fileserver_url}/upload`;
  const headers = {
    "Content-Type": "application/json"
  };
  const body = JSON.stringify({ OneShot: true });

  let response = await fetch(url_getUploadID, {
    method: "POST",
    headers: headers,
    body: body
  });

  if (!response.ok) {
    throw new Error(`Failed to get upload ID: ${response.status} ${response.statusText}`);
  }

  const responseJson = await response.json();
  const uploadid = responseJson.id;
  const uploadtoken = responseJson.uploadToken;

  // Step 2: Upload file data
  const url_upload = `${fileserver_url}/file/${uploadid}`;

  // Create multipart form data
  const formData = new FormData();
  const blob = new Blob([data], { type: "application/octet-stream" });
  formData.append("file", blob, dataname);

  response = await fetch(url_upload, {
    method: "POST",
    headers: {
      "X-UploadToken": uploadtoken
    },
    body: formData
  });

  if (!response.ok) {
    throw new Error(`Failed to upload file: ${response.status} ${response.statusText}`);
  }

  const fileResponseJson = await response.json();
  const fileid = fileResponseJson.id;

  // Build the download URL
  const url = `${fileserver_url}/file/${uploadid}/${fileid}/${encodeURIComponent(dataname)}`;

  log_trace(`Uploaded to URL: ${url}`);

  return {
    status: response.status,
    uploadid: uploadid,
    fileid: fileid,
    url: url
  };
}
|
||||
|
||||
// Helper: Create sample data for each content type.
// Returns { text_data, dict_data, image_data, large_image_data, audio_data,
//           large_audio_data, video_data, large_video_data, binary_data,
//           large_binary_data }.
// Small samples stay well under the 1MB threshold (direct transport);
// *_large samples exceed 1MB so smartsend should use link transport.
// Table samples are omitted (they would require apache-arrow).
function create_sample_data() {
  // Fill helper: n random bytes (0-254, matching the original generator's
  // Math.floor(Math.random() * 255) range)
  const random_bytes = (n) => {
    const buf = new Uint8Array(n);
    for (let i = 0; i < n; i++) {
      buf[i] = Math.floor(Math.random() * 255);
    }
    return buf;
  };

  // PNG signature used to make the image buffers look like PNG files
  const PNG_HEADER = [0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A];

  // Text data (small - direct transport)
  const text_data = "Hello! This is a test chat message. 🎉\nHow are you doing today? 😊";

  // Dictionary/JSON data (medium - could be direct or link)
  const dict_data = {
    type: "chat",
    sender: "serviceA",
    receiver: "serviceB",
    metadata: {
      timestamp: new Date().toISOString(),
      priority: "high",
      tags: ["urgent", "chat", "test"]
    },
    content: {
      text: "This is a JSON-formatted chat message with nested structure.",
      format: "markdown",
      mentions: ["user1", "user2"]
    }
  };

  // Image data (small binary - direct transport): header + 10x10 RGB pixels.
  // FIX: the original allocated only 128 bytes and then wrote 300 pixel
  // bytes at offset 8; out-of-range writes to a Uint8Array are silently
  // dropped, so most of the pixel data was lost. Size the buffer to
  // header + width*height*3 as the original comment intended.
  const image_width = 10;
  const image_height = 10;
  const image_data = new Uint8Array(image_width * image_height * 3 + 8);
  image_data.set(PNG_HEADER, 0);
  // Simple RGB data (10*10*3 = 300 bytes)
  for (let i = 0; i < image_width * image_height * 3; i++) {
    image_data[i + 8] = 0xFF; // Red pixel
  }

  // Image data (large - link transport)
  const large_image_width = 500;
  const large_image_height = 1000;
  const large_image_data = new Uint8Array(large_image_width * large_image_height * 3 + 8);
  large_image_data.set(PNG_HEADER, 0);
  // Random RGB data
  for (let i = 0; i < large_image_width * large_image_height * 3; i++) {
    large_image_data[i + 8] = Math.floor(Math.random() * 255);
  }

  // Audio data (small binary - direct transport)
  const audio_data = random_bytes(100);

  // Audio data (large - link transport)
  const large_audio_data = random_bytes(1_500_000);

  // Video data (small binary - direct transport)
  const video_data = random_bytes(150);

  // Video data (large - link transport)
  const large_video_data = random_bytes(1_500_000);

  // Binary data (small - direct transport)
  const binary_data = random_bytes(200);

  // Binary data (large - link transport)
  const large_binary_data = random_bytes(1_500_000);

  return {
    text_data,
    dict_data,
    image_data,
    large_image_data,
    audio_data,
    large_audio_data,
    video_data,
    large_video_data,
    binary_data,
    large_binary_data
  };
}
|
||||
|
||||
// Sender: Send mixed content via smartsend.
// Sends text + dictionary (small, direct transport) alongside four large
// binary payloads (image/audio/video/binary, link transport) in one
// envelope, then logs per-payload transport details and a summary.
// NOTE(review): the small image/audio/video/binary samples are destructured
// from create_sample_data() but never added to `payloads` — presumably
// intentional, confirm.
async function test_mix_send() {
  // Create sample data
  const { text_data, dict_data, image_data, large_image_data, audio_data, large_audio_data, video_data, large_video_data, binary_data, large_binary_data } = create_sample_data();

  // Create payloads list - mixed content with both small and large data
  // Small data uses direct transport, large data uses link transport
  const payloads = [
    // Small data (direct transport) - text, dictionary
    { dataname: "chat_text", data: text_data, type: "text" },
    { dataname: "chat_json", data: dict_data, type: "dictionary" },
    // { dataname: "chat_table_small", data: table_data_small, type: "table" },

    // Large data (link transport) - large image, large audio, large video, large binary
    // { dataname: "chat_table_large", data: table_data_large, type: "table" },
    { dataname: "user_image_large", data: large_image_data, type: "image" },
    { dataname: "audio_clip_large", data: large_audio_data, type: "audio" },
    { dataname: "video_clip_large", data: large_video_data, type: "video" },
    { dataname: "binary_file_large", data: large_binary_data, type: "binary" }
  ];

  // Use smartsend with mixed content
  const env = await smartsend(
    SUBJECT,
    payloads,
    {
      natsUrl: NATS_URL,
      fileserverUrl: FILESERVER_URL,
      fileserverUploadHandler: plik_upload_handler,
      sizeThreshold: 1_000_000,
      correlationId: correlation_id,
      msgPurpose: "chat",
      senderName: "mix_sender",
      receiverName: "",
      receiverId: "",
      replyTo: "",
      replyToMsgId: ""
    }
  );

  log_trace(`Sent message with ${env.payloads.length} payloads`);

  // Log transport type for each payload
  for (let i = 0; i < env.payloads.length; i++) {
    const payload = env.payloads[i];
    log_trace(`Payload ${i + 1} ('${payload.dataname}'):`);
    log_trace(`  Transport: ${payload.transport}`);
    log_trace(`  Type: ${payload.type}`);
    log_trace(`  Size: ${payload.size} bytes`);
    log_trace(`  Encoding: ${payload.encoding}`);

    // Link-transport payloads carry the download URL in `data`
    if (payload.transport === "link") {
      log_trace(`  URL: ${payload.data}`);
    }
  }

  // Summary
  console.log("\n--- Transport Summary ---");
  const direct_count = env.payloads.filter(p => p.transport === "direct").length;
  const link_count = env.payloads.filter(p => p.transport === "link").length;
  log_trace(`Direct transport: ${direct_count} payloads`);
  log_trace(`Link transport: ${link_count} payloads`);
}
|
||||
|
||||
// Run the test
console.log("Starting mixed-content transport test...");
console.log(`Correlation ID: ${correlation_id}`);

// Run sender. FIX: test_mix_send is async — attach a rejection handler so
// a failure doesn't become an unhandled promise rejection.
console.log("start smartsend for mixed content");
test_mix_send().catch((err) => {
  console.error("Sender failed:", err);
  process.exitCode = 1;
});

// NOTE: logged immediately — the async sender above may still be running.
console.log("\nTest completed.");
console.log("Note: Run test_js_to_js_mix_receiver.js to receive the messages.");
|
||||
172
test/test_js_to_js_mix_payloads_receiver.js
Normal file
172
test/test_js_to_js_mix_payloads_receiver.js
Normal file
@@ -0,0 +1,172 @@
|
||||
#!/usr/bin/env node
|
||||
// Test script for mixed-content message testing
|
||||
// Tests receiving a mix of text, json, table, image, audio, video, and binary data
|
||||
// from JavaScript serviceA to JavaScript serviceB using NATSBridge.js smartreceive
|
||||
//
|
||||
// This test demonstrates that any combination and any number of mixed content
|
||||
// can be sent and received correctly.
|
||||
|
||||
// NATSBridge helpers. FIX: `log_trace` is deliberately NOT imported here —
// this script declares its own `function log_trace` below, and destructuring
// the same name into a `const` would be a redeclaration SyntaxError.
const { smartreceive } = require('./src/NATSBridge');

// Configuration
const SUBJECT = "/NATSBridge_mix_test";   // NATS subject for the mixed-content test
const NATS_URL = "nats.yiem.cc";          // NATS server host
|
||||
|
||||
// Helper: timestamped console logger (no correlation ID in this receiver).
function log_trace(message) {
  const ts = new Date().toISOString();
  console.log(`[${ts}] ${message}`);
}
|
||||
|
||||
// Receiver: listen for messages and verify mixed content handling.
// Connects to NATS, subscribes to SUBJECT, decodes each message with
// smartreceive, verifies each payload against its declared type
// (text/dictionary/table/image/audio/video/binary), saves it to a
// ./received_<dataname>.* file, and prints per-type summaries.
async function test_mix_receive() {
  // Connect to NATS
  const { connect } = require('nats');
  const nc = await connect({ servers: [NATS_URL] });

  // Subscribe to the subject
  const sub = nc.subscribe(SUBJECT);

  // Stop listening after 2 minutes. FIX: this must be scheduled BEFORE the
  // `for await` loop below — the loop never returns on its own, so a timer
  // placed after it (as in the original) is never registered and the script
  // never exits.
  setTimeout(() => {
    nc.close();
    process.exit(0);
  }, 120000);

  for await (const msg of sub) {
    log_trace(`Received message on ${msg.subject}`);

    // Use NATSBridge.smartreceive to handle the data
    const result = await smartreceive(msg, {
      maxRetries: 5,
      baseDelay: 100,
      maxDelay: 5000
    });

    log_trace(`Received ${result.length} payloads`);

    // Result is a list of {dataname, data, type} objects
    for (const { dataname, data, type } of result) {
      log_trace(`\n=== Payload: ${dataname} (type: ${type}) ===`);

      // Handle different data types
      if (type === "text") {
        // Text data - should be a String
        if (typeof data === 'string') {
          log_trace(`  Type: String`);
          log_trace(`  Length: ${data.length} characters`);

          // Display first 200 characters
          if (data.length > 200) {
            log_trace(`  First 200 chars: ${data.substring(0, 200)}...`);
          } else {
            log_trace(`  Content: ${data}`);
          }

          // Save to file
          const fs = require('fs');
          const output_path = `./received_${dataname}.txt`;
          fs.writeFileSync(output_path, data);
          log_trace(`  Saved to: ${output_path}`);
        } else {
          log_trace(`  ERROR: Expected String, got ${typeof data}`);
        }

      } else if (type === "dictionary") {
        // Dictionary data - should be an object
        if (typeof data === 'object' && data !== null && !Array.isArray(data)) {
          log_trace(`  Type: Object`);
          log_trace(`  Keys: ${Object.keys(data).join(', ')}`);

          // Display nested content
          for (const [key, value] of Object.entries(data)) {
            log_trace(`      ${key} => ${value}`);
          }

          // Save to JSON file
          const fs = require('fs');
          const output_path = `./received_${dataname}.json`;
          const json_str = JSON.stringify(data, null, 2);
          fs.writeFileSync(output_path, json_str);
          log_trace(`  Saved to: ${output_path}`);
        } else {
          log_trace(`  ERROR: Expected Object, got ${typeof data}`);
        }

      } else if (type === "table") {
        // Table data - should be an array of objects (requires apache-arrow)
        log_trace(`  Type: Array (requires apache-arrow for full deserialization)`);
        if (Array.isArray(data)) {
          log_trace(`  Length: ${data.length} items`);
          log_trace(`  First item: ${JSON.stringify(data[0])}`);
        } else {
          log_trace(`  ERROR: Expected Array, got ${typeof data}`);
        }

      } else if (type === "image" || type === "audio" || type === "video" || type === "binary") {
        // Binary data - should be Uint8Array
        if (data instanceof Uint8Array || Array.isArray(data)) {
          log_trace(`  Type: Uint8Array (binary)`);
          log_trace(`  Size: ${data.length} bytes`);

          // Save to file
          const fs = require('fs');
          const output_path = `./received_${dataname}.bin`;
          fs.writeFileSync(output_path, Buffer.from(data));
          log_trace(`  Saved to: ${output_path}`);
        } else {
          log_trace(`  ERROR: Expected Uint8Array, got ${typeof data}`);
        }

      } else {
        log_trace(`  ERROR: Unknown data type '${type}'`);
      }
    }

    // Summary
    console.log("\n=== Verification Summary ===");
    const text_count = result.filter(x => x.type === "text").length;
    const dict_count = result.filter(x => x.type === "dictionary").length;
    const table_count = result.filter(x => x.type === "table").length;
    const image_count = result.filter(x => x.type === "image").length;
    const audio_count = result.filter(x => x.type === "audio").length;
    const video_count = result.filter(x => x.type === "video").length;
    const binary_count = result.filter(x => x.type === "binary").length;

    log_trace(`Text payloads: ${text_count}`);
    log_trace(`Dictionary payloads: ${dict_count}`);
    log_trace(`Table payloads: ${table_count}`);
    log_trace(`Image payloads: ${image_count}`);
    log_trace(`Audio payloads: ${audio_count}`);
    log_trace(`Video payloads: ${video_count}`);
    log_trace(`Binary payloads: ${binary_count}`);

    // Print transport type info for each payload if available
    console.log("\n=== Payload Details ===");
    for (const { dataname, data, type } of result) {
      if (["image", "audio", "video", "binary"].includes(type)) {
        log_trace(`${dataname}: ${data.length} bytes (binary)`);
      } else if (type === "table") {
        log_trace(`${dataname}: ${data.length} items (Array)`);
      } else if (type === "dictionary") {
        log_trace(`${dataname}: ${JSON.stringify(data).length} bytes (Object)`);
      } else if (type === "text") {
        log_trace(`${dataname}: ${data.length} characters (String)`);
      }
    }
  }
}
|
||||
|
||||
// Run the test
console.log("Starting mixed-content transport test...");
console.log("Note: This receiver will wait for messages from the sender.");
console.log("Run test_js_to_js_mix_sender.js first to send test data.");

// Run receiver. The promise is chained so "Test completed." is only printed
// once the receiver actually finishes, and failures are surfaced instead of
// leaving an unhandled rejection (previously the completion message was
// printed immediately, before the async test had run).
console.log("\ntesting smartreceive for mixed content");
test_mix_receive()
  .then(() => console.log("\nTest completed."))
  .catch((err) => {
    console.error("Test failed:", err);
    process.exit(1);
  });
|
||||
86
test/test_js_to_js_table_receiver.js
Normal file
86
test/test_js_to_js_table_receiver.js
Normal file
@@ -0,0 +1,86 @@
|
||||
#!/usr/bin/env node
// Test script for Table transport testing
// Tests receiving 1 large and 1 small Tables via direct and link transport
// Uses NATSBridge.js smartreceive with "table" type
//
// Note: This test requires the apache-arrow library to deserialize table data.
// The JavaScript implementation uses apache-arrow for Arrow IPC deserialization.

// BUGFIX: `log_trace` is intentionally NOT destructured here — this file
// declares its own `function log_trace` below, and importing the same
// identifier would throw "SyntaxError: Identifier 'log_trace' has already
// been declared" at load time.
const { smartreceive } = require('./src/NATSBridge');

// Configuration
const SUBJECT = "/NATSBridge_table_test";
const NATS_URL = "nats.yiem.cc";
|
||||
|
||||
// Helper: Log with correlation ID
// Prints the message prefixed with an ISO-8601 timestamp; returns nothing.
function log_trace(message) {
  console.log(`[${new Date().toISOString()}] ${message}`);
}
|
||||
|
||||
// Receiver: Listen for messages and verify Table handling.
// Connects to NATS, subscribes to SUBJECT, and for every incoming message
// decodes its payloads with smartreceive, prints a preview of each table,
// and saves it to ./received_<dataname>.json.
async function test_table_receive() {
  // Connect to NATS
  const { connect } = require('nats');
  const fs = require('fs'); // hoisted out of the receive loop
  const nc = await connect({ servers: [NATS_URL] });

  // Subscribe to the subject
  const sub = nc.subscribe(SUBJECT);

  // Shut down after 2 minutes. BUGFIX: this must be scheduled BEFORE the
  // receive loop — `for await` over the subscription never terminates on
  // its own, so a timer placed after the loop (as before) was unreachable
  // and the process never exited. (The old "10 seconds" comment was also
  // wrong: the delay is 120000 ms.)
  setTimeout(() => {
    nc.close();
    process.exit(0);
  }, 120000);

  for await (const msg of sub) {
    log_trace(`Received message on ${msg.subject}`);

    // Use NATSBridge.smartreceive to handle the data (with retry/backoff)
    const result = await smartreceive(
      msg,
      {
        maxRetries: 5,
        baseDelay: 100,
        maxDelay: 5000
      }
    );

    // Result is a list of {dataname, data, type} objects
    for (const { dataname, data, type } of result) {
      if (Array.isArray(data)) {
        log_trace(`Received Table '${dataname}' of type ${type}`);

        // Display table dimensions and column names (empty table => 0 columns)
        const columns = data.length > 0 ? Object.keys(data[0]) : [];
        console.log(`  Dimensions: ${data.length} rows x ${columns.length} columns`);
        console.log(`  Columns: ${columns.join(', ')}`);

        // Display first few rows
        console.log(`  First 5 rows:`);
        for (let i = 0; i < Math.min(5, data.length); i++) {
          console.log(`    Row ${i}: ${JSON.stringify(data[i])}`);
        }

        // Save to JSON file
        const output_path = `./received_${dataname}.json`;
        const json_str = JSON.stringify(data, null, 2);
        fs.writeFileSync(output_path, json_str);
        log_trace(`Saved Table to ${output_path}`);
      } else {
        log_trace(`Received unexpected data type for '${dataname}': ${typeof data}`);
      }
    }
  }
}
|
||||
|
||||
// Run the test
console.log("Starting Table transport test...");
console.log("Note: This receiver will wait for messages from the sender.");
console.log("Run test_js_to_js_table_sender.js first to send test data.");

// Run receiver. The promise is chained so "Test completed." is only printed
// once the receiver actually finishes, and failures are surfaced instead of
// leaving an unhandled rejection (previously the completion message was
// printed immediately, before the async test had run).
console.log("testing smartreceive");
test_table_receive()
  .then(() => console.log("Test completed."))
  .catch((err) => {
    console.error("Test failed:", err);
    process.exit(1);
  });
|
||||
164
test/test_js_to_js_table_sender.js
Normal file
164
test/test_js_to_js_table_sender.js
Normal file
@@ -0,0 +1,164 @@
|
||||
#!/usr/bin/env node
// Test script for Table transport testing
// Tests sending 1 large and 1 small Tables via direct and link transport
// Uses NATSBridge.js smartsend with "table" type
//
// Note: This test requires the apache-arrow library to serialize/deserialize table data.
// The JavaScript implementation uses apache-arrow for Arrow IPC serialization.

// BUGFIX: `log_trace` is intentionally NOT destructured here — this file
// declares its own `function log_trace` below, and importing the same
// identifier would throw "SyntaxError: Identifier 'log_trace' has already
// been declared" at load time.
const { smartsend, uuid4 } = require('./src/NATSBridge');

// Configuration
const SUBJECT = "/NATSBridge_table_test";
const NATS_URL = "nats.yiem.cc";
const FILESERVER_URL = "http://192.168.88.104:8080";

// Create correlation ID for tracing
const correlation_id = uuid4();
|
||||
|
||||
// Helper: Log with correlation ID
// Prefixes every line with an ISO-8601 timestamp and the module-level
// correlation id; returns nothing.
function log_trace(message) {
  console.log(`[${new Date().toISOString()}] [Correlation: ${correlation_id}] ${message}`);
}
|
||||
|
||||
// File upload handler for plik server.
//
// Two-step plik protocol:
//   1. POST /upload          -> upload id + token (OneShot: single download)
//   2. POST /file/<uploadid> -> multipart upload of the payload bytes
//
// Params: fileserver_url (base URL), dataname (payload/file name),
// data (Blob-compatible bytes), correlation_id (tracing id; unused directly —
// the local log_trace already embeds the module-level correlation id).
// Returns { status, uploadid, fileid, url }; throws Error on non-OK HTTP.
async function plik_upload_handler(fileserver_url, dataname, data, correlation_id) {
  // BUGFIX: the local log_trace takes a single `message` parameter; the
  // previous two-argument calls (log_trace(correlation_id, msg)) logged the
  // correlation id as the message and silently dropped the real message.
  log_trace(`Uploading ${dataname} to fileserver: ${fileserver_url}`);

  // Step 1: Get upload ID and token
  const url_getUploadID = `${fileserver_url}/upload`;
  const headers = {
    "Content-Type": "application/json"
  };
  const body = JSON.stringify({ OneShot: true });

  let response = await fetch(url_getUploadID, {
    method: "POST",
    headers: headers,
    body: body
  });

  if (!response.ok) {
    throw new Error(`Failed to get upload ID: ${response.status} ${response.statusText}`);
  }

  const responseJson = await response.json();
  const uploadid = responseJson.id;
  const uploadtoken = responseJson.uploadToken;

  // Step 2: Upload file data
  const url_upload = `${fileserver_url}/file/${uploadid}`;

  // Create multipart form data
  const formData = new FormData();
  const blob = new Blob([data], { type: "application/octet-stream" });
  formData.append("file", blob, dataname);

  response = await fetch(url_upload, {
    method: "POST",
    headers: {
      "X-UploadToken": uploadtoken
    },
    body: formData
  });

  if (!response.ok) {
    throw new Error(`Failed to upload file: ${response.status} ${response.statusText}`);
  }

  const fileResponseJson = await response.json();
  const fileid = fileResponseJson.id;

  // Build the download URL
  const url = `${fileserver_url}/file/${uploadid}/${fileid}/${encodeURIComponent(dataname)}`;

  log_trace(`Uploaded to URL: ${url}`);

  return {
    status: response.status,
    uploadid: uploadid,
    fileid: fileid,
    url: url
  };
}
|
||||
|
||||
// Sender: Send Tables via smartsend.
// Builds one small and one large row-object table and publishes both in a
// single smartsend call; smartsend chooses direct transport (Arrow IPC
// encoded) for the small table and link transport (fileserver upload) for
// the large one.
async function test_table_send() {
  // Note: This test requires apache-arrow library to create Arrow IPC data.
  // For now, plain arrays of row objects stand in for table data; in
  // production apache-arrow would produce real Arrow IPC payloads.

  // Small Table (a few rows) -> direct transport.
  const small_table = [
    { id: 1, name: "Alice", score: 95 },
    { id: 2, name: "Bob", score: 88 },
    { id: 3, name: "Charlie", score: 92 }
  ];

  // Large Table (~2MB of generated rows) -> link transport (> sizeThreshold).
  const large_table = Array.from({ length: 50000 }, (_, i) => ({
    id: i,
    message: `msg_${i}`,
    sender: `sender_${i}`,
    timestamp: new Date().toISOString(),
    priority: Math.floor(Math.random() * 3) + 1
  }));

  // One payload descriptor per table.
  const data1 = { dataname: "small_table", data: small_table, type: "table" };
  const data2 = { dataname: "large_table", data: large_table, type: "table" };

  // Publish both payloads in one envelope.
  const env = await smartsend(
    SUBJECT,
    [data1, data2],
    {
      natsUrl: NATS_URL,
      fileserverUrl: FILESERVER_URL,
      fileserverUploadHandler: plik_upload_handler,
      sizeThreshold: 1_000_000,
      correlationId: correlation_id,
      msgPurpose: "chat",
      senderName: "table_sender",
      receiverName: "",
      receiverId: "",
      replyTo: "",
      replyToMsgId: ""
    }
  );

  log_trace(`Sent message with ${env.payloads.length} payloads`);

  // Log transport details for each payload.
  env.payloads.forEach((payload, i) => {
    log_trace(`Payload ${i + 1} ('${payload.dataname}'):`);
    log_trace(`  Transport: ${payload.transport}`);
    log_trace(`  Type: ${payload.type}`);
    log_trace(`  Size: ${payload.size} bytes`);
    log_trace(`  Encoding: ${payload.encoding}`);
    if (payload.transport === "link") {
      log_trace(`  URL: ${payload.data}`);
    }
  });
}
|
||||
|
||||
// Run the test
console.log("Starting Table transport test...");
console.log(`Correlation ID: ${correlation_id}`);

// Run sender. The promise is chained so "Test completed." is only printed
// after the send actually finishes, and failures are surfaced instead of
// leaving an unhandled rejection (previously the completion message was
// printed immediately, before the async send had run).
console.log("start smartsend for tables");
test_table_send()
  .then(() => console.log("Test completed."))
  .catch((err) => {
    console.error("Test failed:", err);
    process.exit(1);
  });
|
||||
80
test/test_js_to_js_text_receiver.js
Normal file
80
test/test_js_to_js_text_receiver.js
Normal file
@@ -0,0 +1,80 @@
|
||||
#!/usr/bin/env node
// Test script for text transport testing
// Tests receiving 1 large and 1 small text from JavaScript serviceA to JavaScript serviceB
// Uses NATSBridge.js smartreceive with "text" type

// BUGFIX: `log_trace` is intentionally NOT destructured here — this file
// declares its own `function log_trace` below, and importing the same
// identifier would throw "SyntaxError: Identifier 'log_trace' has already
// been declared" at load time.
const { smartreceive } = require('./src/NATSBridge');

// Configuration
const SUBJECT = "/NATSBridge_text_test";
const NATS_URL = "nats.yiem.cc";
|
||||
|
||||
// Helper: Log with correlation ID
// Prints the message prefixed with an ISO-8601 timestamp; returns nothing.
function log_trace(message) {
  console.log(`[${new Date().toISOString()}] ${message}`);
}
|
||||
|
||||
// Receiver: Listen for messages and verify text handling.
// Connects to NATS, subscribes to SUBJECT, and for every incoming message
// decodes its payloads with smartreceive, previews each text, and saves it
// to ./received_<dataname>.txt.
async function test_text_receive() {
  // Connect to NATS
  const { connect } = require('nats');
  const fs = require('fs'); // hoisted out of the receive loop
  const nc = await connect({ servers: [NATS_URL] });

  // Subscribe to the subject
  const sub = nc.subscribe(SUBJECT);

  // Shut down after 2 minutes. BUGFIX: this must be scheduled BEFORE the
  // receive loop — `for await` over the subscription never terminates on
  // its own, so a timer placed after the loop (as before) was unreachable
  // and the process never exited. (The old "10 seconds" comment was also
  // wrong: the delay is 120000 ms.)
  setTimeout(() => {
    nc.close();
    process.exit(0);
  }, 120000);

  for await (const msg of sub) {
    log_trace(`Received message on ${msg.subject}`);

    // Use NATSBridge.smartreceive to handle the data (with retry/backoff)
    const result = await smartreceive(
      msg,
      {
        maxRetries: 5,
        baseDelay: 100,
        maxDelay: 5000
      }
    );

    // Result is a list of {dataname, data, type} objects
    for (const { dataname, data, type } of result) {
      if (typeof data === 'string') {
        log_trace(`Received text '${dataname}' of type ${type}`);
        log_trace(`  Length: ${data.length} characters`);

        // Display first 100 characters
        if (data.length > 100) {
          log_trace(`  First 100 characters: ${data.substring(0, 100)}...`);
        } else {
          log_trace(`  Content: ${data}`);
        }

        // Save to file
        const output_path = `./received_${dataname}.txt`;
        fs.writeFileSync(output_path, data);
        log_trace(`Saved text to ${output_path}`);
      } else {
        log_trace(`Received unexpected data type for '${dataname}': ${typeof data}`);
      }
    }
  }
}
|
||||
|
||||
// Run the test
console.log("Starting text transport test...");
console.log("Note: This receiver will wait for messages from the sender.");
console.log("Run test_js_to_js_text_sender.js first to send test data.");

// Run receiver. The promise is chained so "Test completed." is only printed
// once the receiver actually finishes, and failures are surfaced instead of
// leaving an unhandled rejection (previously the completion message was
// printed immediately, before the async test had run).
console.log("testing smartreceive for text");
test_text_receive()
  .then(() => console.log("Test completed."))
  .catch((err) => {
    console.error("Test failed:", err);
    process.exit(1);
  });
|
||||
140
test/test_js_to_js_text_sender.js
Normal file
140
test/test_js_to_js_text_sender.js
Normal file
@@ -0,0 +1,140 @@
|
||||
#!/usr/bin/env node
// Test script for text transport testing
// Tests sending 1 large and 1 small text from JavaScript serviceA to JavaScript serviceB
// Uses NATSBridge.js smartsend with "text" type

// BUGFIX: `log_trace` is intentionally NOT destructured here — this file
// declares its own `function log_trace` below, and importing the same
// identifier would throw "SyntaxError: Identifier 'log_trace' has already
// been declared" at load time.
const { smartsend, uuid4 } = require('./src/NATSBridge');

// Configuration
const SUBJECT = "/NATSBridge_text_test";
const NATS_URL = "nats.yiem.cc";
const FILESERVER_URL = "http://192.168.88.104:8080";

// Create correlation ID for tracing
const correlation_id = uuid4();
|
||||
|
||||
// Helper: Log with correlation ID
// Prefixes every line with an ISO-8601 timestamp and the module-level
// correlation id; returns nothing.
function log_trace(message) {
  console.log(`[${new Date().toISOString()}] [Correlation: ${correlation_id}] ${message}`);
}
|
||||
|
||||
// File upload handler for plik server.
// Two-step plik protocol: (1) POST /upload to obtain an upload id/token
// (OneShot => single download), (2) POST the payload bytes to
// /file/<uploadid> as multipart form data. Returns the download metadata;
// throws Error on any non-OK HTTP response.
async function plik_upload_handler(fileserver_url, dataname, data, correlation_id) {
  // Step 1: request an upload slot.
  const createResponse = await fetch(`${fileserver_url}/upload`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ OneShot: true })
  });

  if (!createResponse.ok) {
    throw new Error(`Failed to get upload ID: ${createResponse.status} ${createResponse.statusText}`);
  }

  const { id: uploadid, uploadToken: uploadtoken } = await createResponse.json();

  // Step 2: upload the payload as multipart/form-data under its dataname.
  const formData = new FormData();
  formData.append("file", new Blob([data], { type: "application/octet-stream" }), dataname);

  const uploadResponse = await fetch(`${fileserver_url}/file/${uploadid}`, {
    method: "POST",
    headers: { "X-UploadToken": uploadtoken },
    body: formData
  });

  if (!uploadResponse.ok) {
    throw new Error(`Failed to upload file: ${uploadResponse.status} ${uploadResponse.statusText}`);
  }

  const { id: fileid } = await uploadResponse.json();

  // Downloadable URL for the stored file.
  const url = `${fileserver_url}/file/${uploadid}/${fileid}/${encodeURIComponent(dataname)}`;

  return {
    status: uploadResponse.status,
    uploadid: uploadid,
    fileid: fileid,
    url: url
  };
}
|
||||
|
||||
// Sender: Send text via smartsend.
// Builds one small and one large string and publishes both in a single
// smartsend call; smartsend chooses direct transport (Base64-encoded UTF-8)
// for the small text and link transport (fileserver upload) for the large one.
async function test_text_send() {
  // Small text (well under the threshold) -> direct transport.
  const small_text = "Hello, this is a small text message. Testing direct transport via NATS.";

  // Large text (~2MB of generated lines) -> link transport (> sizeThreshold).
  const large_text = Array.from(
    { length: 50000 },
    (_, i) => `Line ${i}: This is a sample text line with some content to pad the size. `
  ).join("");

  // One payload descriptor per text.
  const data1 = { dataname: "small_text", data: small_text, type: "text" };
  const data2 = { dataname: "large_text", data: large_text, type: "text" };

  // Publish both payloads in one envelope.
  const env = await smartsend(
    SUBJECT,
    [data1, data2],
    {
      natsUrl: NATS_URL,
      fileserverUrl: FILESERVER_URL,
      fileserverUploadHandler: plik_upload_handler,
      sizeThreshold: 1_000_000,
      correlationId: correlation_id,
      msgPurpose: "chat",
      senderName: "text_sender",
      receiverName: "",
      receiverId: "",
      replyTo: "",
      replyToMsgId: ""
    }
  );

  log_trace(`Sent message with ${env.payloads.length} payloads`);

  // Log transport details for each payload.
  env.payloads.forEach((payload, i) => {
    log_trace(`Payload ${i + 1} ('${payload.dataname}'):`);
    log_trace(`  Transport: ${payload.transport}`);
    log_trace(`  Type: ${payload.type}`);
    log_trace(`  Size: ${payload.size} bytes`);
    log_trace(`  Encoding: ${payload.encoding}`);
    if (payload.transport === "link") {
      log_trace(`  URL: ${payload.data}`);
    }
  });
}
|
||||
|
||||
// Run the test
console.log("Starting text transport test...");
console.log(`Correlation ID: ${correlation_id}`);

// Run sender. The promise is chained so "Test completed." is only printed
// after the send actually finishes, and failures are surfaced instead of
// leaving an unhandled rejection (previously the completion message was
// printed immediately, before the async send had run).
console.log("start smartsend for text");
test_text_send()
  .then(() => console.log("Test completed."))
  .catch((err) => {
    console.error("Test failed:", err);
    process.exit(1);
  });
|
||||
Reference in New Issue
Block a user