/**
 * NATSBridge.js - Bi-Directional Data Bridge for JavaScript
 * Implements smartsend and smartreceive for NATS communication
 *
 * This module provides functionality for sending and receiving data across network boundaries
 * using NATS as the message bus, with support for both direct payload transport and
 * URL-based transport for larger payloads.
 *
 * File Server Handler Architecture:
 * The system uses handler functions to abstract file server operations, allowing support
 * for different file server implementations (e.g., Plik, AWS S3, custom HTTP server).
 *
 * Handler Function Signatures:
 *
 * ```javascript
 * // Upload handler - uploads data to file server and returns URL
 * // The handler is passed to smartsend as fileserverUploadHandler parameter
 * // It receives: (fileserver_url, dataname, data)
 * // Returns: { status, uploadid, fileid, url }
 * async function plik_oneshot_upload(fileserver_url, dataname, data) { ... }
 *
 * // Download handler - fetches data from file server URL with exponential backoff
 * // The handler is passed to smartreceive as fileserverDownloadHandler parameter
 * // It receives: (url, max_retries, base_delay, max_delay, correlation_id)
 * // Returns: ArrayBuffer (the downloaded data)
 * async function fileserverDownloadHandler(url, max_retries, base_delay, max_delay, correlation_id) { ... }
 * ```
 *
 * Multi-Payload Support (Standard API):
 * The system uses a standardized list-of-tuples format for all payload operations.
 * Even when sending a single payload, the user must wrap it in a list.
 *
 * API Standard:
 * ```javascript
 * // Input format for smartsend (always a list of tuples with type info)
 * [{ dataname, data, type }, ...]
 *
 * // Output format for smartreceive (always returns a list of tuples)
 * [{ dataname, data, type }, ...]
 * ```
 *
 * Supported types: "text", "dictionary", "table", "image", "audio", "video", "binary"
 */

// ---------------------------------------------- 100 --------------------------------------------- #
// Constants

// Payload-size threshold (bytes): below this, data is base64-encoded and sent
// inline over NATS ("direct"); at or above it, data is uploaded to the file
// server and only the URL is sent ("link").
const DEFAULT_SIZE_THRESHOLD = 1_000_000; // 1MB - threshold for switching from direct to link transport
const DEFAULT_NATS_URL = "nats://localhost:4222"; // Default NATS server URL
const DEFAULT_FILESERVER_URL = "http://localhost:8080"; // Default HTTP file server URL for link transport
// Helper: Generate UUID v4
function uuid4() {
  /**
   * Generate a random RFC 4122 version-4 UUID string.
   *
   * Prefers crypto.randomUUID() (cryptographically secure, available in
   * modern browsers and Node.js). Falls back to a Math.random()-based
   * generator on older runtimes; the fallback is NOT cryptographically
   * secure and is only suitable for correlation/message IDs.
   *
   * @returns {string} UUID string like "xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx"
   */
  if (typeof crypto !== 'undefined' && typeof crypto.randomUUID === 'function') {
    return crypto.randomUUID();
  }
  // Fallback: simple Math.random()-based UUID v4 generator
  return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function (c) {
    const r = Math.random() * 16 | 0;
    const v = c === 'x' ? r : (r & 0x3 | 0x8);
    return v.toString(16);
  });
}
// Helper: Log with correlation ID and timestamp
function log_trace(correlation_id, message) {
  /**
   * Emit a trace line to the console, prefixed with an ISO-8601 timestamp
   * and the correlation ID so related messages can be grepped together.
   */
  const when = new Date().toISOString();
  console.log(`[${when}] [Correlation: ${correlation_id}] ${message}`);
}
// Helper: Get size of data in bytes
function getDataSize(data) {
  /**
   * Measure a value's size in bytes.
   * Strings are measured as UTF-8; ArrayBuffer/Uint8Array by byteLength;
   * other non-null objects by the UTF-8 length of their JSON serialization.
   * Anything else (numbers, null, undefined, ...) reports 0.
   */
  if (typeof data === 'string') {
    return new TextEncoder().encode(data).length;
  }
  if (data instanceof ArrayBuffer || data instanceof Uint8Array) {
    return data.byteLength;
  }
  if (data !== null && typeof data === 'object') {
    // Objects: measure their JSON representation
    return new TextEncoder().encode(JSON.stringify(data)).length;
  }
  return 0;
}
// Helper: Convert ArrayBuffer to Base64 string
function arrayBufferToBase64(buffer) {
  /**
   * Encode the contents of an ArrayBuffer as a Base64 string.
   */
  const view = new Uint8Array(buffer);
  const chars = [];
  for (const byte of view) {
    chars.push(String.fromCharCode(byte));
  }
  return btoa(chars.join(''));
}
// Helper: Convert Base64 string to ArrayBuffer
function base64ToArrayBuffer(base64) {
  /**
   * Decode a Base64 string into a freshly allocated ArrayBuffer.
   */
  const decoded = atob(base64);
  const out = new Uint8Array(decoded.length);
  for (let idx = 0; idx < decoded.length; idx++) {
    out[idx] = decoded.charCodeAt(idx);
  }
  return out.buffer;
}
// Helper: Convert Uint8Array to Base64 string
function uint8ArrayToBase64(uint8array) {
  /**
   * Encode the contents of a Uint8Array as a Base64 string.
   */
  const parts = [];
  for (let i = 0; i < uint8array.byteLength; i++) {
    parts.push(String.fromCharCode(uint8array[i]));
  }
  return btoa(parts.join(''));
}
// Helper: Convert Base64 string to Uint8Array
function base64ToUint8Array(base64) {
  /**
   * Decode a Base64 string into a Uint8Array of raw bytes.
   */
  const raw = atob(base64);
  return Uint8Array.from(raw, (ch) => ch.charCodeAt(0));
}
// Helper: Serialize data based on type
function _serialize_data(data, type) {
  /**
   * Serialize data according to specified format
   *
   * Supported formats:
   * - "text": Treats data as text and converts to UTF-8 bytes
   * - "dictionary": Serializes data as JSON and returns the UTF-8 byte representation
   * - "table": Serializes data as an Arrow IPC stream (table format) - NOT IMPLEMENTED (requires arrow library)
   * - "image" / "audio" / "video" / "binary": Expects binary data (ArrayBuffer or Uint8Array) and returns it as bytes
   *
   * @param {*} data - Value to serialize (string, object, ArrayBuffer, or Uint8Array depending on type)
   * @param {string} type - One of "text", "dictionary", "table", "image", "audio", "video", "binary"
   * @returns {Uint8Array} Serialized bytes
   * @throws {Error} If data does not match the declared type, or the type is unknown/unsupported
   */
  const BINARY_TYPES = ["image", "audio", "video", "binary"];

  if (type === "text") {
    if (typeof data !== 'string') {
      throw new Error("Text data must be a String");
    }
    return new TextEncoder().encode(data);
  }

  if (type === "dictionary") {
    // JSON data - serialize directly
    return new TextEncoder().encode(JSON.stringify(data));
  }

  if (type === "table") {
    // Table data - convert to Arrow IPC stream (NOT IMPLEMENTED in pure JavaScript)
    // This would require the apache-arrow library
    throw new Error("Table serialization requires apache-arrow library");
  }

  if (BINARY_TYPES.includes(type)) {
    // All binary-like types share identical handling: normalize to Uint8Array.
    // (Previously four duplicated branches.)
    if (data instanceof Uint8Array) {
      return data;
    }
    if (data instanceof ArrayBuffer) {
      return new Uint8Array(data);
    }
    // Preserve the original per-type error messages ("Image data must be ...", etc.)
    const label = type.charAt(0).toUpperCase() + type.slice(1);
    throw new Error(`${label} data must be ArrayBuffer or Uint8Array`);
  }

  throw new Error(`Unknown type: ${type}`);
}
// Helper: Deserialize bytes based on type
function _deserialize_data(data, type, correlation_id) {
  /**
   * Deserialize bytes to data based on type.
   *
   * Supported formats:
   * - "text": decodes the bytes as a UTF-8 string
   * - "dictionary": decodes UTF-8 and parses the JSON string
   * - "table": Arrow IPC stream - NOT IMPLEMENTED (requires apache-arrow library)
   * - "image" / "audio" / "video" / "binary": returned as raw bytes unchanged
   *
   * @param {Uint8Array} data - Raw bytes to deserialize
   * @param {string} type - Payload type name
   * @param {string} correlation_id - Correlation ID (kept for API symmetry with callers)
   * @returns {*} Decoded value
   * @throws {Error} For "table" or an unknown type
   */
  switch (type) {
    case "text":
      return new TextDecoder().decode(data);
    case "dictionary":
      return JSON.parse(new TextDecoder().decode(data));
    case "table":
      // Deserializing an Arrow IPC stream is not implemented in pure JavaScript
      throw new Error("Table deserialization requires apache-arrow library");
    case "image":
    case "audio":
    case "video":
    case "binary":
      // Binary-like payloads pass through untouched
      return data;
    default:
      throw new Error(`Unknown type: ${type}`);
  }
}
// Helper: Upload data to file server
// Internal wrapper that adds correlation_id logging for smartsend
async function _upload_to_fileserver(fileserver_url, dataname, data, correlation_id) {
  /**
   * Internal upload helper - wraps plik_oneshot_upload to add correlation_id logging.
   * Lets smartsend trace uploads without changing the public handler signature.
   */
  log_trace(correlation_id, `Uploading ${dataname} to fileserver: ${fileserver_url}`);
  const uploadResult = await plik_oneshot_upload(fileserver_url, dataname, data);
  log_trace(correlation_id, `Uploaded to URL: ${uploadResult.url}`);
  return uploadResult;
}
// Helper: Fetch data from URL with exponential backoff
async function _fetch_with_backoff(url, max_retries, base_delay, max_delay, correlation_id) {
  /**
   * Fetch data from URL with retry logic using exponential backoff.
   *
   * @param {string} url - URL to download
   * @param {number} max_retries - Maximum number of fetch attempts
   * @param {number} base_delay - Initial backoff delay in ms (doubled after each failure)
   * @param {number} max_delay - Upper bound on the backoff delay in ms
   * @param {string} correlation_id - Correlation ID for trace logging
   * @returns {Promise<Uint8Array>} The downloaded bytes
   * @throws {Error} After max_retries failed attempts; the last underlying
   *                 failure is attached via the Error `cause` option
   */
  let delay = base_delay;
  let last_error = null;

  for (let attempt = 1; attempt <= max_retries; attempt++) {
    try {
      const response = await fetch(url);

      if (response.status === 200) {
        log_trace(correlation_id, `Successfully fetched data from ${url} on attempt ${attempt}`);
        const arrayBuffer = await response.arrayBuffer();
        return new Uint8Array(arrayBuffer);
      }
      throw new Error(`Failed to fetch: ${response.status} ${response.statusText}`);
    } catch (e) {
      last_error = e;
      log_trace(correlation_id, `Attempt ${attempt} failed: ${e.message}`);

      if (attempt < max_retries) {
        // Sleep, then double the delay up to max_delay (exponential backoff)
        await new Promise(resolve => setTimeout(resolve, delay));
        delay = Math.min(delay * 2, max_delay);
      }
    }
  }

  // Preserve the last underlying failure for diagnosis instead of discarding it
  throw new Error(`Failed to fetch data after ${max_retries} attempts`, { cause: last_error });
}
// Helper: Get payload bytes from data
function _get_payload_bytes(data) {
  /**
   * Normalize any supported value to a Uint8Array of bytes:
   * Uint8Array passes through, ArrayBuffer becomes a view, strings are
   * UTF-8 encoded, and anything else is JSON-serialized then UTF-8 encoded.
   */
  if (data instanceof Uint8Array) {
    return data;
  }
  if (data instanceof ArrayBuffer) {
    return new Uint8Array(data);
  }
  const text = typeof data === 'string' ? data : JSON.stringify(data);
  return new TextEncoder().encode(text);
}
// MessagePayload class - matches msg_payload_v1 Julia struct
class MessagePayload {
  /**
   * Represents a single payload in the message envelope.
   * Matches Julia's msg_payload_v1 struct.
   *
   * @param {Object} options - Payload options
   * @param {string} options.id - ID of this payload (auto-generated UUID if falsy)
   * @param {string} options.dataname - Name of this payload (e.g., "login_image")
   * @param {string} options.payload_type - "text", "dictionary", "table", "image", "audio", "video", "binary"
   * @param {string} options.transport - "direct" or "link"
   * @param {string} options.encoding - "none", "json", "base64", "arrow-ipc"
   * @param {number} options.size - Data size in bytes
   * @param {string|Uint8Array} options.data - Payload data (base64/bytes for direct, URL string for link)
   * @param {Object} options.metadata - Metadata for this payload (defaults to {})
   */
  constructor(options) {
    const { id, dataname, payload_type, transport, encoding, size, data, metadata } = options;
    this.id = id || uuid4();
    this.dataname = dataname;
    this.payload_type = payload_type;
    this.transport = transport;
    this.encoding = encoding;
    this.size = size;
    this.data = data;
    this.metadata = metadata || {};
  }

  // Convert to JSON object - uses snake_case to match Julia API
  toJSON() {
    const json = {
      id: this.id,
      dataname: this.dataname,
      payload_type: this.payload_type,
      transport: this.transport,
      encoding: this.encoding,
      size: this.size
    };

    const hasData = this.data !== null && this.data !== undefined;
    if (hasData && this.transport === "direct") {
      if (this.encoding === "base64" || this.encoding === "json") {
        // Already wire-ready - include verbatim
        json.data = this.data;
      } else {
        // Any other encoding falls back to base64 of the raw bytes
        json.data = uint8ArrayToBase64(_get_payload_bytes(this.data));
      }
    } else if (hasData && this.transport === "link") {
      // Link transport carries the download URL as a plain string
      json.data = this.data;
    }

    // Omit metadata entirely when empty, matching the wire format
    if (Object.keys(this.metadata).length > 0) {
      json.metadata = this.metadata;
    }

    return json;
  }
}
// MessageEnvelope class - matches msg_envelope_v1 Julia struct
class MessageEnvelope {
  /**
   * Represents the message envelope containing metadata and payloads.
   * Matches Julia's msg_envelope_v1 struct.
   *
   * @param {Object} options - Envelope options
   * @param {string} options.correlation_id - Unique identifier to track messages (auto UUID if falsy)
   * @param {string} options.msg_id - This message id (auto UUID if falsy)
   * @param {string} options.timestamp - Message published timestamp (defaults to now, ISO-8601)
   * @param {string} options.send_to - Topic/subject the sender sends to
   * @param {string} options.msg_purpose - Purpose of this message
   * @param {string} options.sender_name - Name of the sender
   * @param {string} options.sender_id - UUID of the sender (auto UUID if falsy)
   * @param {string} options.receiver_name - Name of the receiver
   * @param {string} options.receiver_id - UUID of the receiver
   * @param {string} options.reply_to - Topic to reply to
   * @param {string} options.reply_to_msg_id - Message id this message is replying to
   * @param {string} options.broker_url - NATS server address (defaults to DEFAULT_NATS_URL)
   * @param {Object} options.metadata - Metadata for the envelope (defaults to {})
   * @param {Array<MessagePayload>} options.payloads - Array of payloads (defaults to [])
   */
  constructor(options) {
    this.correlation_id = options.correlation_id || uuid4();
    this.msg_id = options.msg_id || uuid4();
    this.timestamp = options.timestamp || new Date().toISOString();
    this.send_to = options.send_to;
    this.msg_purpose = options.msg_purpose || "";
    this.sender_name = options.sender_name || "";
    this.sender_id = options.sender_id || uuid4();
    this.receiver_name = options.receiver_name || "";
    this.receiver_id = options.receiver_id || "";
    this.reply_to = options.reply_to || "";
    this.reply_to_msg_id = options.reply_to_msg_id || "";
    this.broker_url = options.broker_url || DEFAULT_NATS_URL;
    this.metadata = options.metadata || {};
    this.payloads = options.payloads || [];
  }

  // Convert to JSON object - uses snake_case to match Julia API
  toJSON() {
    // Scalar fields are copied verbatim, in the wire-format order
    const fields = [
      "correlation_id", "msg_id", "timestamp", "send_to", "msg_purpose",
      "sender_name", "sender_id", "receiver_name", "receiver_id",
      "reply_to", "reply_to_msg_id", "broker_url"
    ];
    const json = {};
    for (const field of fields) {
      json[field] = this[field];
    }

    // metadata and payloads are omitted entirely when empty
    if (Object.keys(this.metadata).length > 0) {
      json.metadata = this.metadata;
    }
    if (this.payloads.length > 0) {
      json.payloads = this.payloads.map((p) => p.toJSON());
    }

    return json;
  }

  // Convert to JSON string
  toString() {
    return JSON.stringify(this.toJSON());
  }
}
// SmartSend function - matches Julia smartsend signature and behavior
async function smartsend(subject, data, options = {}) {
  /**
   * Send data either directly via NATS or via a fileserver URL, depending on payload size
   *
   * This function intelligently routes data delivery based on payload size relative to a threshold.
   * If the serialized payload is smaller than `size_threshold`, it encodes the data as Base64 and publishes directly over NATS.
   * Otherwise, it uploads the data to a fileserver and publishes only the download URL over NATS.
   *
   * @param {string} subject - NATS subject to publish the message to
   * @param {Array} data - List of {dataname, data, type} objects to send (must be a list, even for single payload)
   * @param {Object} options - Additional options
   * @param {string} options.broker_url - URL of the NATS server (default: "nats://localhost:4222")
   * @param {string} options.fileserver_url - Base URL of the file server (default: "http://localhost:8080")
   * @param {Function} options.fileserver_upload_handler - Function to handle fileserver uploads
   *        (default: _upload_to_fileserver; must return { status, uploadid, fileid, url })
   * @param {number} options.size_threshold - Threshold in bytes separating direct vs link transport (default: 1MB)
   * @param {string} options.correlation_id - Optional correlation ID for tracing (auto-generated UUID if omitted)
   * @param {string} options.msg_purpose - Purpose of the message (default: "chat")
   * @param {string} options.sender_name - Name of the sender (default: "NATSBridge")
   * @param {string} options.receiver_name - Name of the receiver (default: "")
   * @param {string} options.receiver_id - UUID of the receiver (default: "")
   * @param {string} options.reply_to - Topic to reply to (default: "")
   * @param {string} options.reply_to_msg_id - Message ID this message is replying to (default: "")
   * @param {boolean} options.is_publish - Whether to automatically publish the message to NATS (default: true)
   *        - When true: Message is published to NATS automatically
   *        - When false: Returns (env, env_json_str) without publishing, allowing manual publishing
   * @returns {Promise<Object>} - A tuple-like object with { env: MessageEnvelope, env_json_str: string }
   *        - env: MessageEnvelope object with all metadata and payloads
   *        - env_json_str: JSON string representation of the envelope for manual publishing
   * @throws {Error} If serialization fails or the fileserver upload returns a non-200 status
   */
  const {
    broker_url = DEFAULT_NATS_URL,
    fileserver_url = DEFAULT_FILESERVER_URL,
    fileserver_upload_handler = _upload_to_fileserver,
    size_threshold = DEFAULT_SIZE_THRESHOLD,
    correlation_id = uuid4(),
    msg_purpose = "chat",
    sender_name = "NATSBridge",
    receiver_name = "",
    receiver_id = "",
    reply_to = "",
    reply_to_msg_id = "",
    is_publish = true // Whether to automatically publish the message to NATS
  } = options;

  log_trace(correlation_id, `Starting smartsend for subject: ${subject}`);

  // Generate message metadata
  const msg_id = uuid4();

  // Process each payload in the list
  const payloads = [];

  for (const payload of data) {
    const dataname = payload.dataname;
    const payloadData = payload.data;
    const payloadType = payload.type;

    // Serialize data based on type (throws on type mismatch or unknown type)
    const payloadBytes = _serialize_data(payloadData, payloadType);
    const payloadSize = payloadBytes.byteLength;

    log_trace(correlation_id, `Serialized payload '${dataname}' (payload_type: ${payloadType}) size: ${payloadSize} bytes`);

    // Decision: Direct vs Link - each payload is routed independently,
    // so one envelope can mix direct and link transports
    if (payloadSize < size_threshold) {
      // Direct path - Base64 encode and send via NATS
      const payloadB64 = uint8ArrayToBase64(payloadBytes);
      log_trace(correlation_id, `Using direct transport for ${payloadSize} bytes`);

      // Create MessagePayload for direct transport
      const payloadObj = new MessagePayload({
        dataname: dataname,
        payload_type: payloadType,
        transport: "direct",
        encoding: "base64",
        size: payloadSize,
        data: payloadB64,
        metadata: { payload_bytes: payloadSize }
      });
      payloads.push(payloadObj);
    } else {
      // Link path - Upload to HTTP server, send URL via NATS
      log_trace(correlation_id, `Using link transport, uploading to fileserver`);

      // Upload to HTTP server using plik_oneshot_upload handler
      // (or a custom handler supplied via options.fileserver_upload_handler)
      const response = await fileserver_upload_handler(fileserver_url, dataname, payloadBytes);

      if (response.status !== 200) {
        throw new Error(`Failed to upload data to fileserver: ${response.status}`);
      }

      const url = response.url;
      log_trace(correlation_id, `Uploaded to URL: ${url}`);

      // Create MessagePayload for link transport - only the URL travels over NATS
      const payloadObj = new MessagePayload({
        dataname: dataname,
        payload_type: payloadType,
        transport: "link",
        encoding: "none",
        size: payloadSize,
        data: url,
        metadata: {}
      });
      payloads.push(payloadObj);
    }
  }

  // Create MessageEnvelope with all payloads
  const env = new MessageEnvelope({
    correlation_id: correlation_id,
    msg_id: msg_id,
    send_to: subject,
    msg_purpose: msg_purpose,
    sender_name: sender_name,
    receiver_name: receiver_name,
    receiver_id: receiver_id,
    reply_to: reply_to,
    reply_to_msg_id: reply_to_msg_id,
    broker_url: broker_url,
    payloads: payloads
  });

  // Convert envelope to JSON string
  const env_json_str = env.toString();

  // Publish to NATS if is_publish is true
  if (is_publish) {
    await publish_message(broker_url, subject, env_json_str, correlation_id);
  }

  // Return both envelope and JSON string (tuple-like structure, matching Julia API)
  return {
    env: env,
    env_json_str: env_json_str
  };
}
// Helper: Publish message to NATS
async function publish_message(broker_url, subject, message, correlation_id) {
  /**
   * Publish a message to a NATS subject with proper connection management.
   *
   * NOTE(review): this is a placeholder implementation - it only logs the
   * message. In production, wire in the nats.js client, e.g.:
   *
   *   import { connect } from 'nats';
   *   const nc = await connect({ servers: [broker_url] });
   *   await nc.publish(subject, message);
   *   nc.close();
   *
   * @param {string} broker_url - NATS server URL (unused by the placeholder)
   * @param {string} subject - NATS subject to publish to
   * @param {string} message - JSON message to publish
   * @param {string} correlation_id - Correlation ID for logging
   */
  log_trace(correlation_id, `Publishing message to ${subject}`);

  // Placeholder behavior: log a truncated preview of the outgoing message
  console.log(`[NATS PUBLISH] Subject: ${subject}, Message: ${message.substring(0, 100)}...`);
}
// SmartReceive function - matches Julia smartreceive signature and behavior
async function smartreceive(msg, options = {}) {
  /**
   * Receive and process messages from NATS.
   *
   * Parses the incoming envelope and decodes every payload, handling both
   * direct transport (base64-encoded payloads carried in the message) and
   * link transport (payloads fetched from a file server URL with retries).
   *
   * @param {Object} msg - NATS message object with payload property (string or bytes)
   * @param {Object} options - Additional options
   * @param {Function} options.fileserver_download_handler - Function to handle downloading data from file server URLs
   *        (default: _fetch_with_backoff)
   * @param {number} options.max_retries - Maximum retry attempts for fetching URL (default: 5)
   * @param {number} options.base_delay - Initial delay for exponential backoff in ms (default: 100)
   * @param {number} options.max_delay - Maximum delay for exponential backoff in ms (default: 5000)
   * @returns {Promise<Object>} - JSON object of the envelope whose payloads field is
   *        replaced by a list of decoded {dataname, data, type} tuples
   * @throws {Error} On unknown transport type, failed download, or deserialization error
   */
  const {
    fileserver_download_handler = _fetch_with_backoff,
    max_retries = 5,
    base_delay = 100,
    max_delay = 5000
  } = options;

  // Decode the raw NATS payload (string or bytes) and parse the JSON envelope
  const envelope_text = typeof msg.payload === 'string'
    ? msg.payload
    : new TextDecoder().decode(msg.payload);
  const envelope = JSON.parse(envelope_text);

  log_trace(envelope.correlation_id, `Processing received message`);

  // Decode every payload in the envelope into {dataname, data, type} tuples
  const processed = [];

  for (const payload of envelope.payloads ?? []) {
    const { transport, dataname } = payload;

    if (transport === "direct") {
      // Direct transport - the base64 payload travels inside the message
      log_trace(envelope.correlation_id, `Direct transport - decoding payload '${dataname}'`);

      const raw_bytes = base64ToUint8Array(payload.data);
      const data_type = payload.payload_type;
      const value = _deserialize_data(raw_bytes, data_type, envelope.correlation_id);

      processed.push({ dataname, data: value, type: data_type });
    } else if (transport === "link") {
      // Link transport - the payload must be fetched from the fileserver URL
      const url = payload.data;
      log_trace(envelope.correlation_id, `Link transport - fetching '${dataname}' from URL: ${url}`);

      // Fetch with exponential backoff using the download handler
      const fetched = await fileserver_download_handler(
        url, max_retries, base_delay, max_delay, envelope.correlation_id
      );

      const data_type = payload.payload_type;
      const value = _deserialize_data(fetched, data_type, envelope.correlation_id);

      processed.push({ dataname, data: value, type: data_type });
    } else {
      throw new Error(`Unknown transport type for payload '${dataname}': ${transport}`);
    }
  }

  // Replace the wire-format payloads with decoded {dataname, data, type} tuples
  // This matches Julia's smartreceive return format
  envelope.payloads = processed;

  return envelope;
}
// plik_oneshot_upload - matches Julia plik_oneshot_upload function
// Upload handler signature: plik_oneshot_upload(fileserver_url, dataname, data)
// Returns: { status, uploadid, fileid, url }
async function plik_oneshot_upload(file_server_url, dataname, data) {
  /**
   * Upload a single file to a plik server using one-shot mode.
   * This function uploads raw byte array to a plik server in one-shot mode (no upload session).
   * It first creates a one-shot upload session by sending a POST request with {"OneShot": true},
   * retrieves an upload ID and token, then uploads the file data as multipart form data using the token.
   *
   * This is the default upload handler used by smartsend.
   * Custom handlers can be passed via the fileserver_upload_handler option.
   *
   * @param {string} file_server_url - Base URL of the plik server (e.g., "http://localhost:8080")
   * @param {string} dataname - Name of the file being uploaded
   * @param {Uint8Array} data - Raw byte data of the file content
   * @returns {Promise<Object>} - Dictionary with keys: status, uploadid, fileid, url
   * @throws {Error} If the session-creation request or the file upload returns a non-2xx status
   */

  // Step 1: Create a one-shot upload session to obtain an upload ID and token
  const url_getUploadID = `${file_server_url}/upload`;
  const headers = { "Content-Type": "application/json" };
  const body = JSON.stringify({ OneShot: true });

  let http_response = await fetch(url_getUploadID, {
    method: "POST",
    headers: headers,
    body: body
  });

  // Fail fast with a clear error instead of continuing with an undefined upload ID
  if (!http_response.ok) {
    throw new Error(`Failed to create plik upload session: ${http_response.status} ${http_response.statusText}`);
  }

  const response_json = await http_response.json();
  const uploadid = response_json.id;
  const uploadtoken = response_json.uploadToken;

  // Step 2: Upload the file data as multipart form data using the session token
  const url_upload = `${file_server_url}/file/${uploadid}`;

  const formData = new FormData();
  const blob = new Blob([data], { type: "application/octet-stream" });
  formData.append("file", blob, dataname);

  http_response = await fetch(url_upload, {
    method: "POST",
    headers: { "X-UploadToken": uploadtoken },
    body: formData
  });

  if (!http_response.ok) {
    throw new Error(`Failed to upload file to plik server: ${http_response.status} ${http_response.statusText}`);
  }

  const fileResponseJson = await http_response.json();
  const fileid = fileResponseJson.id;

  // URL of the uploaded data e.g. "http://192.168.1.20:8080/file/3F62E/4AgGT/test.zip"
  const url = `${file_server_url}/file/${uploadid}/${fileid}/${encodeURIComponent(dataname)}`;

  return {
    status: http_response.status,
    uploadid: uploadid,
    fileid: fileid,
    url: url
  };
}
// Export for Node.js (CommonJS environments only; guarded so ESM/browser
// contexts, where `module` is undefined, skip this block entirely).
if (typeof module !== 'undefined' && module.exports) {
    const nodeApi = {};
    nodeApi.MessageEnvelope = MessageEnvelope;
    nodeApi.MessagePayload = MessagePayload;
    nodeApi.smartsend = smartsend;
    nodeApi.smartreceive = smartreceive;
    nodeApi._serialize_data = _serialize_data;
    nodeApi._deserialize_data = _deserialize_data;
    nodeApi._fetch_with_backoff = _fetch_with_backoff;
    nodeApi._upload_to_fileserver = _upload_to_fileserver;
    nodeApi.plik_oneshot_upload = plik_oneshot_upload;
    nodeApi.DEFAULT_SIZE_THRESHOLD = DEFAULT_SIZE_THRESHOLD;
    nodeApi.DEFAULT_NATS_URL = DEFAULT_NATS_URL;
    nodeApi.DEFAULT_FILESERVER_URL = DEFAULT_FILESERVER_URL;
    nodeApi.uuid4 = uuid4;
    nodeApi.log_trace = log_trace;
    module.exports = nodeApi;
}
|
|
||||||
|
|
||||||
// Export for browser (attaches the public API to window.NATSBridge; skipped
// in Node, where `window` is undefined).
if (typeof window !== 'undefined') {
    const browserApi = {};
    browserApi.MessageEnvelope = MessageEnvelope;
    browserApi.MessagePayload = MessagePayload;
    browserApi.smartsend = smartsend;
    browserApi.smartreceive = smartreceive;
    browserApi._serialize_data = _serialize_data;
    browserApi._deserialize_data = _deserialize_data;
    browserApi._fetch_with_backoff = _fetch_with_backoff;
    browserApi._upload_to_fileserver = _upload_to_fileserver;
    browserApi.plik_oneshot_upload = plik_oneshot_upload;
    browserApi.DEFAULT_SIZE_THRESHOLD = DEFAULT_SIZE_THRESHOLD;
    browserApi.DEFAULT_NATS_URL = DEFAULT_NATS_URL;
    browserApi.DEFAULT_FILESERVER_URL = DEFAULT_FILESERVER_URL;
    browserApi.uuid4 = uuid4;
    browserApi.log_trace = log_trace;
    window.NATSBridge = browserApi;
}
|
|
||||||
@@ -1,871 +0,0 @@
|
|||||||
"""
|
|
||||||
Python NATS Bridge - Bi-Directional Data Bridge
|
|
||||||
|
|
||||||
This module provides functionality for sending and receiving data over NATS
|
|
||||||
using the Claim-Check pattern for large payloads.
|
|
||||||
|
|
||||||
Supported types: "text", "dictionary", "table", "image", "audio", "video", "binary"
|
|
||||||
|
|
||||||
Multi-Payload Support (Standard API):
|
|
||||||
The system uses a standardized list-of-tuples format for all payload operations.
|
|
||||||
Even when sending a single payload, the user must wrap it in a list.
|
|
||||||
|
|
||||||
API Standard:
|
|
||||||
# Input format for smartsend (always a list of tuples with type info)
|
|
||||||
[(dataname1, data1, type1), (dataname2, data2, type2), ...]
|
|
||||||
|
|
||||||
# Output format for smartreceive (returns a dictionary with payloads field containing list of tuples)
|
|
||||||
# Returns: Dict with envelope metadata and payloads field containing list of tuples
|
|
||||||
# {
|
|
||||||
# "correlation_id": "...",
|
|
||||||
# "msg_id": "...",
|
|
||||||
# "timestamp": "...",
|
|
||||||
# "send_to": "...",
|
|
||||||
# "msg_purpose": "...",
|
|
||||||
# "sender_name": "...",
|
|
||||||
# "sender_id": "...",
|
|
||||||
# "receiver_name": "...",
|
|
||||||
# "receiver_id": "...",
|
|
||||||
# "reply_to": "...",
|
|
||||||
# "reply_to_msg_id": "...",
|
|
||||||
# "broker_url": "...",
|
|
||||||
# "metadata": {...},
|
|
||||||
# "payloads": [(dataname1, data1, type1), (dataname2, data2, type2), ...]
|
|
||||||
# }
|
|
||||||
"""
|
|
||||||
|
|
||||||
import json
|
|
||||||
import time
|
|
||||||
import uuid
|
|
||||||
|
|
||||||
# Module-level defaults shared by smartsend/smartreceive.
DEFAULT_SIZE_THRESHOLD = 1000000  # 1 MB: payloads below this go direct over NATS, above go via fileserver link
DEFAULT_BROKER_URL = "nats://localhost:4222"  # default NATS broker
DEFAULT_FILESERVER_URL = "http://localhost:8080"  # default plik-style file server
|
|
||||||
|
|
||||||
# ============================================= 100 ============================================== #
|
|
||||||
|
|
||||||
|
|
||||||
class MessagePayload:
    """Internal message payload structure representing a single payload within a NATS message envelope.

    Supports both direct transport (base64-encoded data) and link transport (URL-based).

    Attributes:
        id: Unique identifier for this payload (auto-generated UUID if empty)
        dataname: Name of the payload (e.g., "login_image")
        payload_type: Payload type ("text", "dictionary", "table", "image", "audio", "video", "binary")
        transport: Transport method ("direct" or "link")
        encoding: Encoding method ("none", "json", "base64", "arrow-ipc")
        size: Size of the payload in bytes
        data: Payload data (base64/JSON string for direct, URL string for link)
        metadata: Optional metadata dictionary
    """

    def __init__(self, data, payload_type, id="", dataname="", transport="direct",
                 encoding="none", size=0, metadata=None):
        """Initialize a MessagePayload.

        Args:
            data: Payload data (base64 string for direct, URL string for link)
            payload_type: Payload type ("text", "dictionary", "table", "image", "audio", "video", "binary")
            id: Unique identifier for this payload (auto-generated if empty)
            dataname: Name of the payload (auto-generated UUID if empty)
            transport: Transport method ("direct" or "link")
            encoding: Encoding method ("none", "json", "base64", "arrow-ipc")
            size: Size of the payload in bytes
            metadata: Optional metadata dictionary
        """
        self.id = id if id else self._generate_uuid()
        self.dataname = dataname if dataname else self._generate_uuid()
        self.payload_type = payload_type
        self.transport = transport
        self.encoding = encoding
        self.size = size
        self.data = data
        self.metadata = metadata if metadata else {}

    def _generate_uuid(self):
        """Generate a UUID string."""
        return str(uuid.uuid4())

    def to_dict(self):
        """Convert payload to dictionary for JSON serialization."""
        payload_dict = {
            "id": self.id,
            "dataname": self.dataname,
            "payload_type": self.payload_type,
            "transport": self.transport,
            "encoding": self.encoding,
            "size": self.size,
        }

        # Include data based on transport type
        if self.transport == "direct" and self.data is not None:
            if self.encoding == "base64" or self.encoding == "json":
                # Already a JSON-safe string; pass through unchanged.
                payload_dict["data"] = self.data
            else:
                # For other encodings, base64-encode raw bytes for JSON safety.
                payload_dict["data"] = self._to_base64(self.data)
        elif self.transport == "link" and self.data is not None:
            # For link transport, data is a URL string
            payload_dict["data"] = self.data

        if self.metadata:
            payload_dict["metadata"] = self.metadata

        return payload_dict

    def _to_base64(self, data):
        """Convert bytes to a base64 string; non-bytes values pass through.

        Bug fix: ubinascii exists only on Micropython; fall back to the
        CPython base64 module (mirrors the fallback used in smartsend).
        """
        if isinstance(data, bytes):
            try:
                import ubinascii  # Micropython
                return ubinascii.b2a_base64(data).decode('utf-8').strip()
            except ImportError:
                import base64  # CPython fallback
                return base64.b64encode(data).decode('utf-8')
        return data

    def _from_base64(self, data):
        """Convert a base64 string to bytes.

        Bug fix: same Micropython/CPython fallback as _to_base64.
        """
        try:
            import ubinascii  # Micropython
            return ubinascii.a2b_base64(data)
        except ImportError:
            import base64  # CPython fallback
            return base64.b64decode(data)
|
|
||||||
|
|
||||||
|
|
||||||
class MessageEnvelope:
    """Internal message envelope structure containing multiple payloads with metadata."""

    def __init__(self, send_to, payloads, correlation_id="", msg_id="", timestamp="",
                 msg_purpose="", sender_name="", sender_id="", receiver_name="",
                 receiver_id="", reply_to="", reply_to_msg_id="", broker_url=DEFAULT_BROKER_URL,
                 metadata=None):
        """Initialize a MessageEnvelope.

        Args:
            send_to: NATS subject/topic to publish the message to
            payloads: List of MessagePayload objects
            correlation_id: Unique identifier to track messages (auto-generated if empty)
            msg_id: Unique message identifier (auto-generated if empty)
            timestamp: Message publication timestamp (auto-generated if empty)
            msg_purpose: Purpose of the message ("ACK", "NACK", "updateStatus", "shutdown", "chat", etc.)
            sender_name: Name of the sender
            sender_id: UUID of the sender (auto-generated if empty)
            receiver_name: Name of the receiver (empty means broadcast)
            receiver_id: UUID of the receiver (auto-generated if empty)
            reply_to: Topic where receiver should reply
            reply_to_msg_id: Message ID this message is replying to
            broker_url: NATS broker URL
            metadata: Optional message-level metadata
        """
        self.correlation_id = correlation_id if correlation_id else self._generate_uuid()
        self.msg_id = msg_id if msg_id else self._generate_uuid()
        self.timestamp = timestamp if timestamp else self._get_timestamp()
        self.send_to = send_to
        self.msg_purpose = msg_purpose
        self.sender_name = sender_name
        self.sender_id = sender_id if sender_id else self._generate_uuid()
        self.receiver_name = receiver_name
        self.receiver_id = receiver_id if receiver_id else self._generate_uuid()
        self.reply_to = reply_to
        self.reply_to_msg_id = reply_to_msg_id
        self.broker_url = broker_url
        self.metadata = metadata if metadata else {}
        self.payloads = payloads

    def _generate_uuid(self):
        """Generate a UUID string."""
        return str(uuid.uuid4())

    def _get_timestamp(self):
        """Get the current local time in ISO-8601-like format.

        Bug fix: the previous implementation hard-coded the date portion
        as "2026-02-21T"; now the real local date is formatted, matching
        the format used by log_trace.
        """
        return time.strftime("%Y-%m-%dT%H:%M:%S", time.localtime())

    def to_json(self):
        """Convert envelope to JSON string.

        Returns:
            str: JSON string representation of the envelope using snake_case field names
        """
        obj = {
            "correlation_id": self.correlation_id,
            "msg_id": self.msg_id,
            "timestamp": self.timestamp,
            "send_to": self.send_to,
            "msg_purpose": self.msg_purpose,
            "sender_name": self.sender_name,
            "sender_id": self.sender_id,
            "receiver_name": self.receiver_name,
            "receiver_id": self.receiver_id,
            "reply_to": self.reply_to,
            "reply_to_msg_id": self.reply_to_msg_id,
            "broker_url": self.broker_url
        }

        # Include metadata if not empty
        if self.metadata:
            obj["metadata"] = self.metadata

        # Convert payloads to JSON array (omitted entirely when empty)
        if self.payloads:
            payloads_json = []
            for payload in self.payloads:
                payloads_json.append(payload.to_dict())
            obj["payloads"] = payloads_json

        return json.dumps(obj)
|
|
||||||
|
|
||||||
|
|
||||||
def log_trace(correlation_id, message):
    """Print a trace line tagged with the local timestamp and correlation ID."""
    now = time.strftime("%Y-%m-%dT%H:%M:%S", time.localtime())
    line = "[{}] [Correlation: {}] {}".format(now, correlation_id, message)
    print(line)
|
|
||||||
|
|
||||||
|
|
||||||
def _serialize_data(data, payload_type):
|
|
||||||
"""Serialize data according to specified format.
|
|
||||||
|
|
||||||
This function serializes arbitrary data into a binary representation based on the specified type.
|
|
||||||
It supports multiple serialization formats for different data types.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
data: Data to serialize
|
|
||||||
- "text": String
|
|
||||||
- "dictionary": JSON-serializable dict
|
|
||||||
- "table": Tabular data (pandas DataFrame or list of dicts)
|
|
||||||
- "image", "audio", "video", "binary": bytes
|
|
||||||
payload_type: Target format ("text", "dictionary", "table", "image", "audio", "video", "binary")
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
bytes: Binary representation of the serialized data
|
|
||||||
|
|
||||||
Example:
|
|
||||||
>>> text_bytes = _serialize_data("Hello World", "text")
|
|
||||||
>>> json_bytes = _serialize_data({"key": "value"}, "dictionary")
|
|
||||||
>>> table_bytes = _serialize_data([{"id": 1, "name": "Alice"}], "table")
|
|
||||||
"""
|
|
||||||
if payload_type == "text":
|
|
||||||
if isinstance(data, str):
|
|
||||||
return data.encode('utf-8')
|
|
||||||
else:
|
|
||||||
raise ValueError("Text data must be a string")
|
|
||||||
|
|
||||||
elif payload_type == "dictionary":
|
|
||||||
if isinstance(data, dict):
|
|
||||||
json_str = json.dumps(data)
|
|
||||||
return json_str.encode('utf-8')
|
|
||||||
else:
|
|
||||||
raise ValueError("Dictionary data must be a dict")
|
|
||||||
|
|
||||||
elif payload_type == "table":
|
|
||||||
# Support pandas DataFrame or list of dicts
|
|
||||||
try:
|
|
||||||
import pandas as pd
|
|
||||||
if isinstance(data, pd.DataFrame):
|
|
||||||
# Convert DataFrame to JSON and then to bytes
|
|
||||||
json_str = data.to_json(orient='records', force_ascii=False)
|
|
||||||
return json_str.encode('utf-8')
|
|
||||||
elif isinstance(data, list) and len(data) > 0 and isinstance(data[0], dict):
|
|
||||||
# List of dicts
|
|
||||||
json_str = json.dumps(data)
|
|
||||||
return json_str.encode('utf-8')
|
|
||||||
else:
|
|
||||||
raise ValueError("Table data must be a pandas DataFrame or list of dicts")
|
|
||||||
except ImportError:
|
|
||||||
# Fallback: if pandas not available, treat as list of dicts
|
|
||||||
if isinstance(data, list):
|
|
||||||
json_str = json.dumps(data)
|
|
||||||
return json_str.encode('utf-8')
|
|
||||||
else:
|
|
||||||
raise ValueError("Table data requires pandas DataFrame or list of dicts (pandas not available)")
|
|
||||||
|
|
||||||
elif payload_type in ("image", "audio", "video", "binary"):
|
|
||||||
if isinstance(data, bytes):
|
|
||||||
return data
|
|
||||||
else:
|
|
||||||
raise ValueError("{} data must be bytes".format(payload_type.capitalize()))
|
|
||||||
|
|
||||||
else:
|
|
||||||
raise ValueError("Unknown payload_type: {}".format(payload_type))
|
|
||||||
|
|
||||||
|
|
||||||
def _deserialize_data(data_bytes, payload_type, correlation_id):
|
|
||||||
"""Deserialize bytes to data based on type.
|
|
||||||
|
|
||||||
This function converts serialized bytes back to Python data based on type.
|
|
||||||
It handles "text" (string), "dictionary" (JSON deserialization), "table" (JSON deserialization),
|
|
||||||
"image" (binary data), "audio" (binary data), "video" (binary data), and "binary" (binary data).
|
|
||||||
|
|
||||||
Args:
|
|
||||||
data_bytes: Serialized data as bytes
|
|
||||||
payload_type: Data type ("text", "dictionary", "table", "image", "audio", "video", "binary")
|
|
||||||
correlation_id: Correlation ID for logging
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Deserialized data:
|
|
||||||
- "text": str
|
|
||||||
- "dictionary": dict
|
|
||||||
- "table": list of dicts (or pandas DataFrame if available)
|
|
||||||
- "image", "audio", "video", "binary": bytes
|
|
||||||
|
|
||||||
Example:
|
|
||||||
>>> text_data = _deserialize_data(b"Hello", "text", "corr_id")
|
|
||||||
>>> json_data = _deserialize_data(b'{"key": "value"}', "dictionary", "corr_id")
|
|
||||||
>>> table_data = _deserialize_data(b'[{"id": 1}]', "table", "corr_id")
|
|
||||||
"""
|
|
||||||
if payload_type == "text":
|
|
||||||
return data_bytes.decode('utf-8')
|
|
||||||
|
|
||||||
elif payload_type == "dictionary":
|
|
||||||
json_str = data_bytes.decode('utf-8')
|
|
||||||
return json.loads(json_str)
|
|
||||||
|
|
||||||
elif payload_type == "table":
|
|
||||||
# Deserialize table data (JSON format)
|
|
||||||
json_str = data_bytes.decode('utf-8')
|
|
||||||
table_data = json.loads(json_str)
|
|
||||||
# If pandas is available, try to convert to DataFrame
|
|
||||||
try:
|
|
||||||
import pandas as pd
|
|
||||||
return pd.DataFrame(table_data)
|
|
||||||
except ImportError:
|
|
||||||
return table_data
|
|
||||||
|
|
||||||
elif payload_type in ("image", "audio", "video", "binary"):
|
|
||||||
return data_bytes
|
|
||||||
|
|
||||||
else:
|
|
||||||
raise ValueError("Unknown payload_type: {}".format(payload_type))
|
|
||||||
|
|
||||||
|
|
||||||
class NATSConnection:
    """Simple NATS connection for Python and Micropython."""

    def __init__(self, url=DEFAULT_BROKER_URL):
        """Initialize NATS connection.

        Args:
            url: NATS server URL (e.g., "nats://localhost:4222")
        """
        self.url = url
        self.host = "localhost"   # overwritten by _parse_url
        self.port = 4222          # default NATS port, kept if URL has no port
        self.conn = None          # socket, set by connect()
        self._parse_url(url)

    def _parse_url(self, url):
        """Parse NATS URL to extract host and port."""
        if url.startswith("nats://"):
            url = url[7:]
        elif url.startswith("tls://"):
            url = url[6:]

        if ":" in url:
            self.host, port_str = url.split(":")
            self.port = int(port_str)
        else:
            self.host = url

    def connect(self):
        """Connect to NATS server.

        Bug fix: the Micropython fallback previously caught NameError, but a
        missing module raises ImportError, so the fallback was unreachable.
        """
        try:
            import socket
            addr = socket.getaddrinfo(self.host, self.port)[0][-1]
            self.conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        except ImportError:
            # Micropython fallback
            import usocket
            addr = usocket.getaddrinfo(self.host, self.port)[0][-1]
            self.conn = usocket.socket()
        self.conn.connect(addr)

        log_trace("", "Connected to NATS server at {}:{}".format(self.host, self.port))

    def publish(self, subject, message):
        """Publish a message to a NATS subject.

        Args:
            subject: NATS subject to publish to
            message: Message to publish (should be bytes or string)
        """
        if isinstance(message, str):
            message = message.encode('utf-8')

        # Simple NATS protocol implementation: PUB <subject> <size>\r\n<payload>\r\n
        msg = "PUB {} {}\r\n".format(subject, len(message))
        msg = msg.encode('utf-8') + message + b"\r\n"

        # Bug fix: removed the dead try/except NameError wrapper whose two
        # branches were identical; send() works the same on both platforms.
        self.conn.send(msg)

        log_trace("", "Message published to {}".format(subject))

    def subscribe(self, subject, callback):
        """Subscribe to a NATS subject.

        Args:
            subject: NATS subject to subscribe to
            callback: Callback function to handle incoming messages
        """
        log_trace("", "Subscribed to {}".format(subject))
        # Simplified subscription - in a real implementation, you'd handle SUB/PUB messages.
        # For Micropython, we'll use a simple polling approach; state is stored
        # for a future wait_message() implementation to consume.
        self.subscribed_subject = subject
        self.subscription_callback = callback

    def wait_message(self, timeout=1000):
        """Wait for incoming message.

        Args:
            timeout: Timeout in milliseconds

        Returns:
            NATS message object or None if timeout
        """
        # Simplified message reading - in a real implementation, you'd read
        # from the socket. For now, this is a placeholder that always times out.
        return None

    def close(self):
        """Close the NATS connection (idempotent)."""
        if self.conn:
            self.conn.close()
            self.conn = None
            log_trace("", "NATS connection closed")
|
|
||||||
|
|
||||||
|
|
||||||
def _fetch_with_backoff(url, max_retries=5, base_delay=100, max_delay=5000, correlation_id=""):
    """Fetch data from URL with exponential backoff.

    Retrieves data from a URL with retry logic using exponential backoff to
    handle transient failures.

    Args:
        url: URL to fetch from
        max_retries: Maximum number of retry attempts (default: 5)
        base_delay: Initial delay in milliseconds (default: 100)
        max_delay: Maximum delay in milliseconds (default: 5000)
        correlation_id: Correlation ID for logging

    Returns:
        bytes: Fetched data

    Raises:
        Exception: If no HTTP library is installed, or all retry attempts fail

    Example:
        >>> data = _fetch_with_backoff("http://example.com/file.zip", 5, 100, 5000, "corr_id")
    """
    # Bug fix: resolve the HTTP client once up front. Previously the
    # ImportError was raised inside the retry loop and caught by the broad
    # except, so a permanently-missing library was retried max_retries times
    # with backoff sleeps before failing.
    is_micropython = False
    try:
        import urequests as _http  # Micropython
        is_micropython = True
    except ImportError:
        try:
            import requests as _http  # CPython
        except ImportError:
            raise Exception("No HTTP library available (urequests or requests)")

    delay = base_delay
    for attempt in range(1, max_retries + 1):
        try:
            response = _http.get(url)
            if not is_micropython:
                # requests only: turn 4xx/5xx into exceptions for the retry path
                response.raise_for_status()
            status_code = response.status_code
            content = response.content

            if status_code == 200:
                log_trace(correlation_id, "Successfully fetched data from {} on attempt {}".format(url, attempt))
                return content
            raise Exception("Failed to fetch: {}".format(status_code))
        except Exception as e:
            log_trace(correlation_id, "Attempt {} failed: {}".format(attempt, str(e)))
            if attempt < max_retries:
                time.sleep(delay / 1000.0)
                delay = min(delay * 2, max_delay)  # exponential backoff, capped

    raise Exception("Failed to fetch data after {} attempts".format(max_retries))
|
|
||||||
|
|
||||||
|
|
||||||
def plik_oneshot_upload(fileserver_url, dataname, data):
    """Upload a single file to a plik server using one-shot mode.

    This function uploads raw byte data to a plik server in one-shot mode (no
    upload session). It first creates a one-shot upload session by sending a
    POST request with {"OneShot": true}, retrieves an upload ID and token, then
    uploads the file data as multipart form data using the token.

    Args:
        fileserver_url: Base URL of the plik server (e.g., "http://localhost:8080")
        dataname: Name of the file being uploaded
        data: Raw byte data of the file content

    Returns:
        dict: Dictionary with keys:
            - "status": HTTP server response status
            - "uploadid": ID of the one-shot upload session
            - "fileid": ID of the uploaded file within the session
            - "url": Full URL to download the uploaded file

    Example:
        >>> result = plik_oneshot_upload("http://localhost:8080", "test.txt", b"hello world")
        >>> result["status"], result["uploadid"], result["fileid"], result["url"]
    """
    import json

    try:
        import urequests  # Micropython
    except ImportError:
        import requests as urequests  # CPython fallback

    # Step 1: create the one-shot upload session and obtain ID + token
    url_get_upload_id = "{}/upload".format(fileserver_url)
    headers = {"Content-Type": "application/json"}
    body = json.dumps({"OneShot": True})

    response = urequests.post(url_get_upload_id, headers=headers, data=body)
    response_json = json.loads(response.text if hasattr(response, 'text') else response.content)

    uploadid = response_json.get("id")
    uploadtoken = response_json.get("uploadToken")

    # Step 2: upload the file data
    url_upload = "{}/file/{}".format(fileserver_url, uploadid)

    # For Micropython, we need to construct the multipart form data manually.
    # This is a simplified approach.
    boundary = "----WebKitFormBoundary{}".format(uuid.uuid4().hex[:16])

    part1 = "--{}\r\n".format(boundary)
    part1 += "Content-Disposition: form-data; name=\"file\"; filename=\"{}\"\r\n".format(dataname)
    part1 += "Content-Type: application/octet-stream\r\n\r\n"
    part1_bytes = part1.encode('utf-8')

    part2 = "\r\n--{}--".format(boundary)
    part2_bytes = part2.encode('utf-8')

    full_body = part1_bytes + data + part2_bytes
    content_type = "multipart/form-data; boundary={}".format(boundary)

    # Bug fix: the X-UploadToken header was built but never sent; plik rejects
    # uploads without it (the JS implementation sends it on this request).
    upload_headers = {
        "Content-Type": content_type,
        "X-UploadToken": uploadtoken,
    }

    response = urequests.post(url_upload, headers=upload_headers, data=full_body)
    response_json = json.loads(response.text if hasattr(response, 'text') else response.content)

    fileid = response_json.get("id")
    # Bug fix: the download URL must include the file ID, matching the JS/Julia
    # implementations: {base}/file/{uploadid}/{fileid}/{dataname}.
    url = "{}/file/{}/{}/{}".format(fileserver_url, uploadid, fileid, dataname)

    return {
        "status": response.status_code,
        "uploadid": uploadid,
        "fileid": fileid,
        "url": url
    }
|
|
||||||
|
|
||||||
|
|
||||||
def smartsend(subject, data, broker_url=DEFAULT_BROKER_URL, fileserver_url=DEFAULT_FILESERVER_URL,
|
|
||||||
fileserver_upload_handler=plik_oneshot_upload, size_threshold=DEFAULT_SIZE_THRESHOLD,
|
|
||||||
correlation_id=None, msg_purpose="chat", sender_name="NATSBridge",
|
|
||||||
receiver_name="", receiver_id="", reply_to="", reply_to_msg_id="", is_publish=True):
|
|
||||||
"""Send data either directly via NATS or via a fileserver URL, depending on payload size.
|
|
||||||
|
|
||||||
This function intelligently routes data delivery based on payload size relative to a threshold.
|
|
||||||
If the serialized payload is smaller than `size_threshold`, it encodes the data as Base64 and
|
|
||||||
publishes directly over NATS. Otherwise, it uploads the data to a fileserver and publishes
|
|
||||||
only the download URL over NATS.
|
|
||||||
|
|
||||||
API Standard:
|
|
||||||
- Input format: List of (dataname, data, payload_type) tuples
|
|
||||||
- Even single payloads must be wrapped in a list
|
|
||||||
- Each payload can have a different type, enabling mixed-content messages
|
|
||||||
|
|
||||||
Args:
|
|
||||||
subject: NATS subject to publish the message to
|
|
||||||
data: List of (dataname, data, payload_type) tuples to send
|
|
||||||
- dataname: Name of the payload
|
|
||||||
- data: The actual data to send
|
|
||||||
- payload_type: Payload type ("text", "dictionary", "table", "image", "audio", "video", "binary")
|
|
||||||
- Example: [("message", "Hello World!", "text"), ("config", {"key": "value"}, "dictionary")]
|
|
||||||
broker_url: URL of the NATS server
|
|
||||||
fileserver_url: URL of the HTTP file server
|
|
||||||
fileserver_upload_handler: Function to handle fileserver uploads (must return dict with "status", "uploadid", "fileid", "url" keys)
|
|
||||||
size_threshold: Threshold in bytes separating direct vs link transport (default: 1MB)
|
|
||||||
correlation_id: Optional correlation ID for tracing; if None, a UUID is generated
|
|
||||||
msg_purpose: Purpose of the message ("ACK", "NACK", "updateStatus", "shutdown", "chat", etc.)
|
|
||||||
sender_name: Name of the sender
|
|
||||||
receiver_name: Name of the receiver (empty string means broadcast)
|
|
||||||
receiver_id: UUID of the receiver (empty string means broadcast)
|
|
||||||
reply_to: Topic to reply to (empty string if no reply expected)
|
|
||||||
reply_to_msg_id: Message ID this message is replying to
|
|
||||||
is_publish: Whether to automatically publish the message to NATS (default: True)
|
|
||||||
- When True: message is published to NATS
|
|
||||||
- When False: returns envelope and JSON string without publishing
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
tuple: (env, env_json_str) where:
|
|
||||||
- env: MessageEnvelope object with all metadata and payloads
|
|
||||||
- env_json_str: JSON string representation of the envelope for publishing
|
|
||||||
|
|
||||||
Example:
|
|
||||||
>>> data = [("message", "Hello World!", "text")]
|
|
||||||
>>> env, env_json_str = smartsend("/test", data)
|
|
||||||
>>> # env: MessageEnvelope with all metadata and payloads
|
|
||||||
>>> # env_json_str: JSON string for publishing
|
|
||||||
"""
|
|
||||||
# Generate correlation ID if not provided
|
|
||||||
cid = correlation_id if correlation_id is not None else str(uuid.uuid4())
|
|
||||||
|
|
||||||
log_trace(cid, "Starting smartsend for subject: {}".format(subject))
|
|
||||||
|
|
||||||
# Generate message metadata
|
|
||||||
msg_id = str(uuid.uuid4())
|
|
||||||
|
|
||||||
# Process each payload in the list
|
|
||||||
payloads = []
|
|
||||||
|
|
||||||
for dataname, payload_data, payload_type in data:
|
|
||||||
# Serialize data based on type
|
|
||||||
payload_bytes = _serialize_data(payload_data, payload_type)
|
|
||||||
|
|
||||||
payload_size = len(payload_bytes)
|
|
||||||
log_trace(cid, "Serialized payload '{}' (payload_type: {}) size: {} bytes".format(
|
|
||||||
dataname, payload_type, payload_size))
|
|
||||||
|
|
||||||
# Decision: Direct vs Link
|
|
||||||
if payload_size < size_threshold:
|
|
||||||
# Direct path - Base64 encode and send via NATS
|
|
||||||
# Convert to base64 string for JSON
|
|
||||||
try:
|
|
||||||
import ubinascii
|
|
||||||
payload_b64_str = ubinascii.b2a_base64(payload_bytes).decode('utf-8').strip()
|
|
||||||
except ImportError:
|
|
||||||
import base64
|
|
||||||
payload_b64_str = base64.b64encode(payload_bytes).decode('utf-8')
|
|
||||||
|
|
||||||
log_trace(cid, "Using direct transport for {} bytes".format(payload_size))
|
|
||||||
|
|
||||||
# Create MessagePayload for direct transport
|
|
||||||
payload = MessagePayload(
|
|
||||||
payload_b64_str,
|
|
||||||
payload_type,
|
|
||||||
id=str(uuid.uuid4()),
|
|
||||||
dataname=dataname,
|
|
||||||
transport="direct",
|
|
||||||
encoding="base64",
|
|
||||||
size=payload_size,
|
|
||||||
metadata={"payload_bytes": payload_size}
|
|
||||||
)
|
|
||||||
payloads.append(payload)
|
|
||||||
else:
|
|
||||||
# Link path - Upload to HTTP server, send URL via NATS
|
|
||||||
log_trace(cid, "Using link transport, uploading to fileserver")
|
|
||||||
|
|
||||||
# Upload to HTTP server
|
|
||||||
response = fileserver_upload_handler(fileserver_url, dataname, payload_bytes)
|
|
||||||
|
|
||||||
if response.get("status") != 200:
|
|
||||||
raise Exception("Failed to upload data to fileserver: {}".format(response.get("status")))
|
|
||||||
|
|
||||||
url = response.get("url")
|
|
||||||
log_trace(cid, "Uploaded to URL: {}".format(url))
|
|
||||||
|
|
||||||
# Create MessagePayload for link transport
|
|
||||||
payload = MessagePayload(
|
|
||||||
url,
|
|
||||||
payload_type,
|
|
||||||
id=str(uuid.uuid4()),
|
|
||||||
dataname=dataname,
|
|
||||||
transport="link",
|
|
||||||
encoding="none",
|
|
||||||
size=payload_size,
|
|
||||||
metadata={}
|
|
||||||
)
|
|
||||||
payloads.append(payload)
|
|
||||||
|
|
||||||
# Create MessageEnvelope with all payloads
|
|
||||||
env = MessageEnvelope(
|
|
||||||
subject,
|
|
||||||
payloads,
|
|
||||||
correlation_id=cid,
|
|
||||||
msg_id=msg_id,
|
|
||||||
msg_purpose=msg_purpose,
|
|
||||||
sender_name=sender_name,
|
|
||||||
sender_id=str(uuid.uuid4()),
|
|
||||||
receiver_name=receiver_name,
|
|
||||||
receiver_id=receiver_id,
|
|
||||||
reply_to=reply_to,
|
|
||||||
reply_to_msg_id=reply_to_msg_id,
|
|
||||||
broker_url=broker_url,
|
|
||||||
metadata={}
|
|
||||||
)
|
|
||||||
|
|
||||||
msg_json = env.to_json()
|
|
||||||
|
|
||||||
# Publish to NATS if is_publish is True
|
|
||||||
if is_publish:
|
|
||||||
nats_conn = NATSConnection(broker_url)
|
|
||||||
nats_conn.connect()
|
|
||||||
nats_conn.publish(subject, msg_json)
|
|
||||||
nats_conn.close()
|
|
||||||
|
|
||||||
# Return tuple of (envelope, json_string) for both direct and link transport
|
|
||||||
return (env, msg_json)
|
|
||||||
|
|
||||||
|
|
||||||
def smartreceive(msg, fileserver_download_handler=_fetch_with_backoff, max_retries=5,
                 base_delay=100, max_delay=5000):
    """Receive and process messages from NATS.

    This function processes incoming NATS messages, handling both direct transport
    (base64 decoded payloads) and link transport (URL-based payloads).

    API Standard:
        - Returns a dictionary with envelope metadata and 'payloads' field
        - payloads field contains list of (dataname, data, payload_type) tuples
        - Supports mixed-content messages with different payload types

    Args:
        msg: NATS message to process (dict or JSON string with envelope data)
        fileserver_download_handler: Function to handle downloading data from file server URLs
            Receives: (url, max_retries, base_delay, max_delay, correlation_id)
            Returns: bytes (the downloaded data)
        max_retries: Maximum retry attempts for fetching URL (default: 5)
        base_delay: Initial delay for exponential backoff in ms (default: 100)
        max_delay: Maximum delay for exponential backoff in ms (default: 5000)

    Returns:
        dict: Envelope dictionary with metadata and 'payloads' field containing list of
            (dataname, data, payload_type) tuples.
            NOTE: when ``msg`` is passed as a dict, that same dict is returned and its
            'payloads' entry is replaced in place with the decoded tuples.

    Raises:
        ValueError: If a payload carries an unknown transport type.

    Example:
        >>> env = smartreceive(msg)
        >>> for dataname, data, payload_type in env["payloads"]:
        ...     print("Received {} of type {}: {}".format(dataname, payload_type, data))
    """
    # Parse the JSON envelope (accept either a pre-parsed dict or a JSON string)
    json_data = msg if isinstance(msg, dict) else json.loads(msg)
    correlation_id = json_data.get("correlation_id", "")
    log_trace(correlation_id, "Processing received message")

    # Resolve the base64 decoder ONCE (MicroPython ships ubinascii; CPython
    # ships base64) instead of re-attempting the import for every payload.
    try:
        import ubinascii

        def _b64decode(b64_str):
            return ubinascii.a2b_base64(b64_str.encode('utf-8'))
    except ImportError:
        import base64
        _b64decode = base64.b64decode

    # Process all payloads in the envelope into (dataname, data, payload_type) tuples
    payloads_list = []

    for payload in json_data.get("payloads", []):
        transport = payload.get("transport", "")
        dataname = payload.get("dataname", "")

        if transport == "direct":
            # Direct transport: payload data travels inline as base64 text
            log_trace(correlation_id,
                "Direct transport - decoding payload '{}'".format(dataname))

            payload_bytes = _b64decode(payload.get("data", ""))

            # Deserialize based on type
            payload_type = payload.get("payload_type", "")
            data = _deserialize_data(payload_bytes, payload_type, correlation_id)

            payloads_list.append((dataname, data, payload_type))

        elif transport == "link":
            # Link transport: payload data lives on the file server; fetch it
            url = payload.get("data", "")
            log_trace(correlation_id,
                "Link transport - fetching '{}' from URL: {}".format(dataname, url))

            # Fetch with exponential backoff
            downloaded_data = fileserver_download_handler(
                url, max_retries, base_delay, max_delay, correlation_id
            )

            # Deserialize based on type
            payload_type = payload.get("payload_type", "")
            data = _deserialize_data(downloaded_data, payload_type, correlation_id)

            payloads_list.append((dataname, data, payload_type))

        else:
            raise ValueError("Unknown transport type for payload '{}': {}".format(dataname, transport))

    # Replace payloads field with the processed list of (dataname, data, payload_type)
    # tuples. NOTE: this mutates the caller's dict when msg was passed as a dict.
    json_data["payloads"] = payloads_list

    return json_data
|
|
||||||
|
|
||||||
|
|
||||||
# Utility functions
|
|
||||||
def generate_uuid():
    """Return a freshly generated random (version 4) UUID as a string."""
    new_id = uuid.uuid4()
    return str(new_id)
|
|
||||||
|
|
||||||
|
|
||||||
def get_timestamp():
    """Return the current local time as an ISO-8601-style string.

    Format is YYYY-MM-DDTHH:MM:SS with no timezone suffix.
    """
    now = time.localtime()
    return time.strftime("%Y-%m-%dT%H:%M:%S", now)
|
|
||||||
|
|
||||||
|
|
||||||
# Example usage
|
|
||||||
if __name__ == "__main__":
    # Self-describing banner: summarize the public API and show usage examples.
    banner = [
        "NATSBridge - Bi-Directional Data Bridge",
        "=======================================",
        "This module provides:",
        " - MessageEnvelope: Message envelope structure with snake_case fields",
        " - MessagePayload: Payload structure with payload_type field",
        " - smartsend: Send data via NATS with automatic transport selection",
        " - smartreceive: Receive and process messages from NATS",
        " - plik_oneshot_upload: Upload files to HTTP file server",
        " - _fetch_with_backoff: Fetch data from URLs with retry logic",
        "",
        "Usage:",
        " from nats_bridge import smartsend, smartreceive",
        "",
        ' # Send data (list of (dataname, data, payload_type) tuples)',
        ' # Even single payloads must be wrapped in a list',
        ' data = [("message", "Hello World!", "text")]',
        ' env, env_json_str = smartsend("my.subject", data)',
        "",
        ' # On receiver:',
        ' env = smartreceive(msg)',
        ' # env contains envelope metadata and payloads field',
        ' for dataname, data, payload_type in env["payloads"]:',
        ' print("Received {} of type {}: {}".format(dataname, payload_type, data))',
        "",
        ' # Mixed-content message example:',
        ' mixed_data = [',
        ' ("text", "Hello!", "text"),',
        ' ("config", {"key": "value"}, "dictionary"),',
        ' ("table", [{"id": 1}], "table")',
        ' ]',
        ' smartsend("/chat", mixed_data)',
    ]
    for line in banner:
        print(line)
|
|
||||||
Reference in New Issue
Block a user