1st commit

This commit is contained in:
2026-02-10 06:55:29 +07:00
commit 9ecd81c400
17 changed files with 3174 additions and 0 deletions

View File

@@ -0,0 +1,54 @@
#!/usr/bin/env node
// Scenario 2: Deep Dive Analysis (Large Arrow Table)
// Tests large Arrow tables (> 1MB) sent via HTTP fileserver
const { SmartSend } = require('../js_bridge');
// Configuration
const ANALYSIS_SUBJECT = "analysis_results";
const NATS_URL = "nats://localhost:4222";
// Create a correlation ID (v4 UUID) for tracing this send end-to-end.
// Node's built-in crypto.randomUUID() (>= 14.17) replaces the third-party
// `uuid` package — same RFC 4122 v4 format, one less dependency.
const correlationId = require('crypto').randomUUID();
// Sender: build a large in-memory columnar table and ship it to Julia.
// SmartSend inspects the serialized payload size; tables over the threshold
// (> 1MB per the scenario header) are offloaded to the HTTP fileserver, and
// the returned envelope reports which transport/URL was used.
async function sendLargeTable() {
  // 1 million rows — large enough to force the fileserver transport path.
  const numRows = 1000000;

  // Column-oriented source data: sequential ids, uniform random values,
  // and a category drawn at random from A/B/C.
  const CATEGORIES = ['A', 'B', 'C'];
  const data = {
    id: Array.from({ length: numRows }, (_, i) => i + 1),
    value: Array.from({ length: numRows }, () => Math.random()),
    category: Array.from(
      { length: numRows },
      () => CATEGORIES[Math.floor(Math.random() * CATEGORIES.length)],
    ),
  };

  // Build the Arrow Table with tableFromArrays — the supported construction
  // API on current apache-arrow releases. The older Vector.from/Table.from
  // entry points were removed in the v7+ API refactor and throw at runtime.
  const { tableFromArrays } = require('apache-arrow');
  const table = tableFromArrays(data);

  // Send via SmartSend with type="table"; it chooses inline NATS vs. the
  // HTTP fileserver based on payload size and returns a transport envelope.
  const env = await SmartSend(
    ANALYSIS_SUBJECT,
    table,
    "table",
    { correlationId }
  );

  console.log(`Sent large table with ${numRows} rows`);
  console.log(`Correlation ID: ${correlationId}`);
  console.log(`Transport: ${env.transport}`);
  console.log(`URL: ${env.url || 'N/A'}`);
}
// Run the sender. On failure, log the error AND set a non-zero exit code so
// callers/CI can detect the failed send (bare console.error would exit 0).
sendLargeTable().catch((err) => {
  console.error(err);
  process.exitCode = 1;
});