Skip to content

Instantly share code, notes, and snippets.

@un4ckn0wl3z
Last active September 12, 2025 04:09
Show Gist options
  • Select an option

  • Save un4ckn0wl3z/cdac58f45fc7cb5698225031dd183687 to your computer and use it in GitHub Desktop.

Select an option

Save un4ckn0wl3z/cdac58f45fc7cb5698225031dd183687 to your computer and use it in GitHub Desktop.

Revisions

  1. un4ckn0wl3z revised this gist Sep 12, 2025. 1 changed file with 19 additions and 20 deletions.
    39 changes: 19 additions & 20 deletions stream_readable.js
    Original file line number Diff line number Diff line change
    @@ -7,40 +7,39 @@ const data = Array.from({ length: 100000 }, (_, i) => ({
    value: Math.random().toFixed(4)
    }));

    /**
     * Async generator that converts an array of {id, name, value} records
     * into CSV text, yielded in chunks of `chunkSize` rows.
     *
     * The first yielded chunk is prefixed with the `id,name,value` header row.
     *
     * @param {Array<object>} jsonArray - Records to convert.
     * @param {number} [chunkSize=10] - Rows per yielded chunk.
     * @yields {Buffer} UTF-8 CSV bytes (header only on the first chunk).
     */
    async function* generateCsvChunks(jsonArray, chunkSize = 10) {
      let isFirstChunk = true;
      for (let i = 0; i < jsonArray.length; i += chunkSize) {
        // Take a chunk of the array
        const chunk = jsonArray.slice(i, i + chunkSize);
        // Convert to CSV
        let chunkString = '';
        if (isFirstChunk) {
          chunkString += 'id,name,value\n'; // Header for first chunk
          isFirstChunk = false;
        }
        // NOTE(review): `||` maps falsy field values (0, '') to 'N/A';
        // fine for this dataset (ids start at 1) — confirm for other data.
        chunkString += chunk
          .map(item => `${item.id || 'N/A'},${item.name || 'N/A'},${item.value || 'N/A'}`)
          .join('\n') + '\n';
        yield Buffer.from(chunkString); // Yield as Buffer for efficiency
        // Optional: Simulate real-time processing (remove if not needed)
        await new Promise(resolve => setTimeout(resolve, 10));
      }
    }

    /**
     * Alternative to NDJSON: serialize the whole array as ONE JSON document,
     * then yield the resulting string as fixed-size Buffer slices.
     *
     * @param {Array<object>} jsonArray - Records to serialize.
     * @param {number} [chunkSize=1024] - Maximum characters per yielded slice.
     * @yields {Buffer} Consecutive slices of the serialized JSON text.
     */
    async function* generateFullJson(jsonArray, chunkSize = 1024) {
      const serialized = JSON.stringify(jsonArray); // Full array as string
      let offset = 0;
      while (offset < serialized.length) {
        yield Buffer.from(serialized.slice(offset, offset + chunkSize));
        offset += chunkSize;
      }
    }

    const server = http.createServer((req, res) => {
    if (req.url === '/stream-json' && req.method === 'GET') {
    // Set headers for NDJSON streaming
    if (req.url === '/stream-csv' && req.method === 'GET') {
    // Set headers for CSV streaming
    res.writeHead(200, {
    'Content-Type': 'application/x-ndjson', // Use application/json for full JSON
    'Content-Type': 'text/csv',
    'Transfer-Encoding': 'chunked',
    'Content-Disposition': 'attachment; filename="data.jsonl"'
    'Content-Disposition': 'attachment; filename="data.csv"'
    });

    // Use NDJSON generator (swap to generateFullJson for full JSON)
    // Use CSV generator
    const { Readable } = require('stream');
    const readable = Readable.from(generateJsonChunks(data, 10));
    const readable = Readable.from(generateCsvChunks(data, 10));
    readable.pipe(res);

    // Handle errors
    @@ -59,5 +58,5 @@ const server = http.createServer((req, res) => {

    // Start server (post-revision version: the route is /stream-csv).
    server.listen(3000, () => {
      console.log('Server running at http://localhost:3000/stream-csv');
    });
  2. un4ckn0wl3z created this gist Sep 12, 2025.
    63 changes: 63 additions & 0 deletions stream_readable.js
    Original file line number Diff line number Diff line change
    @@ -0,0 +1,63 @@
    const http = require('http');

    // Sample predefined JSON array (100,000 records) used as the streaming payload.
    const data = Array.from({ length: 100000 }, (_, i) => ({
      id: i + 1,                        // 1-based record id
      name: `Item${i + 1}`,
      value: Math.random().toFixed(4)   // random value rendered as a 4-decimal string
    }));

    /**
     * Async generator yielding an array of objects as NDJSON chunks
     * (one JSON object per line).
     *
     * @param {Array<object>} jsonArray - Records to stream.
     * @param {number} [chunkSize=10] - Records per yielded chunk.
     * @param {number} [delayMs=10] - Artificial per-chunk pacing delay in ms;
     *   pass 0 to disable (previously hard-coded to 10, which made the full
     *   100k dataset take ~100 seconds to stream).
     * @yields {Buffer} UTF-8 NDJSON bytes for one chunk of records.
     */
    async function* generateJsonChunks(jsonArray, chunkSize = 10, delayMs = 10) {
      for (let i = 0; i < jsonArray.length; i += chunkSize) {
        // Take a chunk of the array
        const chunk = jsonArray.slice(i, i + chunkSize);
        // Convert to NDJSON: each object on its own line
        const chunkString = chunk.map(item => JSON.stringify(item) + '\n').join('');
        yield Buffer.from(chunkString); // Buffer avoids re-encoding downstream
        if (delayMs > 0) {
          // Optional: simulate real-time processing
          await new Promise(resolve => setTimeout(resolve, delayMs));
        }
      }
    }

    // Alternative: stream the entire array as ONE JSON document (not NDJSON),
    // emitted in fixed-size pieces of at most `chunkSize` characters.
    async function* generateFullJson(jsonArray, chunkSize = 1024) {
      const text = JSON.stringify(jsonArray); // Full array as string
      for (let start = 0; start < text.length; start += chunkSize) {
        const piece = text.slice(start, start + chunkSize);
        yield Buffer.from(piece);
      }
    }

    /**
     * HTTP server exposing GET /stream-json, which streams `data` as NDJSON.
     * Any other route returns 404.
     *
     * Fix: the original used `readable.pipe(res)`, which does NOT destroy the
     * source when the client disconnects — the throttled generator kept
     * running (10 ms per chunk, ~100 s) for every aborted request.
     * `stream.pipeline` tears down both sides and surfaces errors from either.
     */
    const server = http.createServer((req, res) => {
      if (req.url === '/stream-json' && req.method === 'GET') {
        // Set headers for NDJSON streaming
        res.writeHead(200, {
          'Content-Type': 'application/x-ndjson', // Use application/json for full JSON
          'Transfer-Encoding': 'chunked',
          'Content-Disposition': 'attachment; filename="data.jsonl"'
        });

        // Use NDJSON generator (swap to generateFullJson for full JSON)
        const { Readable, pipeline } = require('stream');
        const readable = Readable.from(generateJsonChunks(data, 10));

        pipeline(readable, res, (err) => {
          if (err) {
            console.error('Stream error:', err);
            if (!res.headersSent) {
              res.statusCode = 500;
              res.end('Internal Server Error');
            } else {
              // Headers already sent: nothing useful to write, just tear down.
              res.destroy();
            }
          }
        });
      } else {
        res.writeHead(404);
        res.end('Not Found');
      }
    });

    // Start listening; log the streaming endpoint once the port is bound.
    const PORT = 3000;
    server.listen(PORT, () => {
      console.log('Server running at http://localhost:3000/stream-json');
    });