Array Access / Node.js – Stack Overflow

I have a question related to node.js and a **potential** concurrency problem – I have tested my application thoroughly and have not yet encountered any problems. However, I want to double-check with you, as it could lead to a race condition that I'm not aware of.

I have an application which receives via REST-POST (function ProcessRestQuery) ~ 2000 requests per second that I do store in an array view_requests.

Every 30 seconds, I'm going to dump those entries to a file, writing only those older than the current timestamp and then removing them (via filter).

Question: do you see any potential concurrency problems here? I'm not 100% sure whether the filter function is concurrency-safe.

In C++ I would have used a mutex to protect those operations, but under node.js I'm not sure.

Thank you very much
Sebastian

// In-memory buffer of incoming requests; flushed to disk periodically
// by DumpMemoryToFile.
let view_requests = [];

/**
 * Buffers one incoming REST POST in `view_requests` and acknowledges it.
 *
 * Note: Array.prototype.push is synchronous, so `await`ing it (as the
 * original did) is a no-op. Node runs JS on a single thread, so this push
 * can never be interleaved with a `.filter()` running in the flush job —
 * no mutex is needed.
 *
 * @param {object} req - Express request; body is expected to carry
 *                       TEST_ID / TEST_ID2 / TEST_ID_3 / TS (unvalidated —
 *                       missing fields become "undefined" in the CSV).
 * @param {object} res - Express response; always answers 200 / {message:"OK"}.
 */
async function ProcessRestQuery(req, res) {
    const FORMDATA = req.body;

    // Fix: the original was missing the `});` closing this object literal,
    // which is a syntax error.
    view_requests.push({
        timestamp: new Date(),
        CSV: [FORMDATA.TEST_ID, FORMDATA.TEST_ID2, FORMDATA.TEST_ID_3, FORMDATA.TS].join(';'),
    });

    res.status(200).json({ message: "OK" });
}


// Route every POST under /test/* straight into the request buffer.
// ProcessRestQuery already has the (req, res) handler signature, so it
// can be registered directly; the extra `next` argument Express passes
// is simply ignored.
app.post("/test/*", ProcessRestQuery);


//this function will read out the array "view_requests" and dump all entries as a CSV File to disk
/**
 * Flushes all buffered requests older than "now" to out/data.dat as CSV
 * and removes them from `view_requests`.
 *
 * Concurrency note: Node executes JS on a single thread, so the two
 * `.filter()` calls below can never be interleaved with ProcessRestQuery —
 * no mutex is needed. The only asynchronous gap is the file write, and
 * because removal keeps everything with `timestamp >= now` (the snapshot
 * taken before the write), requests that arrive during the write survive.
 *
 * Fixes vs. the original:
 *  - `await fs.writeFile(path, data, cb)` awaited `undefined` (the callback
 *    API returns nothing), so the function "finished" before the write did
 *    and write errors never reached the try/catch. The write is now wrapped
 *    in a Promise so errors flow into the existing catch block.
 *  - Removed the meaningless `await`s on the synchronous filter/map/join.
 *
 * NOTE(review): the file is overwritten (not appended) on every flush, so
 * each dump replaces the previous one — presumably intentional; confirm.
 */
async function DumpMemoryToFile() {
    try {
        const now = Date.now();

        // Snapshot of entries to flush. `timestamp` is a Date; `Date < number`
        // works because the Date is coerced to its millisecond value.
        const view_oldRequests = view_requests.filter(
            (view_request) => view_request.timestamp < now
        );

        if (view_oldRequests.length > 0) {
            const csvs = view_oldRequests
                .map((view_oldRequest) => view_oldRequest.CSV)
                .join('\n');

            // Adapt the callback API so the surrounding try/catch actually
            // sees write failures.
            await new Promise((resolve, reject) => {
                fs.writeFile("out/data" + '.dat', csvs, (err) => {
                    if (err) reject(err);
                    else resolve();
                });
            });
            console.log('File created');

            // Drop only what was flushed; anything pushed while the write was
            // in flight has timestamp >= now and is kept for the next run.
            view_requests = view_requests.filter(
                (view_request) => view_request.timestamp >= now
            );
        }
    } catch (error) {
        // Best-effort error log; runtime string kept identical to the original.
        fs.appendFile('error.txt', `Server: setInterval Fehler :` + error.message, (err) => {
            console.error(`Server: setInterval Error:`, error.message);
        });
    }
}

// Flush the buffer to disk every 30 seconds. DumpMemoryToFile handles its
// own errors, so the returned promise is intentionally discarded.
setInterval(() => {
    void DumpMemoryToFile();
}, 30 * 1000);

I observed the node.js program both in the debugger and under a load-testing environment with the expected results; however, I'm not sure whether a race condition could lead to a loss of data.

Read more here: Source link

Array Access / Node.js – Stack Overflow

I have a question related to node.js and a **potential** concurrency problem – I have tested my application thoroughly and have not yet encountered any problems. However, I want to double-check with you, as it could lead to a race condition that I'm not aware of.

I have an application which receives via REST-POST (function ProcessRestQuery) ~ 2000 requests per second that I do store in an array view_requests.

Every 30 seconds, I'm going to dump those entries to a file, writing only those older than the current timestamp and then removing them (via filter).

Question: do you see any potential concurrency problems here? I'm not 100% sure whether the filter function is concurrency-safe.

In C++ I would have used a mutex to protect those operations, but under node.js I'm not sure.

Thank you very much
Sebastian

// In-memory buffer of incoming requests; flushed to disk periodically
// by DumpMemoryToFile.
let view_requests = [];

/**
 * Buffers one incoming REST POST in `view_requests` and acknowledges it.
 *
 * Note: Array.prototype.push is synchronous, so `await`ing it (as the
 * original did) is a no-op. Node runs JS on a single thread, so this push
 * can never be interleaved with a `.filter()` running in the flush job —
 * no mutex is needed.
 *
 * @param {object} req - Express request; body is expected to carry
 *                       TEST_ID / TEST_ID2 / TEST_ID_3 / TS (unvalidated —
 *                       missing fields become "undefined" in the CSV).
 * @param {object} res - Express response; always answers 200 / {message:"OK"}.
 */
async function ProcessRestQuery(req, res) {
    const FORMDATA = req.body;

    // Fix: the original was missing the `});` closing this object literal,
    // which is a syntax error.
    view_requests.push({
        timestamp: new Date(),
        CSV: [FORMDATA.TEST_ID, FORMDATA.TEST_ID2, FORMDATA.TEST_ID_3, FORMDATA.TS].join(';'),
    });

    res.status(200).json({ message: "OK" });
}


// Route every POST under /test/* straight into the request buffer.
// ProcessRestQuery already has the (req, res) handler signature, so it
// can be registered directly; the extra `next` argument Express passes
// is simply ignored.
app.post("/test/*", ProcessRestQuery);


//this function will read out the array "view_requests" and dump all entries as a CSV File to disk
/**
 * Flushes all buffered requests older than "now" to out/data.dat as CSV
 * and removes them from `view_requests`.
 *
 * Concurrency note: Node executes JS on a single thread, so the two
 * `.filter()` calls below can never be interleaved with ProcessRestQuery —
 * no mutex is needed. The only asynchronous gap is the file write, and
 * because removal keeps everything with `timestamp >= now` (the snapshot
 * taken before the write), requests that arrive during the write survive.
 *
 * Fixes vs. the original:
 *  - `await fs.writeFile(path, data, cb)` awaited `undefined` (the callback
 *    API returns nothing), so the function "finished" before the write did
 *    and write errors never reached the try/catch. The write is now wrapped
 *    in a Promise so errors flow into the existing catch block.
 *  - Removed the meaningless `await`s on the synchronous filter/map/join.
 *
 * NOTE(review): the file is overwritten (not appended) on every flush, so
 * each dump replaces the previous one — presumably intentional; confirm.
 */
async function DumpMemoryToFile() {
    try {
        const now = Date.now();

        // Snapshot of entries to flush. `timestamp` is a Date; `Date < number`
        // works because the Date is coerced to its millisecond value.
        const view_oldRequests = view_requests.filter(
            (view_request) => view_request.timestamp < now
        );

        if (view_oldRequests.length > 0) {
            const csvs = view_oldRequests
                .map((view_oldRequest) => view_oldRequest.CSV)
                .join('\n');

            // Adapt the callback API so the surrounding try/catch actually
            // sees write failures.
            await new Promise((resolve, reject) => {
                fs.writeFile("out/data" + '.dat', csvs, (err) => {
                    if (err) reject(err);
                    else resolve();
                });
            });
            console.log('File created');

            // Drop only what was flushed; anything pushed while the write was
            // in flight has timestamp >= now and is kept for the next run.
            view_requests = view_requests.filter(
                (view_request) => view_request.timestamp >= now
            );
        }
    } catch (error) {
        // Best-effort error log; runtime string kept identical to the original.
        fs.appendFile('error.txt', `Server: setInterval Fehler :` + error.message, (err) => {
            console.error(`Server: setInterval Error:`, error.message);
        });
    }
}

// Flush the buffer to disk every 30 seconds. DumpMemoryToFile handles its
// own errors, so the returned promise is intentionally discarded.
setInterval(() => {
    void DumpMemoryToFile();
}, 30 * 1000);

I observed the node.js program both in the debugger and under a load-testing environment with the expected results; however, I'm not sure whether a race condition could lead to a loss of data.

Read more here: Source link