Stream a big text file to a Node.js server

I have a Node.js client streaming a text file of around 200k to a Node.js server app, which saves the contents in a MongoDB collection.

It was working fine with small files, but with big text files the server logs 'data received from...' several times while streaming a single file, as if it were breaking the file into pieces: for a 170k file I end up with 3 entries in MongoDB, when I just want one.

Thanks for the help

This is the server:

var net = require('net'),
    fs = require('fs');

var server = net.createServer(function(conn) {
    console.log('server connected');
});

var HOST = '127.0.0.1';
var PORT = 9001;

// DB config
var databaseUrl = "logserver";
var collections = ["logs"];
var db = require("mongojs").connect(databaseUrl, collections);


server.listen(PORT, HOST, function() {
    // listening
    console.log('Listening on port ' + PORT + '\n');

    // fires for every new connection, in addition to the createServer callback above
    server.on('connection', function(conn) {
        var current_time = Date.now();

        console.log('connection made...\n');

        // 'data' fires once per received chunk, so this saves one document per chunk
        conn.on('data', function(data) {
            console.log('data received from ' + conn.remoteAddress);
            db.logs.save({log_timestamp: current_time, client_ip: conn.remoteAddress, log_data: data.toString('utf8')}, function(err, saved) {
                if (err || !saved) console.log("- Log not saved -");
                else console.log("- Log saved -");
            });
        });
    });
});

And this is the client:

var net = require('net');
var fs = require('fs');


var PORT = 9001;
var HOST = '127.0.0.1';
var FILEPATH = 'file.txt';

var client = new net.Socket();

// connect to the server
client.connect(PORT, HOST, function() {
    console.log('Client connected to server');

    // send a file to the server
    var fileStream = fs.createReadStream(FILEPATH);
    fileStream.on('error', function(err) {
        console.log(err);
    });

    fileStream.on('open', function() {
        console.log('Sending log file...');
        // stream the file into the socket; pipe ends the socket when the file is done
        fileStream.pipe(client);
    });

});

// handle close
client.on('close', function() {
    console.log('Connection finished');
});

client.on('error', function(err) {
    console.log(err);
});
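
Note that fileStream.pipe(client) also ends the socket once the whole file has been read (pipe's default end option is true), which is how the server can tell when the file is complete. Roughly, and ignoring the backpressure handling that pipe does for you, the pipe call amounts to this sketch:

fileStream.on('data', function(chunk) {
    client.write(chunk);   // forward each chunk to the socket
});
fileStream.on('end', function() {
    client.end();          // end the socket so the server sees an 'end' event
});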


Solution 1:[1]

The file is being divided into chunks because TCP is a byte stream with no message boundaries: the socket's 'data' event fires once for each chunk as it arrives, so you can't prevent the splitting. What you can do is reassemble the file on the receiving side before saving it. Here's a simple way to do so:

var chunks = [];
conn.on('data', function(chunk) {
    chunks.push(chunk);
});
conn.on('end', function() {
    var data = Buffer.concat(chunks);
    db.logs.save(...);
});
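
For reference, here is a sketch of how that fits into the server from the question, assuming one file is sent per connection (the save call and field names are taken from the original server code):

server.on('connection', function(conn) {
    var current_time = Date.now();
    var remote_ip = conn.remoteAddress;
    var chunks = [];

    conn.on('data', function(chunk) {
        // just collect the chunks; don't save anything yet
        chunks.push(chunk);
    });

    conn.on('end', function() {
        // the client ended the socket (fileStream.pipe(client) does this when the
        // file has been fully sent), so reassemble and save a single document
        var data = Buffer.concat(chunks);
        db.logs.save({log_timestamp: current_time, client_ip: remote_ip, log_data: data.toString('utf8')}, function(err, saved) {
            if (err || !saved) console.log("- Log not saved -");
            else console.log("- Log saved -");
        });
    });

    conn.on('error', function(err) {
        console.log(err);
    });
});

If the client ever needs to send more than one file over the same connection, keep in mind that 'end' only fires once per socket, so you would need some framing (for example a length prefix or a delimiter) to know where each file stops.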

Sources

This article follows the attribution requirements of Stack Overflow and is licensed under CC BY-SA 3.0.

Source: Stack Overflow

Solution Source
Solution 1 ZachRabbit