1 vote

I really can't understand why, when I run a bulk insert, I lose the previous data in the same collection without executing any delete operation. This is weird.

Any ideas?

var client = new elasticsearch.Client( {  
  hosts: [
    'http://localhost:9200/'    
  ]
})

. . .

InserTweets: function (arrayobj, callback) {
        var items=[];

        var count=1;
        arrayobj.forEach(element => {
            items.push({ index:  { _index: 'twitter', _type: 'tweet', _id: count }},element);
            count++;
        });

        client.bulk({ body: items }, function (err, resp, status) {
            if (err) {
                console.log(err);
            }
            callback(err, resp, status);
        });
    }

1 Answer

4 votes

You are setting the _id to the count, so on the second run the bulk operation overwrites/updates the existing records with the new ones instead of adding to them.

The _id needs to be unique for each record.

Does element have anything unique, like its own id, which you could use?
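
As a minimal sketch, assuming each tweet object carries its own unique identifier such as id_str (that field name is an assumption; use whatever unique field your objects actually have), you could either pass that value as the _id, or omit _id entirely and let Elasticsearch generate one:

InserTweets: function (arrayobj, callback) {
    var items = [];

    arrayobj.forEach(element => {
        // Use the tweet's own unique id so re-indexing updates the same document
        // instead of colliding with documents from a previous run.
        // (element.id_str is an assumed field name from the Twitter API.)
        items.push({ index: { _index: 'twitter', _type: 'tweet', _id: element.id_str } }, element);

        // Alternatively, omit _id and let Elasticsearch auto-generate a unique one:
        // items.push({ index: { _index: 'twitter', _type: 'tweet' } }, element);
    });

    client.bulk({ body: items }, function (err, resp, status) {
        callback(err, resp, status);
    });
}

With auto-generated ids every bulk call creates new documents, so only use that option if duplicates between runs are acceptable; reusing the tweet's own id makes repeated runs idempotent.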