I have an application that writes to a DynamoDB table, and I'm trying to get Kinesis to do my aggregation then write the aggregated data to another DynamoDB table.
Streams are enabled on my DynamoDB table, and I have a Lambda trigger on the stream as follows:
'use strict';
// AWS SDK v2 client; region/credentials come from the Lambda execution environment.
var AWS = require('aws-sdk');
// Module-scope client so the connection is reused across warm invocations.
var kinesis = new AWS.Kinesis();
exports.handler = (event, context, callback) => {
event.Records.forEach((record) => {
var myValue = record.dynamodb.NewImage.myValue.N;
var partitionKey = record.key.S;
var data = '{"VALUE":"' + myValue + '"}';
var recordParams = {
Data: data,
PartitionKey: partitionKey,
StreamName: 'MyStreamName'
};
console.log('Try Put to Kinesis Stream');
kinesis.putRecord(recordParams, function(err, data) {
if (err) {
console.log('Failed Put');
} else {
console.log('Successful Put');
}
});
});
};
This writes successfully to my Kinesis Stream when I have three or four elements in the Lambda test event.
When I enable my trigger it does not write to my Kinesis Stream at all. There appears to be about 100 elements coming in at a time. In Cloudwatch I see the 'Try Put to Kinesis Stream' message, but I don't even see the Success/Failure messages.
Am I doing something completely wrong, or is there a better approach to this problem?
If DynamoDB's stream could feed straight into Kinesis Analytics that would be my first prize :)