2
votes

I have sensor IoT data which is being stored in Cosmos DB. I need minute-level aggregation of the data for the last 30 days to show in a web app, so I have written a stored procedure to do the group-by and aggregation of the sensor data.

It is giving the following error:

Failed to execute stored procedure something for collection newData: {"code":400,"body":"{\"code\":\"BadRequest\",\"message\":\"Message: {\\"Errors\\":[\\"Encountered exception while executing function. Exception = Error: Resulting message would be too large because of \\\\"x-ms-documentdb-script-log-results\\\\". Return from script with current message and use continuation token to call the script again or modify your script.\\r\\nStack trace: Error: Resulting message would be too large because of \\\\"x-ms-documentdb-script-log-results\\\\". Return from script with current message and use continuation token to call the script again or modify your script.\\n at validateSize (sa\\"]}\r\nActivityId: 46713736-fe18-4fd1-8df1-49fa615c7289, Request URI: /apps/35edbe01-33d4-4189-9959-240fe985a75e/services/f3955cd0-044e-4a48-ad24-ae51a24c13b8/partitions/784e5371-950b-476d-a765-52ddb784f8dd/replicas/131850819031922581p/, RequestStats: \r\nRequestStartTime: 2018-11-06T21:08:06.7386246Z, Number of regions attempted: 1\r\n, SDK: Microsoft.Azure.Documents.Common/2.1.0.0\"}","activityId":"46713736-fe18-4fd1-8df1-49fa615c7289","substatus":413}

Because the sensors emit 1-second-level data for which I need to find 1-minute aggregates, and I don't know in advance which sensor names to put inside avg(), I cannot use a Stream Analytics job.

The only option left for me is to run the stored procedure and get back the 1-minute aggregates.

I have used JavaScript for the stored procedure, and in my API call I am using Java Spring Boot. Kindly give me any suggestion on how I can get past this Cosmos DB limitation, or how I can store the 1-minute aggregates in Cosmos DB so that I can retrieve those records.

The following is my procedure; I am passing 3 string parameters, like "pressure,Temp,volume", "123444", "345552":

function something(variable1 , variable2 , variable3) {
    // Aggregates raw per-second sensor readings into per-minute averages,
    // server-side inside Cosmos DB.
    //
    // Parameters (all strings, as passed by the caller):
    //   variable1 - comma-separated sensor field names, e.g. "pressure,Temp,volume"
    //   variable2 - window start as an epoch-seconds value (compared against r._ts)
    //   variable3 - window end as an epoch-seconds value
    //
    // Response body: an array of
    //   { variableName, data: [{ 'x-axis': minuteLabel, 'y-axis': average }] }
    // or the string 'no docs found' when the window contains no documents.

    var parameters = variable1.split(',');
    var collection = getContext().getCollection();

    // One result skeleton per sensor, plus the projected column list for the query.
    var results = [];
    var projectedColumns = [];
    for (var i = 0; i < parameters.length; i += 1) {
        results.push({ variableName: parameters[i], data: [] });
        projectedColumns.push('r.' + parameters[i]);
    }

    // Explicit spacing around keywords: the original version only parsed
    // because of an accidental trailing space before 'FROM'.
    var queryString = 'SELECT ' + projectedColumns.join(' , ') +
        ' , r._ts FROM root r WHERE r._ts BETWEEN ' + variable2 + ' AND ' + variable3;

    var isAccepted = collection.queryDocuments(
        collection.getSelfLink(),
        queryString,
        function (err, feed, options) {
            if (err) throw err;

            var response = getContext().getResponse();

            // Empty window: report it explicitly rather than returning [].
            if (!feed || !feed.length) {
                response.setBody('no docs found');
                return;
            }

            // Bucket every document by its UTC minute.
            // NOTE: Cosmos DB's r._ts is epoch *seconds*, while the Date
            // constructor expects milliseconds — hence the * 1000 (the
            // original omitted it and produced 1970-era keys).
            // getUTCMonth() is 0-based, hence the + 1.
            var dataPoints = {};
            feed.forEach(function (item) {
                var d = new Date(item._ts * 1000);
                var minuteKey = d.getUTCFullYear() + '-' + (d.getUTCMonth() + 1) + '-' +
                    d.getUTCDate() + ' ' + d.getUTCHours() + ':' + d.getUTCMinutes() + ':00';
                item['timeInMinutes'] = minuteKey;
                if (!dataPoints[minuteKey]) {
                    dataPoints[minuteKey] = [];
                }
                dataPoints[minuteKey].push(item);
            });

            // For each minute bucket, average every requested sensor field.
            Object.keys(dataPoints).forEach(function (minuteKey) {
                var bucket = dataPoints[minuteKey];
                for (var p = 0; p < parameters.length; p += 1) {
                    var total = 0;
                    for (var j = 0; j < bucket.length; j += 1) {
                        total += bucket[j][parameters[p]];
                    }
                    results[p].data.push({
                        'x-axis': minuteKey,
                        'y-axis': total / bucket.length
                    });
                }
            });

            response.setBody(results);
        });

    if (!isAccepted) throw new Error('The query was not accepted by the server.');

}

1
Can you share what RequestOptions, if any, you're passing along? You're currently returning logs in your request, which makes the response very large. Can you remove those logs?Chris Anderson-MSFT
@ChrisAnderson-MSFT I have my code and request parameters mentioned , can suggest me how can i remove logs in response.Amjath Khan
I don't see your RequestOptions configuration. If possible, set script logging to false in your request options before you execute. docs.microsoft.com/en-us/java/api/… Alternatively, you can delete some of your console logs. Right now, your console statements are logging a bunch of stuff, which makes the response throw.Chris Anderson-MSFT
@ChrisAnderson-MSFT i have removed the console.log statements.Amjath Khan
@ChrisAnderson-MSFT I am realtively new to javascript environment i have added setScriptLoggingEnabled(false); It is showing setScriptLoggingEnable is undefined. Do i need to import any thing here.Amjath Khan

1 Answers

1
votes

Based on @Chris Anderson-MSFT suggestion in comments to OP's question.


In my case, my stored procedure written in JavaScript had a few logging statements. Simply removing the console.log calls worked for me.