0
votes

I'm trying to encode an image to video using ffmpeg library. I have these global params:

//Global params
AVCodec         *codec;
AVCodecContext  *codecCtx;
uint8_t         *output_buffer;
int             output_buffer_size;

I divided the encoding to 3 methods: Initialize the encoder:

jint Java_com_camera_simpledoublewebcams2_CameraPreview_initencoder(JNIEnv* env,jobject thiz){
avcodec_register_all();
avcodec_init();
av_register_all();

int fps = 30;

/* find the H263 video encoder */
codec = avcodec_find_encoder(CODEC_ID_H263);
if (!codec) {
    LOGI("avcodec_find_encoder() run fail.");
    return -5;
}

//allocate context
codecCtx = avcodec_alloc_context();

/* put sample parameters */
codecCtx->bit_rate = 400000;
/* resolution must be a multiple of two */
codecCtx->width = 176;
codecCtx->height = 144;
/* frames per second */
codecCtx->time_base = (AVRational){1,fps};
codecCtx->pix_fmt = PIX_FMT_YUV420P;
codecCtx->codec_id = CODEC_ID_H263;
codecCtx->codec_type = AVMEDIA_TYPE_VIDEO;

/* open it */
if (avcodec_open(codecCtx, codec) < 0) {
    LOGI("avcodec_open() run fail.");
    return -10;
}

//init buffer
output_buffer_size = 500000;
output_buffer = malloc(output_buffer_size);

return 0;

}

Encoding the image:

jint Java_com_camera_simpledoublewebcams2_CameraPreview_encodejpeg(JNIEnv* env,jobject thiz,jchar* cImage, jint imageSize){
int             out_size;
AVFrame         *picture;
AVFrame         *outpic;
uint8_t         *outbuffer;

//allocate frame    
picture = avcodec_alloc_frame();    
outpic = avcodec_alloc_frame();

int nbytes = avpicture_get_size(PIX_FMT_YUV420P, codecCtx->width, codecCtx->height);
outbuffer = (uint8_t*)av_malloc(nbytes);
outpic->pts = 0;

//fill picture with image
avpicture_fill((AVPicture*)picture, (uint8_t*)cImage, PIX_FMT_RGBA, codecCtx->width, codecCtx->height);
//fill outpic with empty image
avpicture_fill((AVPicture*)outpic, outbuffer, PIX_FMT_YUV420P, codecCtx->width, codecCtx->height);

//rescale the image 
struct SwsContext* fooContext = sws_getContext(codecCtx->width, codecCtx->height, 
                                                       PIX_FMT_RGBA, 
                                                       codecCtx->width, codecCtx->height, 
                                                       PIX_FMT_YUV420P, 
                                                       SWS_FAST_BILINEAR, NULL, NULL, NULL);
sws_scale(fooContext, picture->data, picture->linesize, 0, codecCtx->height, outpic->data, outpic->linesize);   

//encode the image
out_size = avcodec_encode_video(codecCtx, output_buffer, output_buffer_size, outpic);
out_size += avcodec_encode_video(codecCtx, output_buffer, output_buffer_size, outpic);

//release pictures
av_free(outbuffer);
av_free(picture);
av_free(outpic);

return out_size;

}

And closing the encoder:

void Java_com_camera_simpledoublewebcams2_CameraPreview_closeencoder(JNIEnv* env,jobject thiz){
free(output_buffer);
avcodec_close(codecCtx);
av_free(codecCtx);

}

When I send the first image, I get a result from the encoder. When I try to send another image the program crashes. I tried calling init once and then the images, then the close - didn't work. I tried calling the init and the close for every image - didn't work.

Any suggestions?

Thanks!

EDIT: After further research I found that the problem is at sws_scale method. Still don't know what is causing this issue...

1
hai bahar_p did u get the answer for this.i m also facing the same issue.this is my mail id [email protected] - rams

1 Answers

1
votes

out_size = avcodec_encode_video(codecCtx, output_buffer,output_buffer_size, outpic);

out_size += avcodec_encode_video(codecCtx, output_buffer, output_buffer_size,outpic);

Why are you encoding twice?

Perhaps the error is due to that double encoding. Try removing the second encoding.