2 // Created by Peng Li on 30/4/2018.
4 #include "ai_suanzi_rtmpclient_Ffmpeg.h"
5 #include <android/native_window.h>
6 #include <android/native_window_jni.h>
10 #include "libavformat/avformat.h"
11 #include "libavcodec/avcodec.h"
12 #include "libswscale/swscale.h"
13 #include "libavutil/imgutils.h"
14 #include "libavutil/time.h"
15 #include "libavdevice/avdevice.h"
// Global muxer/encoder state shared by init()/process()/flush()/close().
AVFormatContext *ofmt_ctx;    // FLV output context: created in init(), freed in close()
AVCodecContext* pCodecCtx;    // H.264 encoder context: allocated/opened in init()
27 void custom_log(void *ptr, int level, const char* fmt, va_list vl){
31 /*FILE *fp=fopen("/storage/emulated/0/av_log.txt","a+");
52 JNIEXPORT jstring JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_getVersion (JNIEnv *env, jobject obj) {
53 jint v = avformat_version();
54 LOGE("######### Ffmpeg JNI version i= %d", v);
57 /*AVFormatContext *pFormatCtx = avformat_alloc_context();
58 avdevice_register_all();
59 av_log_set_callback(custom_log);
60 AVInputFormat *ifmt=av_find_input_format("video4linux2");
61 LOGE("===%s===", ifmt->name);
62 if(avformat_open_input(&pFormatCtx,"/dev/video0",ifmt,NULL)!=0){
63 LOGE("Couldn't open input stream.\n");
64 return env->NewStringUTF("===== error =======");
69 return env->NewStringUTF("====== Ffmpeg call =======");
72 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_init (JNIEnv *env, jobject obj, jint width, jint height) {
74 //const char* out_path = "/storage/emulated/0/Movies/output.flv";
76 //const char* out_path = "rtmp://192.168.1.35:1935/myapp/suanzi";
77 const char* out_path = "/storage/sdcard0/output.flv";
81 LOGE("Ffmpeg init, width=%d, heigh=%d", width, height);
85 y_length=width*height;
86 uv_length=width*height/4;
92 avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_path);
93 //output encoder initialize
94 pCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
96 LOGE("Can not find encoder!\n");
99 pCodecCtx = avcodec_alloc_context3(pCodec);
100 pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
101 pCodecCtx->width = width;
102 pCodecCtx->height = height;
103 pCodecCtx->time_base.num = 1;
104 pCodecCtx->time_base.den = 30;
105 pCodecCtx->bit_rate = 800000;
106 pCodecCtx->gop_size = 300;
107 /* Some formats want stream headers to be separate. */
108 if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
109 pCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
112 //pCodecCtx->me_range = 16;
113 //pCodecCtx->max_qdiff = 4;
114 //pCodecCtx->qcompress = 0.6;
115 pCodecCtx->qmin = 10;
116 pCodecCtx->qmax = 51;
118 pCodecCtx->max_b_frames = 3;
119 // Set H264 preset and tune
120 AVDictionary *param = 0;
121 av_dict_set(¶m, "preset", "ultrafast", 0);
122 av_dict_set(¶m, "tune", "zerolatency", 0);
124 if (avcodec_open2(pCodecCtx, pCodec, ¶m) < 0){
125 LOGE("Failed to open encoder!\n");
129 //Add a new stream to output,should be called by the user before avformat_write_header() for muxing
130 video_st = avformat_new_stream(ofmt_ctx, pCodec);
131 if (video_st == NULL){
134 video_st->time_base.num = 1;
135 video_st->time_base.den = 30;
136 video_st->codec = pCodecCtx;
138 //Open output URL,set before avformat_write_header() for muxing
140 if (( ret = avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_READ_WRITE)) < 0){
141 LOGE("Failed to open output file! return :%d\n", ret);
146 avformat_write_header(ofmt_ctx, NULL);
148 start_time = av_gettime();
152 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_flush (JNIEnv *env, jobject obj){
156 if (!(ofmt_ctx->streams[0]->codec->codec->capabilities & CODEC_CAP_DELAY))
161 av_init_packet(&enc_pkt);
162 ret = avcodec_encode_video2(ofmt_ctx->streams[0]->codec, &enc_pkt,
170 LOGE("Flush Encoder: Succeed to encode 1 frame!\tsize:%5d\n", enc_pkt.size);
173 AVRational time_base = ofmt_ctx->streams[0]->time_base;//{ 1, 1000 };
174 AVRational r_framerate1 = { 60, 2 };
175 AVRational time_base_q = { 1, AV_TIME_BASE };
176 //Duration between 2 frames (us)
177 int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1)); //内部时间戳
179 enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
180 enc_pkt.dts = enc_pkt.pts;
181 enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base);
183 //转换PTS/DTS(Convert PTS/DTS)
186 ofmt_ctx->duration = enc_pkt.duration * framecnt;
188 /* mux encoded frame */
189 ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
194 av_write_trailer(ofmt_ctx);
198 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_close (JNIEnv *env, jobject obj){
200 avcodec_close(video_st->codec);
201 avio_close(ofmt_ctx->pb);
202 avformat_free_context(ofmt_ctx);
206 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_process (JNIEnv *env, jobject obj, jbyteArray yuv){
211 //LOGE(" process data - ffmpeg");
212 pFrameYUV = av_frame_alloc();
213 uint8_t *out_buffer = (uint8_t *)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
214 avpicture_fill((AVPicture *)pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
216 //安卓摄像头数据为NV21格式,此处将其转换为YUV420P格式
217 jbyte* in= (jbyte*)env->GetByteArrayElements(yuv,0);
218 memcpy(pFrameYUV->data[0],in,y_length);
219 for(i=0;i<uv_length;i++)
221 *(pFrameYUV->data[2]+i)=*(in+y_length+i*2);
222 *(pFrameYUV->data[1]+i)=*(in+y_length+i*2+1);
225 pFrameYUV->format = AV_PIX_FMT_YUV420P;
226 pFrameYUV->width = yuv_width;
227 pFrameYUV->height = yuv_height;
231 av_init_packet(&enc_pkt);
232 ret = avcodec_encode_video2(pCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame);
233 av_frame_free(&pFrameYUV);
235 if (enc_got_frame == 1){
236 //LOGE("Succeed to encode frame: %5d\tsize:%5d\n", framecnt, enc_pkt.size);
238 enc_pkt.stream_index = video_st->index;
241 AVRational time_base = ofmt_ctx->streams[0]->time_base;//{ 1, 1000 };
242 AVRational r_framerate1 = {60, 2 };//{ 50, 2 };
243 AVRational time_base_q = { 1, AV_TIME_BASE };
244 //Duration between 2 frames (us)
245 int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1)); //内部时间戳
247 //enc_pkt.pts = (double)(framecnt*calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
248 enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
249 enc_pkt.dts = enc_pkt.pts;
250 enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
254 int64_t pts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q);
255 int64_t now_time = av_gettime() - start_time;
256 if (pts_time > now_time)
257 av_usleep(pts_time - now_time);
259 ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
260 av_free_packet(&enc_pkt);
265 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_play (JNIEnv *env, jobject obj, jobject surface, jstring fname){
271 LOGE("###### video play #####");
272 // char * file_name = "/storage/emulated/0/Movies/big_buck_bunny_720p_10mb.mp4";
273 const char * file_name = env->GetStringUTFChars(fname, 0);
276 avdevice_register_all();
279 AVFormatContext * pFormatCtx = avformat_alloc_context();
283 av_log_set_callback(custom_log);
285 AVInputFormat *ifmt=av_find_input_format("video4linux2");
286 LOGE("===%s===", ifmt->name);
287 if(avformat_open_input(&pFormatCtx,"/dev/video0",ifmt,NULL)!=0){
288 LOGE("Couldn't open file:\n");
289 return -1; // Couldn't open file
297 if(avformat_open_input(&pFormatCtx, file_name, NULL, NULL)!=0) {
299 LOGE("Couldn't open file:%s\n", file_name);
300 return -1; // Couldn't open file
303 // Retrieve stream information
304 if(avformat_find_stream_info(pFormatCtx, NULL)<0) {
305 LOGE("Couldn't find stream information.");
309 // Find the first video stream
310 int videoStream = -1, i;
311 for (i = 0; i < pFormatCtx->nb_streams; i++) {
312 if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO
313 && videoStream < 0) {
317 if(videoStream==-1) {
318 LOGE("Didn't find a video stream.");
319 return -1; // Didn't find a video stream
322 // Get a pointer to the codec context for the video stream
323 AVCodecContext * pCodecCtx = pFormatCtx->streams[videoStream]->codec;
324 LOGE("============= %d ========",__LINE__);
325 // Find the decoder for the video stream
326 AVCodec * pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
328 LOGE("Codec not found.");
329 return -1; // Codec not found
332 if(avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
333 LOGE("Could not open codec.");
334 return -1; // Could not open codec
338 ANativeWindow* nativeWindow = ANativeWindow_fromSurface(env, surface);
341 int videoWidth = pCodecCtx->width;
342 int videoHeight = pCodecCtx->height;
344 // 设置native window的buffer大小,可自动拉伸
345 ANativeWindow_setBuffersGeometry(nativeWindow, videoWidth, videoHeight, WINDOW_FORMAT_RGBA_8888);
346 ANativeWindow_Buffer windowBuffer;
348 if(avcodec_open2(pCodecCtx, pCodec, NULL)<0) {
349 LOGE("Could not open codec.");
350 return -1; // Could not open codec
353 LOGE("stream format:%s", pFormatCtx->iformat->name);
354 LOGE("duration :%lld", (pFormatCtx->duration) / 1000000);
355 LOGE("Width, Height:%d x %d", pCodecCtx->width, pCodecCtx->height);
356 LOGE("Decoder name:%s", pCodec->name);
358 // Allocate video frame
359 AVFrame * pFrame = av_frame_alloc();
362 AVFrame * pFrameRGBA = av_frame_alloc();
363 if(pFrameRGBA == NULL || pFrame == NULL) {
364 LOGE("Could not allocate video frame.");
368 // Determine required buffer size and allocate buffer
369 int numBytes=av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height, 1);
370 uint8_t * buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
371 av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA,
372 pCodecCtx->width, pCodecCtx->height, 1);
374 // 由于解码出来的帧格式不是RGBA的,在渲染之前需要进行格式转换
375 struct SwsContext *sws_ctx = sws_getContext(pCodecCtx->width,
388 while(av_read_frame(pFormatCtx, &packet)>=0) {
389 // Is this a packet from the video stream?
390 if(packet.stream_index==videoStream) {
392 // Decode video frame
393 avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
395 // 并不是decode一次就可解码出一帧
398 // lock native window buffer
399 ANativeWindow_lock(nativeWindow, &windowBuffer, 0);
402 sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
403 pFrame->linesize, 0, pCodecCtx->height,
404 pFrameRGBA->data, pFrameRGBA->linesize);
407 uint8_t * dst = (uint8_t*) windowBuffer.bits;
408 int dstStride = windowBuffer.stride * 4;
409 uint8_t * src = (uint8_t*) (pFrameRGBA->data[0]);
410 int srcStride = pFrameRGBA->linesize[0];
412 // 由于window的stride和帧的stride不同,因此需要逐行复制
414 for (h = 0; h < videoHeight; h++) {
415 memcpy(dst + h * dstStride, src + h * srcStride, srcStride);
418 ANativeWindow_unlockAndPost(nativeWindow);
422 av_packet_unref(&packet);
428 // Free the YUV frame
432 avcodec_close(pCodecCtx);
434 // Close the video file
435 avformat_close_input(&pFormatCtx);
437 env->ReleaseStringUTFChars(fname, file_name);