2 // Created by Peng Li on 30/4/2018.
4 #include "ai_suanzi_rtmpclient_Ffmpeg.h"
5 #include <android/native_window.h>
6 #include <android/native_window_jni.h>
15 #include "libavformat/avformat.h"
16 #include "libavcodec/avcodec.h"
17 #include "libswscale/swscale.h"
18 #include "libavutil/imgutils.h"
19 #include "libavutil/time.h"
20 #include "libavdevice/avdevice.h"
// Global muxer/encoder state shared by init()/process()/flush()/close().
// NOTE(review): file-scope globals with no locking — assumes the Java side
// drives these entry points from a single thread; confirm.
24 AVFormatContext *ofmt_ctx;
26 AVCodecContext* pCodecCtx;
// Custom av_log callback: formats each FFmpeg log record into a line and
// (in code not visible in this chunk) forwards it to the Android log.
32 void custom_log(void *ptr, int level, const char* fmt, va_list vl){
34 /*FILE *fp=fopen("/storage/emulated/0/av_log.txt","a+");
// static so av_log_format_line can continue a multi-part message
// across successive callback invocations.
42 static int print_prefix = 1;
43 //static char prev[1024];
46 av_log_format_line(ptr, level, fmt, vl, line, sizeof(line), &print_prefix);
49 //sanitize((uint8_t *)line);
// Only forward warnings and worse (lower numeric level = more severe).
51 if (level <= AV_LOG_WARNING){
// JNI: Ffmpeg.init() (no-argument overload).  Logs the versions of the
// linked FFmpeg components, then registers devices/network support and
// installs the custom log callback.
65 JNIEXPORT void JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_init__ (JNIEnv *env, jobject obj ){
66 LOGE("########## Ffmpeg Init ##########");
67 unsigned int v = avutil_version();
68 LOGE("libavutil - %d.%d.%d", AV_VERSION_MAJOR(v), AV_VERSION_MINOR(v), AV_VERSION_MICRO(v));
69 v = avcodec_version();
70 LOGE("libavcodec - %d.%d.%d", AV_VERSION_MAJOR(v), AV_VERSION_MINOR(v), AV_VERSION_MICRO(v));
71 v = avformat_version();
72 LOGE("libavformat - %d.%d.%d", AV_VERSION_MAJOR(v), AV_VERSION_MINOR(v), AV_VERSION_MICRO(v));
73 v = avdevice_version();
74 LOGE("libavdevice - %d.%d.%d", AV_VERSION_MAJOR(v), AV_VERSION_MINOR(v), AV_VERSION_MICRO(v));
// Very verbose logging for bring-up; lower for production builds.
76 av_log_set_level(AV_LOG_TRACE);
78 avdevice_register_all();
79 avformat_network_init();
80 av_log_set_callback(custom_log);
// JNI: Ffmpeg.getVersion().  Logs the avformat version and returns a fixed
// marker string.  Also chmods the camera node so it becomes readable.
// NOTE(review): system("su -c ...") requires a rooted device and is a
// security smell — prefer granting camera access on the Java side.
84 JNIEXPORT jstring JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_getVersion (JNIEnv *env, jobject obj) {
85 jint v = avformat_version();
86 LOGE("######### Ffmpeg JNI version i= %d", v);
88 system("su -c chmod 666 /dev/video0");
90 LOGE("######### Ffmpeg JNI version i= %d", v);
// Commented-out experiment kept for reference: open /dev/video0 via v4l2.
93 /*AVFormatContext *pFormatCtx = avformat_alloc_context();
94 avdevice_register_all();
95 av_log_set_callback(custom_log);
96 AVInputFormat *ifmt=av_find_input_format("video4linux2");
97 LOGE("===%s===", ifmt->name);
98 if(avformat_open_input(&pFormatCtx,"/dev/video0",ifmt,NULL)!=0){
99 LOGE("Couldn't open input stream.\n");
100 return env->NewStringUTF("===== error =======");
105 return env->NewStringUTF("====== Ffmpeg call =======");
108 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_init (JNIEnv *env, jobject obj, jint width, jint height) {
110 //const char* out_path = "/storage/emulated/0/Movies/output.flv";
112 //const char* out_path = "rtmp://192.168.1.35:1935/myapp/suanzi";
113 const char* out_path = "rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_ac83f34ead90_cameraid";
115 // const char* out_path = "/storage/sdcard0/output.flv";
119 LOGE("Ffmpeg init, width=%d, heigh=%d", width, height);
123 y_length=width*height;
124 uv_length=width*height/4;
130 avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_path);
131 //output encoder initialize
132 pCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
134 LOGE("Can not find encoder!\n");
137 pCodecCtx = avcodec_alloc_context3(pCodec);
138 pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
139 pCodecCtx->width = width;
140 pCodecCtx->height = height;
141 pCodecCtx->time_base.num = 1;
142 pCodecCtx->time_base.den = 30;
143 pCodecCtx->bit_rate = 800000;
144 pCodecCtx->gop_size = 300;
145 /* Some formats want stream headers to be separate. */
146 if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
147 pCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
150 //pCodecCtx->me_range = 16;
151 //pCodecCtx->max_qdiff = 4;
152 //pCodecCtx->qcompress = 0.6;
153 pCodecCtx->qmin = 10;
154 pCodecCtx->qmax = 51;
156 pCodecCtx->max_b_frames = 3;
157 // Set H264 preset and tune
158 AVDictionary *param = 0;
159 av_dict_set(¶m, "preset", "ultrafast", 0);
160 av_dict_set(¶m, "tune", "zerolatency", 0);
162 if (avcodec_open2(pCodecCtx, pCodec, ¶m) < 0){
163 LOGE("Failed to open encoder!\n");
167 //Add a new stream to output,should be called by the user before avformat_write_header() for muxing
168 video_st = avformat_new_stream(ofmt_ctx, pCodec);
169 if (video_st == NULL){
172 video_st->time_base.num = 1;
173 video_st->time_base.den = 30;
174 video_st->codec = pCodecCtx;
176 //Open output URL,set before avformat_write_header() for muxing
178 if (( ret = avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_READ_WRITE)) < 0){
179 LOGE("Failed to open output file! return :%d\n", ret);
184 avformat_write_header(ofmt_ctx, NULL);
186 start_time = av_gettime();
// JNI: Ffmpeg.flush().  Drains delayed frames out of the H.264 encoder at
// end of stream, timestamps and muxes each one, then writes the FLV trailer.
// NOTE(review): interior lines (loop header, error checks, returns) are not
// visible in this chunk; comments describe only what is shown.
190 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_flush (JNIEnv *env, jobject obj){
// Nothing to drain if the codec has no delayed-frame (B-frame) capability.
194 if (!(ofmt_ctx->streams[0]->codec->codec->capabilities & CODEC_CAP_DELAY))
199 av_init_packet(&enc_pkt);
// Draining: the frame argument (on a line not shown) is NULL at EOF.
200 ret = avcodec_encode_video2(ofmt_ctx->streams[0]->codec, &enc_pkt,
208 LOGE("Flush Encoder: Succeed to encode 1 frame!\tsize:%5d\n", enc_pkt.size);
// Timestamps: frames are spaced at the nominal 30 fps (60/2) rate in
// AV_TIME_BASE units, then rescaled to the stream time base.
211 AVRational time_base = ofmt_ctx->streams[0]->time_base;//{ 1, 1000 };
212 AVRational r_framerate1 = { 60, 2 };
213 AVRational time_base_q = { 1, AV_TIME_BASE };
214 //Duration between 2 frames (us)
215 int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1)); //internal timestamp
217 enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
218 enc_pkt.dts = enc_pkt.pts;
219 enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base);
221 //Convert PTS/DTS
224 ofmt_ctx->duration = enc_pkt.duration * framecnt;
226 /* mux encoded frame */
227 ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
232 av_write_trailer(ofmt_ctx);
// JNI: Ffmpeg.close().  Tears down the encoder and muxer created by init().
// NOTE(review): av_write_trailer() is issued by flush(); confirm the Java
// side always calls flush() before close().
236 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_close (JNIEnv *env, jobject obj){
238 avcodec_close(video_st->codec);
239 avio_close(ofmt_ctx->pb);
240 avformat_free_context(ofmt_ctx);
244 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_process (JNIEnv *env, jobject obj, jbyteArray yuv){
249 //LOGE(" process data - ffmpeg");
250 pFrameYUV = av_frame_alloc();
251 uint8_t *out_buffer = (uint8_t *)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
252 avpicture_fill((AVPicture *)pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
254 //安卓摄像头数据为NV21格式,此处将其转换为YUV420P格式
255 jbyte* in= (jbyte*)env->GetByteArrayElements(yuv,0);
256 memcpy(pFrameYUV->data[0],in,y_length);
257 for(i=0;i<uv_length;i++)
259 *(pFrameYUV->data[2]+i)=*(in+y_length+i*2);
260 *(pFrameYUV->data[1]+i)=*(in+y_length+i*2+1);
263 pFrameYUV->format = AV_PIX_FMT_YUV420P;
264 pFrameYUV->width = yuv_width;
265 pFrameYUV->height = yuv_height;
269 av_init_packet(&enc_pkt);
270 ret = avcodec_encode_video2(pCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame);
271 av_frame_free(&pFrameYUV);
273 if (enc_got_frame == 1){
274 //LOGE("Succeed to encode frame: %5d\tsize:%5d\n", framecnt, enc_pkt.size);
276 enc_pkt.stream_index = video_st->index;
279 AVRational time_base = ofmt_ctx->streams[0]->time_base;//{ 1, 1000 };
280 AVRational r_framerate1 = {60, 2 };//{ 50, 2 };
281 AVRational time_base_q = { 1, AV_TIME_BASE };
282 //Duration between 2 frames (us)
283 int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1)); //内部时间戳
285 //enc_pkt.pts = (double)(framecnt*calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
286 enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
287 enc_pkt.dts = enc_pkt.pts;
288 enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
292 int64_t pts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q);
293 int64_t now_time = av_gettime() - start_time;
294 if (pts_time > now_time)
295 av_usleep(pts_time - now_time);
297 ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
298 av_free_packet(&enc_pkt);
// JNI: Ffmpeg.play(surface, fname).  Decodes a video source, converts each
// frame to RGBA with swscale and blits it into the Android Surface.
// NOTE(review): two avformat_open_input calls (v4l2 camera and file) and two
// avcodec_open2 calls appear below — the missing interior lines likely hold
// an #ifdef or early-return selecting one path; confirm against the full file.
303 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_play (JNIEnv *env, jobject obj, jobject surface, jstring fname){
309 LOGE("###### video play #####");
310 // char * file_name = "/storage/emulated/0/Movies/big_buck_bunny_720p_10mb.mp4";
311 const char * file_name = env->GetStringUTFChars(fname, 0);
314 avdevice_register_all();
317 AVFormatContext * pFormatCtx = avformat_alloc_context();
321 av_log_set_callback(custom_log);
// Path 1: open the v4l2 camera device directly.
323 AVInputFormat *ifmt=av_find_input_format("video4linux2");
324 LOGE("===%s===", ifmt->name);
325 if(avformat_open_input(&pFormatCtx,"/dev/video0",ifmt,NULL)!=0){
326 LOGE("Couldn't open file:\n");
327 return -1; // Couldn't open file
// Path 2: open the file passed in from Java.
335 if(avformat_open_input(&pFormatCtx, file_name, NULL, NULL)!=0) {
337 LOGE("Couldn't open file:%s\n", file_name);
338 return -1; // Couldn't open file
341 // Retrieve stream information
342 if(avformat_find_stream_info(pFormatCtx, NULL)<0) {
343 LOGE("Couldn't find stream information.");
347 // Find the first video stream
348 int videoStream = -1, i;
349 for (i = 0; i < pFormatCtx->nb_streams; i++) {
350 if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO
351 && videoStream < 0) {
355 if(videoStream==-1) {
356 LOGE("Didn't find a video stream.");
357 return -1; // Didn't find a video stream
360 // Get a pointer to the codec context for the video stream
361 AVCodecContext * pCodecCtx = pFormatCtx->streams[videoStream]->codec;
362 LOGE("============= %d ========",__LINE__);
363 // Find the decoder for the video stream
364 AVCodec * pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
366 LOGE("Codec not found.");
367 return -1; // Codec not found
370 if(avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
371 LOGE("Could not open codec.");
372 return -1; // Could not open codec
// Attach to the Java Surface for rendering.
376 ANativeWindow* nativeWindow = ANativeWindow_fromSurface(env, surface);
379 int videoWidth = pCodecCtx->width;
380 int videoHeight = pCodecCtx->height;
382 // Set the native window buffer size; the window scales it automatically
383 ANativeWindow_setBuffersGeometry(nativeWindow, videoWidth, videoHeight, WINDOW_FORMAT_RGBA_8888);
384 ANativeWindow_Buffer windowBuffer;
386 if(avcodec_open2(pCodecCtx, pCodec, NULL)<0) {
387 LOGE("Could not open codec.");
388 return -1; // Could not open codec
391 LOGE("stream format:%s", pFormatCtx->iformat->name);
392 LOGE("duration :%lld", (pFormatCtx->duration) / 1000000);
393 LOGE("Width, Height:%d x %d", pCodecCtx->width, pCodecCtx->height);
394 LOGE("Decoder name:%s", pCodec->name);
396 // Allocate video frame
397 AVFrame * pFrame = av_frame_alloc();
400 AVFrame * pFrameRGBA = av_frame_alloc();
401 if(pFrameRGBA == NULL || pFrame == NULL) {
402 LOGE("Could not allocate video frame.");
406 // Determine required buffer size and allocate buffer
407 int numBytes=av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height, 1);
408 uint8_t * buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
409 av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA,
410 pCodecCtx->width, pCodecCtx->height, 1);
412 // Decoded frames are not RGBA, so convert the format before rendering
413 struct SwsContext *sws_ctx = sws_getContext(pCodecCtx->width,
// Main decode/render loop.
426 while(av_read_frame(pFormatCtx, &packet)>=0) {
427 // Is this a packet from the video stream?
428 if(packet.stream_index==videoStream) {
430 // Decode video frame
431 avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
433 // A single decode call does not necessarily produce a full frame
436 // lock native window buffer
437 ANativeWindow_lock(nativeWindow, &windowBuffer, 0);
440 sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
441 pFrame->linesize, 0, pCodecCtx->height,
442 pFrameRGBA->data, pFrameRGBA->linesize);
445 uint8_t * dst = (uint8_t*) windowBuffer.bits;
446 int dstStride = windowBuffer.stride * 4;
447 uint8_t * src = (uint8_t*) (pFrameRGBA->data[0]);
448 int srcStride = pFrameRGBA->linesize[0];
450 // The window stride differs from the frame stride, so copy row by row
452 for (h = 0; h < videoHeight; h++) {
453 memcpy(dst + h * dstStride, src + h * srcStride, srcStride);
456 ANativeWindow_unlockAndPost(nativeWindow);
460 av_packet_unref(&packet);
466 // Free the YUV frame
470 avcodec_close(pCodecCtx);
472 // Close the video file
473 avformat_close_input(&pFormatCtx);
475 env->ReleaseStringUTFChars(fname, file_name);
// JNI: Ffmpeg.push(surface, url).  Capture-and-stream loop: opens the v4l2
// camera (/dev/video0), decodes its frames, converts them to YUV420P,
// re-encodes with x264 and muxes to the given RTMP URL until input ends.
// NOTE(review): "¶ms" below is mojibake for "&params" (HTML-entity
// corruption of the address-of operator) — this will not compile as-is.
// NOTE(review): out_path from GetStringUTFChars is never released; confirm
// a ReleaseStringUTFChars exists on a line not visible in this chunk.
479 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_push (JNIEnv *env, jobject obj, jobject surface, jstring url){
482 av_log_set_level(AV_LOG_TRACE);
484 avformat_network_init();
485 avdevice_register_all();
488 LOGE("====push=====");
489 // av_log_set_callback(custom_log);
491 //const char* out_path = "rtmp://192.168.1.35:1935/myapp/peng2";
492 //const char* out_path = "rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_ac83f34ead90_cameraid";
493 const char* out_path = env->GetStringUTFChars(url, 0);
494 //const char * file_name = env->GetStringUTFChars(fname, 0);
// Input side: open the camera through the v4l2 demuxer.
499 AVFormatContext *pFormatCtx = avformat_alloc_context();
501 AVInputFormat *ifmt = av_find_input_format("video4linux2");
502 if((ret = avformat_open_input(&pFormatCtx, "/dev/video0", ifmt, NULL)) != 0) {
503 // if((ret = avformat_open_input(&pFormatCtx, "/dev/bus/usb/003/007", ifmt, NULL)) != 0) {
505 LOGE("could not open file11, ret=%d, error=%s,", ret, av_err2str(ret));
509 if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
510 LOGE( "could not find stream info");
514 av_dump_format(pFormatCtx, 0, "0", 0);
517 int video_index = -1;
518 if((video_index = av_find_best_stream(pFormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, &dec, 0)) < 0){
523 AVCodecContext *pCodecCtx = pFormatCtx->streams[video_index]->codec;
524 if(avcodec_open2(pCodecCtx, dec, NULL) <0){
// Output side: FLV muxer + H.264 encoder targeting the RTMP URL.
531 //const char* out_path = "rtmp://192.168.1.35:1935/myapp/peng2";
532 //const char* out_path = "rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_ac83f34ead90_cameraid";
534 AVFormatContext *ofmt_ctx;
535 avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_path);
536 AVCodec *oDec = avcodec_find_encoder(AV_CODEC_ID_H264);
538 LOGE("Can not find endoder");
542 AVCodecContext *oCodecCtx = avcodec_alloc_context3(oDec);
543 oCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
544 oCodecCtx->width = pCodecCtx->width;
545 oCodecCtx->height = pCodecCtx->height;
546 oCodecCtx->time_base.num = 1;
547 oCodecCtx->time_base.den = 30;
548 oCodecCtx->bit_rate = 800000;
549 oCodecCtx->gop_size = 300;
550 if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
551 oCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
552 oCodecCtx->qmin = 10;
553 oCodecCtx->qmax = 51;
554 oCodecCtx->max_b_frames = 3;
// x264 low-latency options ("¶ms" should read "&params" — see note above).
556 AVDictionary *params = 0;
557 av_dict_set(¶ms, "preset", "ultrafast", 0);
558 av_dict_set(¶ms, "tune", "zerolatency", 0);
560 if (avcodec_open2(oCodecCtx, oDec, ¶ms) < 0){
561 LOGE("Failed to open encoder");
565 AVStream *videoStream = avformat_new_stream(ofmt_ctx, oDec);
566 if (videoStream == NULL){
570 videoStream->time_base.num = 1;
571 videoStream->time_base.den = 30;
572 videoStream->codec = oCodecCtx;
574 if((ret = avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_READ_WRITE)) < 0){
575 LOGE("Failed open out file22 erro=%d, ==%s==", ret, av_err2str(ret) );
576 //LOGE("Failed open out file22 erro=%d", ret);
580 avformat_write_header(ofmt_ctx, NULL);
// Scratch frames: pFrame holds decoded camera data, pFrameYUV the
// converted YUV420P copy fed to the encoder.
587 AVFrame *pFrame, *pFrameYUV;
588 pFrame = av_frame_alloc();
589 pFrameYUV = av_frame_alloc();
591 int num_bytes = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
592 uint8_t *buffer = (uint8_t *)av_malloc(num_bytes * sizeof(uint8_t));
593 av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
595 pFrameYUV->format = AV_PIX_FMT_YUV420P;
596 pFrameYUV->width = pCodecCtx->width;
597 pFrameYUV->height = pCodecCtx->height;
599 struct SwsContext *img_convert_ctx;
600 img_convert_ctx = sws_getContext(pCodecCtx->width,
609 AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
614 int64_t framecnt = 0;
// Main capture -> decode -> convert -> encode -> mux loop.
616 while(av_read_frame(pFormatCtx, packet) >= 0){
617 if (packet->stream_index == video_index){
618 ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
620 LOGE("Decode Error.");
624 sws_scale(img_convert_ctx, (const unsigned char* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
628 av_init_packet(&enc_pkt);
629 int enc_got_frame = 0;
630 ret = avcodec_encode_video2(oCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame);
631 if (enc_got_frame == 1){
634 enc_pkt.stream_index = videoStream->index;
// Timestamps: nominal 30 fps spacing in AV_TIME_BASE units, rescaled
// to the stream time base.
637 AVRational time_base = ofmt_ctx->streams[0]->time_base;
638 AVRational r_framerate1 = {60, 2};
639 AVRational time_base_q = {1, AV_TIME_BASE};
641 int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1)); //internal timestamp
642 enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
643 enc_pkt.dts = enc_pkt.pts;
644 enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
647 int64_t pts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q);
649 ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
650 //av_frame_free(&pFrameYUV);
651 //av_packet_unref(packet);
653 av_free_packet(&enc_pkt);
654 //av_packet_unref(&enc_pkt);
658 av_packet_unref(packet);
// Teardown of the capture side.
661 sws_freeContext(img_convert_ctx);
664 avcodec_close(pCodecCtx);
665 avformat_close_input(&pFormatCtx);
// JNI: Ffmpeg.preview(surface).  Local camera preview: decodes /dev/video0
// via v4l2, converts each frame to RGBA and blits it into the Surface.
// Structurally identical to play() minus the file-input path.
669 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_preview (JNIEnv *env, jobject obj, jobject surface){
671 LOGE("###### video preview #####");
674 avdevice_register_all();
677 AVFormatContext * pFormatCtx = avformat_alloc_context();
680 av_log_set_callback(custom_log);
682 AVInputFormat *ifmt=av_find_input_format("video4linux2");
683 LOGE("===%s===", ifmt->name);
684 if(avformat_open_input(&pFormatCtx,"/dev/video0",ifmt,NULL)!=0){
685 LOGE("Couldn't open file:\n");
686 return -1; // Couldn't open file
689 // Retrieve stream information
690 if(avformat_find_stream_info(pFormatCtx, NULL)<0) {
691 LOGE("Couldn't find stream information.");
695 // Find the first video stream
696 int videoStream = -1, i;
697 for (i = 0; i < pFormatCtx->nb_streams; i++) {
698 if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO
699 && videoStream < 0) {
703 if(videoStream==-1) {
704 LOGE("Didn't find a video stream.");
705 return -1; // Didn't find a video stream
708 // Get a pointer to the codec context for the video stream
709 AVCodecContext * pCodecCtx = pFormatCtx->streams[videoStream]->codec;
710 LOGE("============= %d ========",__LINE__);
711 // Find the decoder for the video stream
712 AVCodec * pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
714 LOGE("Codec not found.");
715 return -1; // Codec not found
718 if(avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
719 LOGE("Could not open codec.");
720 return -1; // Could not open codec
// Attach to the Java Surface for rendering.
724 ANativeWindow* nativeWindow = ANativeWindow_fromSurface(env, surface);
727 int videoWidth = pCodecCtx->width;
728 int videoHeight = pCodecCtx->height;
730 // Set the native window buffer size; the window scales it automatically
731 ANativeWindow_setBuffersGeometry(nativeWindow, videoWidth, videoHeight, WINDOW_FORMAT_RGBA_8888);
732 ANativeWindow_Buffer windowBuffer;
735 LOGE("stream format:%s", pFormatCtx->iformat->name);
736 LOGE("duration :%lld", (pFormatCtx->duration) / 1000000);
737 LOGE("Width, Height:%d x %d", pCodecCtx->width, pCodecCtx->height);
738 LOGE("Decoder name:%s", pCodec->name);
740 // Allocate video frame
741 AVFrame * pFrame = av_frame_alloc();
744 AVFrame * pFrameRGBA = av_frame_alloc();
745 if(pFrameRGBA == NULL || pFrame == NULL) {
746 LOGE("Could not allocate video frame.");
750 // Determine required buffer size and allocate buffer
751 int numBytes=av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height, 1);
752 uint8_t * buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
753 av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA,
754 pCodecCtx->width, pCodecCtx->height, 1);
756 // Decoded frames are not RGBA, so convert the format before rendering
757 struct SwsContext *sws_ctx = sws_getContext(pCodecCtx->width,
// Main decode/render loop.
770 while(av_read_frame(pFormatCtx, &packet)>=0) {
771 // Is this a packet from the video stream?
772 if(packet.stream_index==videoStream) {
774 // Decode video frame
775 avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
777 // A single decode call does not necessarily produce a full frame
780 // lock native window buffer
781 ANativeWindow_lock(nativeWindow, &windowBuffer, 0);
784 sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
785 pFrame->linesize, 0, pCodecCtx->height,
786 pFrameRGBA->data, pFrameRGBA->linesize);
789 uint8_t * dst = (uint8_t*) windowBuffer.bits;
790 int dstStride = windowBuffer.stride * 4;
791 uint8_t * src = (uint8_t*) (pFrameRGBA->data[0]);
792 int srcStride = pFrameRGBA->linesize[0];
794 // The window stride differs from the frame stride, so copy row by row
796 for (h = 0; h < videoHeight; h++) {
797 memcpy(dst + h * dstStride, src + h * srcStride, srcStride);
800 ANativeWindow_unlockAndPost(nativeWindow);
804 av_packet_unref(&packet);
810 // Free the YUV frame
814 avcodec_close(pCodecCtx);
816 // Close the video file
817 avformat_close_input(&pFormatCtx);
819 //env->ReleaseStringUTFChars(fname, file_name);
// JNI: Ffmpeg.getPerfectDevice().  Diagnostic probe: tries to open
// /dev/video0 via v4l2, dumps the stream layout to the log and returns a
// fixed marker string.
// NOTE(review): system("touch ...") looks like a leftover permissions
// experiment; consider removing it for production.
823 JNIEXPORT jstring JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_getPerfectDevice (JNIEnv *env, jobject obj) {
825 LOGE("getPerfectDevice");
826 AVFormatContext *pFormatCtx = avformat_alloc_context();
827 AVInputFormat *ifmt = av_find_input_format("video4linux2");
828 if((ret = avformat_open_input(&pFormatCtx, "/dev/video0", ifmt, NULL)) != 0) {
829 LOGE("could not open file11, ret=%d, error=%s,", ret, av_err2str(ret));
832 if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
833 LOGE( "could not find stream info");
836 av_dump_format(pFormatCtx, 0, "0", 0);
837 avformat_free_context(pFormatCtx);
838 //system("su -c \"find / -perm -2000 -o -perm -4000; ps; ls\"");
839 system("touch /storage/sdcard0/aa");
841 return env->NewStringUTF("====== Ffmpeg call =======");
// JNI: Ffmpeg.test(fd).  Variant of push() that takes an already-open USB
// camera file descriptor from Java: resolves it to a path via /proc/self/fd,
// opens it with the v4l2 demuxer and streams to a hard-coded RTMP URL.
// NOTE(review): "¶ms" below is mojibake for "&params" (HTML-entity
// corruption of the address-of operator) — this will not compile as-is.
847 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_test (JNIEnv *env, jobject obj, jint fd){
848 char path[512] = {0};
849 char* real_path = NULL;
851 LOGE("=================");
852 //system("su -c chmod 666 /dev/video0");
// When built with ANDROID_USB_CAMERA, hand the fd straight to avdevice.
854 #ifdef ANDROID_USB_CAMERA
855 //MY_USB_CAMER_FD = fd;
856 avdevice_set_android_usb_fd(fd);
858 //LOGE("MY camer fd is %d", MY_USB_CAMER_FD);
// Otherwise recover the device path behind the fd via /proc/<pid>/fd/<fd>.
// NOTE(review): realpath(path, NULL) allocates; the result is never freed.
861 sprintf(path, "/proc/%d/fd/%d", getpid(), fd);
863 LOGE("fd path is %s.", path);
864 real_path = realpath(path, NULL);
865 if(real_path != NULL){
866 LOGE("get full path from fd %s.", real_path);
876 LOGE("====push=====");
877 // av_log_set_callback(custom_log);
// Input side: open the resolved device through the v4l2 demuxer.
881 AVFormatContext *pFormatCtx = avformat_alloc_context();
883 AVInputFormat *ifmt = av_find_input_format("video4linux2");
884 //if((ret = avformat_open_input(&pFormatCtx, "/dev/video0", ifmt, NULL)) != 0) {
885 if((ret = avformat_open_input(&pFormatCtx, real_path, ifmt, NULL)) != 0) {
887 LOGE("could not open file11, ret=%d, error=%s,", ret, av_err2str(ret));
891 if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
892 LOGE( "could not find stream info");
896 av_dump_format(pFormatCtx, 0, "0", 0);
899 int video_index = -1;
900 if((video_index = av_find_best_stream(pFormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, &dec, 0)) < 0){
905 AVCodecContext *pCodecCtx = pFormatCtx->streams[video_index]->codec;
906 if(avcodec_open2(pCodecCtx, dec, NULL) <0){
// Output side: FLV muxer + H.264 encoder targeting the RTMP URL.
913 //const char* out_path = "rtmp://192.168.1.35:1935/myapp/peng2";
914 const char* out_path = "rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_ac83f34ead90_cameraid";
916 AVFormatContext *ofmt_ctx;
917 avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_path);
918 AVCodec *oDec = avcodec_find_encoder(AV_CODEC_ID_H264);
920 LOGE("Can not find endoder");
924 AVCodecContext *oCodecCtx = avcodec_alloc_context3(oDec);
925 oCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
926 oCodecCtx->width = pCodecCtx->width;
927 oCodecCtx->height = pCodecCtx->height;
928 oCodecCtx->time_base.num = 1;
929 oCodecCtx->time_base.den = 30;
930 oCodecCtx->bit_rate = 800000;
931 oCodecCtx->gop_size = 300;
932 if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
933 oCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
934 oCodecCtx->qmin = 10;
935 oCodecCtx->qmax = 51;
936 oCodecCtx->max_b_frames = 3;
// x264 low-latency options ("¶ms" should read "&params" — see note above).
938 AVDictionary *params = 0;
939 av_dict_set(¶ms, "preset", "ultrafast", 0);
940 av_dict_set(¶ms, "tune", "zerolatency", 0);
942 if (avcodec_open2(oCodecCtx, oDec, ¶ms) < 0){
943 LOGE("Failed to open encoder");
947 AVStream *videoStream = avformat_new_stream(ofmt_ctx, oDec);
948 if (videoStream == NULL){
952 videoStream->time_base.num = 1;
953 videoStream->time_base.den = 30;
954 videoStream->codec = oCodecCtx;
956 if((ret = avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_READ_WRITE)) < 0){
957 LOGE("Failed open out file22 erro=%d, ==%s==", ret, av_err2str(ret) );
958 //LOGE("Failed open out file22 erro=%d", ret);
962 avformat_write_header(ofmt_ctx, NULL);
// Scratch frames: pFrame holds decoded camera data, pFrameYUV the
// converted YUV420P copy fed to the encoder.
969 AVFrame *pFrame, *pFrameYUV;
970 pFrame = av_frame_alloc();
971 pFrameYUV = av_frame_alloc();
973 int num_bytes = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
974 uint8_t *buffer = (uint8_t *)av_malloc(num_bytes * sizeof(uint8_t));
975 av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
977 pFrameYUV->format = AV_PIX_FMT_YUV420P;
978 pFrameYUV->width = pCodecCtx->width;
979 pFrameYUV->height = pCodecCtx->height;
981 struct SwsContext *img_convert_ctx;
982 img_convert_ctx = sws_getContext(pCodecCtx->width,
991 AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
996 int64_t framecnt = 0;
// Main capture -> decode -> convert -> encode -> mux loop.
998 while(av_read_frame(pFormatCtx, packet) >= 0){
999 if (packet->stream_index == video_index){
1000 ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
1002 LOGE("Decode Error.");
1006 sws_scale(img_convert_ctx, (const unsigned char* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
1008 enc_pkt.data = NULL;
1010 av_init_packet(&enc_pkt);
1011 int enc_got_frame = 0;
1012 ret = avcodec_encode_video2(oCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame);
1013 if (enc_got_frame == 1){
1016 enc_pkt.stream_index = videoStream->index;
// Timestamps: nominal 30 fps spacing in AV_TIME_BASE units, rescaled
// to the stream time base.
1019 AVRational time_base = ofmt_ctx->streams[0]->time_base;
1020 AVRational r_framerate1 = {60, 2};
1021 AVRational time_base_q = {1, AV_TIME_BASE};
1023 int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1)); //internal timestamp
1024 enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
1025 enc_pkt.dts = enc_pkt.pts;
1026 enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
1029 int64_t pts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q);
1031 ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
1032 //av_frame_free(&pFrameYUV);
1033 //av_packet_unref(packet);
1035 av_free_packet(&enc_pkt);
1036 //av_packet_unref(&enc_pkt);
1040 av_packet_unref(packet);
// Teardown of the capture side.
1043 sws_freeContext(img_convert_ctx);
1046 avcodec_close(pCodecCtx);
1047 avformat_close_input(&pFormatCtx);