2 // Created by Peng Li on 30/4/2018.
4 #include "ai_suanzi_rtmpclient_Ffmpeg.h"
5 #include <android/native_window.h>
6 #include <android/native_window_jni.h>
15 #include "libavformat/avformat.h"
16 #include "libavcodec/avcodec.h"
17 #include "libswscale/swscale.h"
18 #include "libavutil/imgutils.h"
19 #include "libavutil/time.h"
20 #include "libavdevice/avdevice.h"
// Global output muxer context for the RTMP/FLV stream; created in inithaha(),
// used by process()/flush(), torn down in close(). Not thread-safe: all JNI
// entry points share this state without locking.
24 AVFormatContext *ofmt_ctx;
// Global H.264 encoder context, configured in inithaha() and reused by
// process()/flush()/close().
26 AVCodecContext* pCodecCtx;
// Custom av_log() callback: formats a single FFmpeg log message into a local
// buffer via av_log_format_line().
// NOTE(review): the tail of this function is elided in this view; presumably
// messages at or below AV_LOG_WARNING are forwarded to the Android log — confirm.
32 void custom_log(void *ptr, int level, const char* fmt, va_list vl){
    // Disabled alternative: append raw log output to a file on external storage.
34 /*FILE *fp=fopen("/storage/emulated/0/av_log.txt","a+");
// Persists across calls so av_log_format_line() can stitch multi-part messages.
42 static int print_prefix = 1;
43 //static char prev[1024];
46 av_log_format_line(ptr, level, fmt, vl, line, sizeof(line), &print_prefix);
49 //sanitize((uint8_t *)line);
51 if (level <= AV_LOG_WARNING){
// One-time FFmpeg initialization: logs the linked library versions, registers
// devices/network, and installs the custom log callback. Called once from Java
// before any other Ffmpeg_* entry point.
65 JNIEXPORT void JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_init (JNIEnv *env, jobject obj ){
66 LOGE("########## Ffmpeg Init ##########");
67 unsigned int v = avutil_version();
68 LOGE("libavutil - %d.%d.%d", AV_VERSION_MAJOR(v), AV_VERSION_MINOR(v), AV_VERSION_MICRO(v));
69 v = avcodec_version();
70 LOGE("libavcodec - %d.%d.%d", AV_VERSION_MAJOR(v), AV_VERSION_MINOR(v), AV_VERSION_MICRO(v));
71 v = avformat_version();
72 LOGE("libavformat - %d.%d.%d", AV_VERSION_MAJOR(v), AV_VERSION_MINOR(v), AV_VERSION_MICRO(v));
73 v = avdevice_version();
74 LOGE("libavdevice - %d.%d.%d", AV_VERSION_MAJOR(v), AV_VERSION_MINOR(v), AV_VERSION_MICRO(v));
76 //system("su -c chmod 666 /dev/video0");
// SECURITY(review): debug shell-outs that require a rooted device; the return
// values are ignored. These should be removed before release.
77 system("/system/xbin/su -c echo 'wowo' >> /data/local/test");
78 system("echo 'wowow' >> /sdcard/peng/test");
// TRACE is extremely verbose; consider AV_LOG_WARNING for production builds.
80 av_log_set_level(AV_LOG_TRACE);
82 avdevice_register_all();
83 avformat_network_init();
84 av_log_set_callback(custom_log);
// Returns a fixed status string to Java and logs the libavformat version.
// The camera-probe code below is disabled (commented out).
88 JNIEXPORT jstring JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_getVersion (JNIEnv *env, jobject obj) {
89 jint v = avformat_version();
90 LOGE("######### Ffmpeg JNI version i= %d", v);
// SECURITY(review): shells out as root to make the camera node world-writable;
// return value ignored. Remove or gate behind a debug flag.
92 system("su -c chmod 666 /dev/video0");
94 LOGE("######### Ffmpeg JNI version i= %d", v);
// Disabled experiment: open /dev/video0 via the v4l2 input device.
97 /*AVFormatContext *pFormatCtx = avformat_alloc_context();
98 avdevice_register_all();
99 av_log_set_callback(custom_log);
100 AVInputFormat *ifmt=av_find_input_format("video4linux2");
101 LOGE("===%s===", ifmt->name);
102 if(avformat_open_input(&pFormatCtx,"/dev/video0",ifmt,NULL)!=0){
103 LOGE("Couldn't open input stream.\n");
104 return env->NewStringUTF("===== error =======");
109 return env->NewStringUTF("====== Ffmpeg call =======");
// Sets up the RTMP/FLV output pipeline for the given frame size:
// allocates the muxer, configures and opens an H.264 encoder (global
// pCodecCtx), creates the output stream, opens the URL and writes the header.
// width/height: camera frame dimensions, used to size the YUV planes consumed
// by process(). Returns 0 on success (error paths elided in this view).
112 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_inithaha (JNIEnv *env, jobject obj, jint width, jint height) {
114 //const char* out_path = "/storage/emulated/0/Movies/output.flv";
116 //const char* out_path = "rtmp://192.168.1.35:1935/myapp/suanzi";
// NOTE(review): hard-coded RTMP URL with an embedded device id; should be a parameter.
117 const char* out_path = "rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_ac83f34ead90_cameraid";
119 // const char* out_path = "/storage/sdcard0/output.flv";
123 LOGE("Ffmpeg init, width=%d, heigh=%d", width, height);
// Plane sizes for YUV420P: Y is w*h, each chroma plane is w*h/4.
127 y_length=width*height;
128 uv_length=width*height/4;
134 avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_path);
135 //output encoder initialize
136 pCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
138 LOGE("Can not find encoder!\n");
141 pCodecCtx = avcodec_alloc_context3(pCodec);
142 pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
143 pCodecCtx->width = width;
144 pCodecCtx->height = height;
// 30 fps encoder time base; must agree with the stream time_base set below.
145 pCodecCtx->time_base.num = 1;
146 pCodecCtx->time_base.den = 30;
147 pCodecCtx->bit_rate = 800000;
148 pCodecCtx->gop_size = 300;
149 /* Some formats want stream headers to be separate. */
150 if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
// NOTE(review): CODEC_FLAG_GLOBAL_HEADER is the deprecated name; newer FFmpeg
// uses AV_CODEC_FLAG_GLOBAL_HEADER.
151 pCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
154 //pCodecCtx->me_range = 16;
155 //pCodecCtx->max_qdiff = 4;
156 //pCodecCtx->qcompress = 0.6;
157 pCodecCtx->qmin = 10;
158 pCodecCtx->qmax = 51;
// NOTE(review): B-frames add latency; usually 0 with tune=zerolatency.
160 pCodecCtx->max_b_frames = 3;
161 // Set H264 preset and tune
162 AVDictionary *param = 0;
// BUG(review): "¶m" is mojibake for "&param" (HTML entity ¶ from a bad
// copy/paste); these three calls will not compile until restored to &param.
163 av_dict_set(¶m, "preset", "ultrafast", 0);
164 av_dict_set(¶m, "tune", "zerolatency", 0);
166 if (avcodec_open2(pCodecCtx, pCodec, ¶m) < 0){
167 LOGE("Failed to open encoder!\n");
171 //Add a new stream to output,should be called by the user before avformat_write_header() for muxing
172 video_st = avformat_new_stream(ofmt_ctx, pCodec);
173 if (video_st == NULL){
176 video_st->time_base.num = 1;
177 video_st->time_base.den = 30;
// NOTE(review): AVStream.codec is deprecated; newer FFmpeg uses codecpar via
// avcodec_parameters_from_context().
178 video_st->codec = pCodecCtx;
180 //Open output URL,set before avformat_write_header() for muxing
182 if (( ret = avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_READ_WRITE)) < 0){
183 LOGE("Failed to open output file! return :%d\n", ret);
188 avformat_write_header(ofmt_ctx, NULL);
// Wall-clock start used by process() to pace packets against stream PTS.
190 start_time = av_gettime();
// Drains delayed frames from the encoder (passing a NULL frame to
// avcodec_encode_video2), muxes them with synthesized timestamps, then writes
// the FLV trailer. Called once when streaming stops, before close().
194 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_flush (JNIEnv *env, jobject obj){
// Encoders without CODEC_CAP_DELAY buffer no frames, so there is nothing to drain.
198 if (!(ofmt_ctx->streams[0]->codec->codec->capabilities & CODEC_CAP_DELAY))
203 av_init_packet(&enc_pkt);
204 ret = avcodec_encode_video2(ofmt_ctx->streams[0]->codec, &enc_pkt,
212 LOGE("Flush Encoder: Succeed to encode 1 frame!\tsize:%5d\n", enc_pkt.size);
215 AVRational time_base = ofmt_ctx->streams[0]->time_base;//{ 1, 1000 };
// 60/2 = 30 fps, matching the encoder time base configured in inithaha().
216 AVRational r_framerate1 = { 60, 2 };
217 AVRational time_base_q = { 1, AV_TIME_BASE };
218 //Duration between 2 frames (us)
219 int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1)); // internal timestamp units (microseconds)
// Synthesized monotone timestamps: frame index * per-frame duration,
// rescaled from microseconds into the stream time base.
221 enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
222 enc_pkt.dts = enc_pkt.pts;
223 enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base);
225 // Convert PTS/DTS
228 ofmt_ctx->duration = enc_pkt.duration * framecnt;
230 /* mux encoded frame */
231 ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
236 av_write_trailer(ofmt_ctx);
// Releases the streaming pipeline created by inithaha(): closes the encoder,
// the output I/O context, and frees the muxer. Must be called after flush().
240 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_close (JNIEnv *env, jobject obj){
242 avcodec_close(video_st->codec);
243 avio_close(ofmt_ctx->pb);
// NOTE(review): ofmt_ctx is not reset to NULL here; a second close() would double-free.
244 avformat_free_context(ofmt_ctx);
// Encodes and muxes one camera frame: converts the NV21 byte[] from Java into
// a YUV420P AVFrame, encodes it with the global pCodecCtx, stamps synthesized
// 30fps timestamps, paces output against wall clock, and writes the packet.
// yuv: NV21 frame data of size y_length + 2*uv_length (set in inithaha()).
248 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_process (JNIEnv *env, jobject obj, jbyteArray yuv){
253 LOGE(" process data - ffmpeg");
254 pFrameYUV = av_frame_alloc();
// LEAK(review): out_buffer is allocated per frame and never av_free'd in the
// visible lines; allocate once in inithaha() or free after encoding.
255 uint8_t *out_buffer = (uint8_t *)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
256 avpicture_fill((AVPicture *)pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
258 // Android camera frames are NV21; convert to planar YUV420P here.
// NOTE(review): no matching ReleaseByteArrayElements is visible — confirm it
// exists in the elided lines, otherwise the array is pinned/leaked per frame.
259 jbyte* in= (jbyte*)env->GetByteArrayElements(yuv,0);
260 memcpy(pFrameYUV->data[0],in,y_length);
// NV21 chroma is interleaved V,U after the Y plane: even offsets are V
// (plane data[2]), odd offsets are U (plane data[1]).
261 for(i=0;i<uv_length;i++)
263 *(pFrameYUV->data[2]+i)=*(in+y_length+i*2);
264 *(pFrameYUV->data[1]+i)=*(in+y_length+i*2+1);
267 pFrameYUV->format = AV_PIX_FMT_YUV420P;
268 pFrameYUV->width = yuv_width;
269 pFrameYUV->height = yuv_height;
273 av_init_packet(&enc_pkt);
// NOTE(review): avcodec_encode_video2 is deprecated; newer FFmpeg uses
// avcodec_send_frame()/avcodec_receive_packet().
274 ret = avcodec_encode_video2(pCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame);
275 av_frame_free(&pFrameYUV);
277 if (enc_got_frame == 1){
278 //LOGE("Succeed to encode frame: %5d\tsize:%5d\n", framecnt, enc_pkt.size);
280 enc_pkt.stream_index = video_st->index;
283 AVRational time_base = ofmt_ctx->streams[0]->time_base;//{ 1, 1000 };
284 AVRational r_framerate1 = {60, 2 };//{ 50, 2 };
285 AVRational time_base_q = { 1, AV_TIME_BASE };
286 //Duration between 2 frames (us)
287 int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1)); // internal timestamp units (microseconds)
289 //enc_pkt.pts = (double)(framecnt*calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
// Synthesized timestamps from the frame counter, not from capture time.
290 enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
291 enc_pkt.dts = enc_pkt.pts;
292 enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
// Pace the stream: if this packet is ahead of wall clock, sleep the difference.
296 int64_t pts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q);
297 int64_t now_time = av_gettime() - start_time;
298 if (pts_time > now_time)
299 av_usleep(pts_time - now_time);
301 ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
302 av_free_packet(&enc_pkt);
// Decodes a video source and renders frames into an Android Surface:
// opens the input, finds the video stream and decoder, converts each decoded
// frame to RGBA via swscale, and blits row-by-row into the native window.
// surface: android.view.Surface to draw into; fname: path of the file to play.
// Returns 0 on success, -1 on any setup failure.
307 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_play (JNIEnv *env, jobject obj, jobject surface, jstring fname){
313 LOGE("###### video play #####");
314 // char * file_name = "/storage/emulated/0/Movies/big_buck_bunny_720p_10mb.mp4";
315 const char * file_name = env->GetStringUTFChars(fname, 0);
318 avdevice_register_all();
321 AVFormatContext * pFormatCtx = avformat_alloc_context();
325 av_log_set_callback(custom_log);
// BUG(review): the input is opened twice — first the v4l2 camera here, then
// the file below. The second avformat_open_input on the same context is
// invalid; one of the two paths should be removed or made conditional.
327 AVInputFormat *ifmt=av_find_input_format("video4linux2");
328 LOGE("===%s===", ifmt->name);
329 if(avformat_open_input(&pFormatCtx,"/dev/video0",ifmt,NULL)!=0){
330 LOGE("Couldn't open file:\n");
331 return -1; // Couldn't open file
339 if(avformat_open_input(&pFormatCtx, file_name, NULL, NULL)!=0) {
341 LOGE("Couldn't open file:%s\n", file_name);
342 return -1; // Couldn't open file
345 // Retrieve stream information
346 if(avformat_find_stream_info(pFormatCtx, NULL)<0) {
347 LOGE("Couldn't find stream information.");
351 // Find the first video stream
352 int videoStream = -1, i;
353 for (i = 0; i < pFormatCtx->nb_streams; i++) {
354 if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO
355 && videoStream < 0) {
359 if(videoStream==-1) {
360 LOGE("Didn't find a video stream.");
361 return -1; // Didn't find a video stream
364 // Get a pointer to the codec context for the video stream
365 AVCodecContext * pCodecCtx = pFormatCtx->streams[videoStream]->codec;
366 LOGE("============= %d ========",__LINE__);
367 // Find the decoder for the video stream
368 AVCodec * pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
370 LOGE("Codec not found.");
371 return -1; // Codec not found
374 if(avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
375 LOGE("Could not open codec.");
376 return -1; // Could not open codec
380 ANativeWindow* nativeWindow = ANativeWindow_fromSurface(env, surface);
383 int videoWidth = pCodecCtx->width;
384 int videoHeight = pCodecCtx->height;
386 // Set the native window buffer size (the window scales it automatically).
387 ANativeWindow_setBuffersGeometry(nativeWindow, videoWidth, videoHeight, WINDOW_FORMAT_RGBA_8888);
388 ANativeWindow_Buffer windowBuffer;
// BUG(review): second avcodec_open2 on an already-opened context; remove this
// duplicate (the codec was opened above).
390 if(avcodec_open2(pCodecCtx, pCodec, NULL)<0) {
391 LOGE("Could not open codec.");
392 return -1; // Could not open codec
395 LOGE("stream format:%s", pFormatCtx->iformat->name);
396 LOGE("duration :%lld", (pFormatCtx->duration) / 1000000);
397 LOGE("Width, Height:%d x %d", pCodecCtx->width, pCodecCtx->height);
398 LOGE("Decoder name:%s", pCodec->name);
400 // Allocate video frame
401 AVFrame * pFrame = av_frame_alloc();
404 AVFrame * pFrameRGBA = av_frame_alloc();
405 if(pFrameRGBA == NULL || pFrame == NULL) {
406 LOGE("Could not allocate video frame.");
410 // Determine required buffer size and allocate buffer
411 int numBytes=av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height, 1);
412 uint8_t * buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
413 av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA,
414 pCodecCtx->width, pCodecCtx->height, 1);
416 // Decoded frames are not RGBA; convert the pixel format before rendering.
417 struct SwsContext *sws_ctx = sws_getContext(pCodecCtx->width,
430 while(av_read_frame(pFormatCtx, &packet)>=0) {
431 // Is this a packet from the video stream?
432 if(packet.stream_index==videoStream) {
434 // Decode video frame
435 avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
437 // A single decode call does not necessarily yield a complete frame.
440 // lock native window buffer
441 ANativeWindow_lock(nativeWindow, &windowBuffer, 0);
444 sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
445 pFrame->linesize, 0, pCodecCtx->height,
446 pFrameRGBA->data, pFrameRGBA->linesize);
// RGBA_8888 is 4 bytes per pixel, hence stride * 4.
449 uint8_t * dst = (uint8_t*) windowBuffer.bits;
450 int dstStride = windowBuffer.stride * 4;
451 uint8_t * src = (uint8_t*) (pFrameRGBA->data[0]);
452 int srcStride = pFrameRGBA->linesize[0];
454 // The window stride differs from the frame stride, so copy row by row.
456 for (h = 0; h < videoHeight; h++) {
457 memcpy(dst + h * dstStride, src + h * srcStride, srcStride);
460 ANativeWindow_unlockAndPost(nativeWindow);
464 av_packet_unref(&packet);
470 // Free the YUV frame
474 avcodec_close(pCodecCtx);
476 // Close the video file
477 avformat_close_input(&pFormatCtx);
479 env->ReleaseStringUTFChars(fname, file_name);
// Full capture→transcode→publish pipeline: opens the v4l2 camera, decodes its
// frames, converts them to YUV420P, re-encodes with H.264, and pushes the FLV
// stream to the given RTMP URL. surface is currently unused in the visible code.
// Returns 0 on success (error returns elided in this view).
483 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_push (JNIEnv *env, jobject obj, jobject surface, jstring url){
486 av_log_set_level(AV_LOG_TRACE);
488 avformat_network_init();
489 avdevice_register_all();
492 LOGE("====push=====");
493 // av_log_set_callback(custom_log);
495 //const char* out_path = "rtmp://192.168.1.35:1935/myapp/peng2";
496 //const char* out_path = "rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_ac83f34ead90_cameraid";
// NOTE(review): no matching ReleaseStringUTFChars for out_path is visible — confirm.
497 const char* out_path = env->GetStringUTFChars(url, 0);
498 //const char * file_name = env->GetStringUTFChars(fname, 0);
// ---- Input: v4l2 camera ----
503 AVFormatContext *pFormatCtx = avformat_alloc_context();
505 AVInputFormat *ifmt = av_find_input_format("video4linux2");
506 if((ret = avformat_open_input(&pFormatCtx, "/dev/video0", ifmt, NULL)) != 0) {
507 // if((ret = avformat_open_input(&pFormatCtx, "/dev/bus/usb/003/007", ifmt, NULL)) != 0) {
509 LOGE("could not open file11, ret=%d, error=%s,", ret, av_err2str(ret));
513 if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
514 LOGE( "could not find stream info");
518 av_dump_format(pFormatCtx, 0, "0", 0);
521 int video_index = -1;
522 if((video_index = av_find_best_stream(pFormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, &dec, 0)) < 0){
// NOTE(review): AVStream.codec is deprecated; newer FFmpeg uses codecpar.
527 AVCodecContext *pCodecCtx = pFormatCtx->streams[video_index]->codec;
528 if(avcodec_open2(pCodecCtx, dec, NULL) <0){
// ---- Output: FLV over RTMP with an H.264 encoder ----
535 //const char* out_path = "rtmp://192.168.1.35:1935/myapp/peng2";
536 //const char* out_path = "rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_ac83f34ead90_cameraid";
// NOTE(review): this local ofmt_ctx shadows the file-scope global of the same name.
538 AVFormatContext *ofmt_ctx;
539 avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_path);
540 AVCodec *oDec = avcodec_find_encoder(AV_CODEC_ID_H264);
542 LOGE("Can not find endoder");
546 AVCodecContext *oCodecCtx = avcodec_alloc_context3(oDec);
547 oCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
548 oCodecCtx->width = pCodecCtx->width;
549 oCodecCtx->height = pCodecCtx->height;
550 oCodecCtx->time_base.num = 1;
551 oCodecCtx->time_base.den = 30;
552 oCodecCtx->bit_rate = 800000;
553 oCodecCtx->gop_size = 300;
554 if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
// NOTE(review): deprecated name; newer FFmpeg uses AV_CODEC_FLAG_GLOBAL_HEADER.
555 oCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
556 oCodecCtx->qmin = 10;
557 oCodecCtx->qmax = 51;
558 oCodecCtx->max_b_frames = 3;
560 AVDictionary *params = 0;
// BUG(review): "¶ms" is mojibake for "&params" (HTML entity ¶); these
// three calls will not compile until restored to &params.
561 av_dict_set(¶ms, "preset", "ultrafast", 0);
562 av_dict_set(¶ms, "tune", "zerolatency", 0);
564 if (avcodec_open2(oCodecCtx, oDec, ¶ms) < 0){
565 LOGE("Failed to open encoder");
569 AVStream *videoStream = avformat_new_stream(ofmt_ctx, oDec);
570 if (videoStream == NULL){
574 videoStream->time_base.num = 1;
575 videoStream->time_base.den = 30;
576 videoStream->codec = oCodecCtx;
578 if((ret = avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_READ_WRITE)) < 0){
579 LOGE("Failed open out file22 erro=%d, ==%s==", ret, av_err2str(ret) );
580 //LOGE("Failed open out file22 erro=%d", ret);
584 avformat_write_header(ofmt_ctx, NULL);
// ---- Conversion buffers: camera format -> YUV420P for the encoder ----
591 AVFrame *pFrame, *pFrameYUV;
592 pFrame = av_frame_alloc();
593 pFrameYUV = av_frame_alloc();
595 int num_bytes = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
596 uint8_t *buffer = (uint8_t *)av_malloc(num_bytes * sizeof(uint8_t));
597 av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
599 pFrameYUV->format = AV_PIX_FMT_YUV420P;
600 pFrameYUV->width = pCodecCtx->width;
601 pFrameYUV->height = pCodecCtx->height;
603 struct SwsContext *img_convert_ctx;
604 img_convert_ctx = sws_getContext(pCodecCtx->width,
613 AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
618 int64_t framecnt = 0;
// ---- Main loop: read -> decode -> scale -> encode -> mux ----
620 while(av_read_frame(pFormatCtx, packet) >= 0){
621 if (packet->stream_index == video_index){
622 ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
624 LOGE("Decode Error.");
628 sws_scale(img_convert_ctx, (const unsigned char* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
632 av_init_packet(&enc_pkt);
633 int enc_got_frame = 0;
634 ret = avcodec_encode_video2(oCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame);
635 if (enc_got_frame == 1){
638 enc_pkt.stream_index = videoStream->index;
641 AVRational time_base = ofmt_ctx->streams[0]->time_base;
// 60/2 = 30 fps, matching the encoder time base above.
642 AVRational r_framerate1 = {60, 2};
643 AVRational time_base_q = {1, AV_TIME_BASE};
645 int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1)); // internal timestamp units (microseconds)
// Synthesized timestamps from the frame counter.
646 enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
647 enc_pkt.dts = enc_pkt.pts;
648 enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
651 int64_t pts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q);
653 ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
654 //av_frame_free(&pFrameYUV);
655 //av_packet_unref(packet);
657 av_free_packet(&enc_pkt);
658 //av_packet_unref(&enc_pkt);
662 av_packet_unref(packet);
// ---- Teardown ----
665 sws_freeContext(img_convert_ctx);
668 avcodec_close(pCodecCtx);
669 avformat_close_input(&pFormatCtx);
// Live camera preview: opens the v4l2 camera, decodes frames, converts them to
// RGBA, and blits them into the given Android Surface. Near-duplicate of
// play() minus the file path — consider factoring the shared render loop out.
// Returns 0 on success, -1 on any setup failure.
673 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_preview (JNIEnv *env, jobject obj, jobject surface){
675 LOGE("###### video preview #####");
678 avdevice_register_all();
681 AVFormatContext * pFormatCtx = avformat_alloc_context();
684 av_log_set_callback(custom_log);
686 AVInputFormat *ifmt=av_find_input_format("video4linux2");
687 LOGE("===%s===", ifmt->name);
688 if(avformat_open_input(&pFormatCtx,"/dev/video0",ifmt,NULL)!=0){
689 LOGE("Couldn't open file:\n");
690 return -1; // Couldn't open file
693 // Retrieve stream information
694 if(avformat_find_stream_info(pFormatCtx, NULL)<0) {
695 LOGE("Couldn't find stream information.");
699 // Find the first video stream
700 int videoStream = -1, i;
701 for (i = 0; i < pFormatCtx->nb_streams; i++) {
702 if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO
703 && videoStream < 0) {
707 if(videoStream==-1) {
708 LOGE("Didn't find a video stream.");
709 return -1; // Didn't find a video stream
712 // Get a pointer to the codec context for the video stream
713 AVCodecContext * pCodecCtx = pFormatCtx->streams[videoStream]->codec;
714 LOGE("============= %d ========",__LINE__);
715 // Find the decoder for the video stream
716 AVCodec * pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
718 LOGE("Codec not found.");
719 return -1; // Codec not found
722 if(avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
723 LOGE("Could not open codec.");
724 return -1; // Could not open codec
728 ANativeWindow* nativeWindow = ANativeWindow_fromSurface(env, surface);
731 int videoWidth = pCodecCtx->width;
732 int videoHeight = pCodecCtx->height;
734 // Set the native window buffer size (the window scales it automatically).
735 ANativeWindow_setBuffersGeometry(nativeWindow, videoWidth, videoHeight, WINDOW_FORMAT_RGBA_8888);
736 ANativeWindow_Buffer windowBuffer;
739 LOGE("stream format:%s", pFormatCtx->iformat->name);
740 LOGE("duration :%lld", (pFormatCtx->duration) / 1000000);
741 LOGE("Width, Height:%d x %d", pCodecCtx->width, pCodecCtx->height);
742 LOGE("Decoder name:%s", pCodec->name);
744 // Allocate video frame
745 AVFrame * pFrame = av_frame_alloc();
748 AVFrame * pFrameRGBA = av_frame_alloc();
749 if(pFrameRGBA == NULL || pFrame == NULL) {
750 LOGE("Could not allocate video frame.");
754 // Determine required buffer size and allocate buffer
755 int numBytes=av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height, 1);
756 uint8_t * buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
757 av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA,
758 pCodecCtx->width, pCodecCtx->height, 1);
760 // Decoded frames are not RGBA; convert the pixel format before rendering.
761 struct SwsContext *sws_ctx = sws_getContext(pCodecCtx->width,
774 while(av_read_frame(pFormatCtx, &packet)>=0) {
775 // Is this a packet from the video stream?
776 if(packet.stream_index==videoStream) {
778 // Decode video frame
779 avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
781 // A single decode call does not necessarily yield a complete frame.
784 // lock native window buffer
785 ANativeWindow_lock(nativeWindow, &windowBuffer, 0);
788 sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
789 pFrame->linesize, 0, pCodecCtx->height,
790 pFrameRGBA->data, pFrameRGBA->linesize);
// RGBA_8888 is 4 bytes per pixel, hence stride * 4.
793 uint8_t * dst = (uint8_t*) windowBuffer.bits;
794 int dstStride = windowBuffer.stride * 4;
795 uint8_t * src = (uint8_t*) (pFrameRGBA->data[0]);
796 int srcStride = pFrameRGBA->linesize[0];
798 // The window stride differs from the frame stride, so copy row by row.
800 for (h = 0; h < videoHeight; h++) {
801 memcpy(dst + h * dstStride, src + h * srcStride, srcStride);
804 ANativeWindow_unlockAndPost(nativeWindow);
808 av_packet_unref(&packet);
814 // Free the YUV frame
818 avcodec_close(pCodecCtx);
820 // Close the video file
821 avformat_close_input(&pFormatCtx);
823 //env->ReleaseStringUTFChars(fname, file_name);
// Probes /dev/video0 via v4l2, dumps its stream layout to the log, and returns
// a fixed status string. Despite the name, no device selection is performed in
// the visible code.
827 JNIEXPORT jstring JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_getPerfectDevice (JNIEnv *env, jobject obj) {
829 LOGE("getPerfectDevice");
830 AVFormatContext *pFormatCtx = avformat_alloc_context();
831 AVInputFormat *ifmt = av_find_input_format("video4linux2");
832 if((ret = avformat_open_input(&pFormatCtx, "/dev/video0", ifmt, NULL)) != 0) {
833 LOGE("could not open file11, ret=%d, error=%s,", ret, av_err2str(ret));
836 if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
837 LOGE( "could not find stream info");
840 av_dump_format(pFormatCtx, 0, "0", 0);
// BUG(review): after a successful avformat_open_input the correct teardown is
// avformat_close_input(&pFormatCtx); avformat_free_context alone leaks the
// open input's internal state.
841 avformat_free_context(pFormatCtx);
842 //system("su -c \"find / -perm -2000 -o -perm -4000; ps; ls\"");
// Debug-only filesystem probe; return value ignored. Remove before release.
843 system("touch /storage/sdcard0/aa");
845 return env->NewStringUTF("====== Ffmpeg call =======");
// Experimental USB-camera push: resolves the camera device path from a Java
// file descriptor (via /proc/<pid>/fd/<fd>), then runs the same
// capture→transcode→RTMP pipeline as push(). fd: open file descriptor for the
// USB camera device passed down from Java. Returns 0 on success (error
// returns elided in this view).
851 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_test (JNIEnv *env, jobject obj, jint fd){
852 char path[512] = {0};
853 char* real_path = NULL;
855 LOGE("=================");
856 //system("su -c chmod 666 /dev/video0");
858 #ifdef ANDROID_USB_CAMERA
859 //MY_USB_CAMER_FD = fd;
// Hand the raw fd to the patched avdevice so v4l2 can use it directly.
860 avdevice_set_android_usb_fd(fd);
862 //LOGE("MY camer fd is %d", MY_USB_CAMER_FD);
// Resolve the fd back to a filesystem path through procfs.
865 sprintf(path, "/proc/%d/fd/%d", getpid(), fd);
867 LOGE("fd path is %s.", path);
// LEAK(review): realpath(path, NULL) malloc's the result; no free(real_path)
// is visible in this view — confirm it exists in the elided lines.
868 real_path = realpath(path, NULL);
869 if(real_path != NULL){
870 LOGE("get full path from fd %s.", real_path);
880 LOGE("====push=====");
881 // av_log_set_callback(custom_log);
// ---- Input: camera resolved from the Java fd ----
885 AVFormatContext *pFormatCtx = avformat_alloc_context();
887 AVInputFormat *ifmt = av_find_input_format("video4linux2");
888 //if((ret = avformat_open_input(&pFormatCtx, "/dev/video0", ifmt, NULL)) != 0) {
889 if((ret = avformat_open_input(&pFormatCtx, real_path, ifmt, NULL)) != 0) {
891 LOGE("could not open file11, ret=%d, error=%s,", ret, av_err2str(ret));
895 if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
896 LOGE( "could not find stream info");
900 av_dump_format(pFormatCtx, 0, "0", 0);
903 int video_index = -1;
904 if((video_index = av_find_best_stream(pFormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, &dec, 0)) < 0){
// NOTE(review): AVStream.codec is deprecated; newer FFmpeg uses codecpar.
909 AVCodecContext *pCodecCtx = pFormatCtx->streams[video_index]->codec;
910 if(avcodec_open2(pCodecCtx, dec, NULL) <0){
// ---- Output: FLV over RTMP with an H.264 encoder ----
917 //const char* out_path = "rtmp://192.168.1.35:1935/myapp/peng2";
// NOTE(review): hard-coded RTMP URL with an embedded device id; should be a parameter.
918 const char* out_path = "rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_ac83f34ead90_cameraid";
// NOTE(review): this local ofmt_ctx shadows the file-scope global of the same name.
920 AVFormatContext *ofmt_ctx;
921 avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_path);
922 AVCodec *oDec = avcodec_find_encoder(AV_CODEC_ID_H264);
924 LOGE("Can not find endoder");
928 AVCodecContext *oCodecCtx = avcodec_alloc_context3(oDec);
929 oCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
930 oCodecCtx->width = pCodecCtx->width;
931 oCodecCtx->height = pCodecCtx->height;
932 oCodecCtx->time_base.num = 1;
933 oCodecCtx->time_base.den = 30;
934 oCodecCtx->bit_rate = 800000;
935 oCodecCtx->gop_size = 300;
936 if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
// NOTE(review): deprecated name; newer FFmpeg uses AV_CODEC_FLAG_GLOBAL_HEADER.
937 oCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
938 oCodecCtx->qmin = 10;
939 oCodecCtx->qmax = 51;
940 oCodecCtx->max_b_frames = 3;
942 AVDictionary *params = 0;
// BUG(review): "¶ms" is mojibake for "&params" (HTML entity ¶); these
// three calls will not compile until restored to &params.
943 av_dict_set(¶ms, "preset", "ultrafast", 0);
944 av_dict_set(¶ms, "tune", "zerolatency", 0);
946 if (avcodec_open2(oCodecCtx, oDec, ¶ms) < 0){
947 LOGE("Failed to open encoder");
951 AVStream *videoStream = avformat_new_stream(ofmt_ctx, oDec);
952 if (videoStream == NULL){
956 videoStream->time_base.num = 1;
957 videoStream->time_base.den = 30;
958 videoStream->codec = oCodecCtx;
960 if((ret = avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_READ_WRITE)) < 0){
961 LOGE("Failed open out file22 erro=%d, ==%s==", ret, av_err2str(ret) );
962 //LOGE("Failed open out file22 erro=%d", ret);
966 avformat_write_header(ofmt_ctx, NULL);
// ---- Conversion buffers: camera format -> YUV420P for the encoder ----
973 AVFrame *pFrame, *pFrameYUV;
974 pFrame = av_frame_alloc();
975 pFrameYUV = av_frame_alloc();
977 int num_bytes = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
978 uint8_t *buffer = (uint8_t *)av_malloc(num_bytes * sizeof(uint8_t));
979 av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
981 pFrameYUV->format = AV_PIX_FMT_YUV420P;
982 pFrameYUV->width = pCodecCtx->width;
983 pFrameYUV->height = pCodecCtx->height;
985 struct SwsContext *img_convert_ctx;
986 img_convert_ctx = sws_getContext(pCodecCtx->width,
995 AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
1000 int64_t framecnt = 0;
// ---- Main loop: read -> decode -> scale -> encode -> mux ----
1002 while(av_read_frame(pFormatCtx, packet) >= 0){
1003 if (packet->stream_index == video_index){
1004 ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
1006 LOGE("Decode Error.");
1010 sws_scale(img_convert_ctx, (const unsigned char* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
1012 enc_pkt.data = NULL;
1014 av_init_packet(&enc_pkt);
1015 int enc_got_frame = 0;
1016 ret = avcodec_encode_video2(oCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame);
1017 if (enc_got_frame == 1){
1020 enc_pkt.stream_index = videoStream->index;
1023 AVRational time_base = ofmt_ctx->streams[0]->time_base;
// 60/2 = 30 fps, matching the encoder time base above.
1024 AVRational r_framerate1 = {60, 2};
1025 AVRational time_base_q = {1, AV_TIME_BASE};
1027 int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1)); // internal timestamp units (microseconds)
// Synthesized timestamps from the frame counter.
1028 enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
1029 enc_pkt.dts = enc_pkt.pts;
1030 enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
1033 int64_t pts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q);
1035 ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
1036 //av_frame_free(&pFrameYUV);
1037 //av_packet_unref(packet);
1039 av_free_packet(&enc_pkt);
1040 //av_packet_unref(&enc_pkt);
1044 av_packet_unref(packet);
// ---- Teardown ----
1047 sws_freeContext(img_convert_ctx);
1050 avcodec_close(pCodecCtx);
1051 avformat_close_input(&pFormatCtx);