2 // Created by Peng Li on 30/4/2018.
4 #include "ai_suanzi_rtmpclient_Ffmpeg.h"
5 #include <android/native_window.h>
6 #include <android/native_window_jni.h>
15 #include "libavformat/avformat.h"
16 #include "libavcodec/avcodec.h"
17 #include "libswscale/swscale.h"
18 #include "libavutil/imgutils.h"
19 #include "libavutil/time.h"
20 #include "libavdevice/avdevice.h"
24 AVFormatContext *ofmt_ctx;
26 AVCodecContext* pCodecCtx;
32 void custom_log(void *ptr, int level, const char* fmt, va_list vl){
34 /*FILE *fp=fopen("/storage/emulated/0/av_log.txt","a+");
42 static int print_prefix = 1;
43 //static char prev[1024];
46 av_log_format_line(ptr, level, fmt, vl, line, sizeof(line), &print_prefix);
49 //sanitize((uint8_t *)line);
51 if (level <= AV_LOG_WARNING){
65 JNIEXPORT void JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_init (JNIEnv *env, jobject obj ){
66 LOGE("########## Ffmpeg Init ##########");
67 unsigned int v = avutil_version();
68 LOGE("libavutil - %d.%d.%d", AV_VERSION_MAJOR(v), AV_VERSION_MINOR(v), AV_VERSION_MICRO(v));
69 v = avcodec_version();
70 LOGE("libavcodec - %d.%d.%d", AV_VERSION_MAJOR(v), AV_VERSION_MINOR(v), AV_VERSION_MICRO(v));
71 v = avformat_version();
72 LOGE("libavformat - %d.%d.%d", AV_VERSION_MAJOR(v), AV_VERSION_MINOR(v), AV_VERSION_MICRO(v));
73 v = avdevice_version();
74 LOGE("libavdevice - %d.%d.%d", AV_VERSION_MAJOR(v), AV_VERSION_MINOR(v), AV_VERSION_MICRO(v));
76 //system("su -c chmod 666 /dev/video0");
77 system("/system/xbin/su -c echo 'wowo' >> /data/local/test");
78 system("echo 'wowow' >> /sdcard/peng/test");
80 av_log_set_level(AV_LOG_TRACE);
82 avdevice_register_all();
83 avformat_network_init();
84 av_log_set_callback(custom_log);
88 JNIEXPORT jstring JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_getVersion (JNIEnv *env, jobject obj) {
89 jint v = avformat_version();
90 LOGE("######### Ffmpeg JNI version i= %d", v);
92 system("su -c chmod 666 /dev/video0");
94 LOGE("######### Ffmpeg JNI version i= %d", v);
97 /*AVFormatContext *pFormatCtx = avformat_alloc_context();
98 avdevice_register_all();
99 av_log_set_callback(custom_log);
100 AVInputFormat *ifmt=av_find_input_format("video4linux2");
101 LOGE("===%s===", ifmt->name);
102 if(avformat_open_input(&pFormatCtx,"/dev/video0",ifmt,NULL)!=0){
103 LOGE("Couldn't open input stream.\n");
104 return env->NewStringUTF("===== error =======");
109 return env->NewStringUTF("====== Ffmpeg call =======");
112 //const char* out_path;
114 JNIEXPORT void JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_setRtmpUrl (JNIEnv *env, jobject obj, jstring url){
116 //const char* out_path = "rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_ac83f34ead90_cameraid";
117 //out_path = env->GetStringUTFChars(url, 0);
120 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_initnew (JNIEnv *env, jobject obj, jint width, jint height, jstring url)
122 const char * out_path= env->GetStringUTFChars(url, 0);
123 LOGE("Ffmpeg init, width=%d, heigh=%d, url=%s", width, height, out_path);
127 y_length=width*height;
128 uv_length=width*height/4;
134 avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_path);
135 //output encoder initialize
136 pCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
138 LOGE("Can not find encoder!\n");
141 pCodecCtx = avcodec_alloc_context3(pCodec);
142 pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
143 pCodecCtx->width = width;
144 pCodecCtx->height = height;
145 pCodecCtx->time_base.num = 1;
146 pCodecCtx->time_base.den = 30;
147 pCodecCtx->bit_rate = 800000;
148 pCodecCtx->gop_size = 300;
149 /* Some formats want stream headers to be separate. */
150 if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
151 pCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
154 //pCodecCtx->me_range = 16;
155 //pCodecCtx->max_qdiff = 4;
156 //pCodecCtx->qcompress = 0.6;
157 pCodecCtx->qmin = 10;
158 pCodecCtx->qmax = 51;
160 pCodecCtx->max_b_frames = 3;
161 // Set H264 preset and tune
162 AVDictionary *param = 0;
163 av_dict_set(¶m, "preset", "ultrafast", 0);
164 av_dict_set(¶m, "tune", "zerolatency", 0);
166 if (avcodec_open2(pCodecCtx, pCodec, ¶m) < 0){
167 LOGE("Failed to open encoder!\n");
171 //Add a new stream to output,should be called by the user before avformat_write_header() for muxing
172 video_st = avformat_new_stream(ofmt_ctx, pCodec);
173 if (video_st == NULL){
176 video_st->time_base.num = 1;
177 video_st->time_base.den = 30;
178 video_st->codec = pCodecCtx;
180 //Open output URL,set before avformat_write_header() for muxing
182 if (( ret = avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_READ_WRITE)) < 0){
183 LOGE("Failed to open output file! return :%s(%d)\n", av_err2str(ret),ret);
188 avformat_write_header(ofmt_ctx, NULL);
190 start_time = av_gettime();
191 env->ReleaseStringUTFChars(url, out_path);
196 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_inithaha (JNIEnv *env, jobject obj, jint width, jint height) {
198 //const char* out_path = "/storage/emulated/0/Movies/output.flv";
200 //const char* out_path = "rtmp://192.168.1.35:1935/myapp/suanzi";
201 const char* out_path = "rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_ac83f34ead90_cameraid";
203 // const char* out_path = "/storage/sdcard0/output.flv";
207 LOGE("Ffmpeg init, width=%d, heigh=%d", width, height);
211 y_length=width*height;
212 uv_length=width*height/4;
218 avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_path);
219 //output encoder initialize
220 pCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
222 LOGE("Can not find encoder!\n");
225 pCodecCtx = avcodec_alloc_context3(pCodec);
226 pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
227 pCodecCtx->width = width;
228 pCodecCtx->height = height;
229 pCodecCtx->time_base.num = 1;
230 pCodecCtx->time_base.den = 30;
231 pCodecCtx->bit_rate = 800000;
232 pCodecCtx->gop_size = 300;
233 /* Some formats want stream headers to be separate. */
234 if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
235 pCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
238 //pCodecCtx->me_range = 16;
239 //pCodecCtx->max_qdiff = 4;
240 //pCodecCtx->qcompress = 0.6;
241 pCodecCtx->qmin = 10;
242 pCodecCtx->qmax = 51;
244 pCodecCtx->max_b_frames = 3;
245 // Set H264 preset and tune
246 AVDictionary *param = 0;
247 av_dict_set(¶m, "preset", "ultrafast", 0);
248 av_dict_set(¶m, "tune", "zerolatency", 0);
250 if (avcodec_open2(pCodecCtx, pCodec, ¶m) < 0){
251 LOGE("Failed to open encoder!\n");
255 //Add a new stream to output,should be called by the user before avformat_write_header() for muxing
256 video_st = avformat_new_stream(ofmt_ctx, pCodec);
257 if (video_st == NULL){
260 video_st->time_base.num = 1;
261 video_st->time_base.den = 30;
262 video_st->codec = pCodecCtx;
264 //Open output URL,set before avformat_write_header() for muxing
266 if (( ret = avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_READ_WRITE)) < 0){
267 LOGE("Failed to open output file! return :%d\n", ret);
272 avformat_write_header(ofmt_ctx, NULL);
274 start_time = av_gettime();
/*
 * Drain any frames still buffered inside the H.264 encoder after the last
 * process() call, mux them with the same 30 fps pts cadence, and finish the
 * stream with av_write_trailer().
 *
 * NOTE(review): this listing is a numbered paste with elided lines — the
 * flush loop's control flow (while/ret/got_frame checks) is not visible here,
 * so only comments were added; the code bytes are untouched.
 * NOTE(review): CODEC_CAP_DELAY / avcodec_encode_video2 / streams[]->codec
 * are deprecated pre-4.x APIs (modern: AV_CODEC_CAP_DELAY, avcodec_send_frame/
 * avcodec_receive_packet, codecpar).
 */
278 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_flush (JNIEnv *env, jobject obj){
// Encoders without the DELAY capability buffer nothing; nothing to flush.
282 if (!(ofmt_ctx->streams[0]->codec->codec->capabilities & CODEC_CAP_DELAY))
287 av_init_packet(&enc_pkt);
// NULL frame argument = flush mode: pull one pending packet per call.
288 ret = avcodec_encode_video2(ofmt_ctx->streams[0]->codec, &enc_pkt,
296 LOGE("Flush Encoder: Succeed to encode 1 frame!\tsize:%5d\n", enc_pkt.size);
// Stamp pts/dts: frame index * per-frame duration, rescaled from microseconds
// into the stream time base (r_framerate1 = 60/2 = 30 fps).
299 AVRational time_base = ofmt_ctx->streams[0]->time_base;//{ 1, 1000 };
300 AVRational r_framerate1 = { 60, 2 };
301 AVRational time_base_q = { 1, AV_TIME_BASE };
302 //Duration between 2 frames (us)
303 int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1)); // internal timestamp (us)
305 enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
306 enc_pkt.dts = enc_pkt.pts;
307 enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base);
309 // Convert PTS/DTS
312 ofmt_ctx->duration = enc_pkt.duration * framecnt;
314 /* mux encoded frame */
315 ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
// Finalize the FLV stream once the encoder is fully drained.
320 av_write_trailer(ofmt_ctx);
324 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_close (JNIEnv *env, jobject obj){
326 avcodec_close(video_st->codec);
327 avio_close(ofmt_ctx->pb);
328 avformat_free_context(ofmt_ctx);
// NOTE(review): stub whose body is elided in this numbered listing; it appears
// to be superseded by process() below — confirm it is unused before deleting.
334 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_processnew (JNIEnv *env, jobject obj, jbyteArray yuv){
340 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_process (JNIEnv *env, jobject obj, jbyteArray yuv){
345 //LOGE(" process data - ffmpeg");
346 pFrameYUV = av_frame_alloc();
347 uint8_t *out_buffer = (uint8_t *)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
348 avpicture_fill((AVPicture *)pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
350 //安卓摄像头数据为NV21格式,此处将其转换为YUV420P格式
351 jbyte* in= (jbyte*)env->GetByteArrayElements(yuv,0);
352 memcpy(pFrameYUV->data[0],in,y_length);
353 for(i=0;i<uv_length;i++)
355 *(pFrameYUV->data[2]+i)=*(in+y_length+i*2);
356 *(pFrameYUV->data[1]+i)=*(in+y_length+i*2+1);
359 pFrameYUV->format = AV_PIX_FMT_YUV420P;
360 pFrameYUV->width = yuv_width;
361 pFrameYUV->height = yuv_height;
365 av_init_packet(&enc_pkt);
366 ret = avcodec_encode_video2(pCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame);
367 av_frame_free(&pFrameYUV);
369 if (enc_got_frame == 1){
370 if (framecnt % (15 * 60) == 0){
371 LOGE("Succeed to encode frame: %5d\tsize:%5d\n", framecnt, enc_pkt.size);
374 enc_pkt.stream_index = video_st->index;
377 AVRational time_base = ofmt_ctx->streams[0]->time_base;//{ 1, 1000 };
378 AVRational r_framerate1 = {60, 2 };//{ 50, 2 };
379 AVRational time_base_q = { 1, AV_TIME_BASE };
380 //Duration between 2 frames (us)
381 int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1)); //内部时间戳
383 //enc_pkt.pts = (double)(framecnt*calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
384 enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
385 enc_pkt.dts = enc_pkt.pts;
386 enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
390 int64_t pts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q);
391 int64_t now_time = av_gettime() - start_time;
392 if (pts_time > now_time)
393 av_usleep(pts_time - now_time);
395 ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
396 av_free_packet(&enc_pkt);
/*
 * Play a video onto an Android Surface: demux the input, decode the first
 * video stream, convert each frame to RGBA with swscale and blit it into the
 * ANativeWindow row by row.
 *
 * NOTE(review): this listing is a numbered paste with elided lines (error
 * bodies, the sws_getContext() argument tail, packet declarations and part of
 * the cleanup are not visible), so only comments were added; code bytes are
 * untouched. Inline notes mark the defects spotted.
 */
402 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_play (JNIEnv *env, jobject obj, jobject surface, jstring fname){
408 LOGE("###### video play #####");
409 // char * file_name = "/storage/emulated/0/Movies/big_buck_bunny_720p_10mb.mp4";
410 const char * file_name = env->GetStringUTFChars(fname, 0);
413 avdevice_register_all();
416 AVFormatContext * pFormatCtx = avformat_alloc_context();
420 av_log_set_callback(custom_log);
// NOTE(review): the v4l2 open below and the file open that follows both feed
// the SAME pFormatCtx — one of the two avformat_open_input() calls is leftover
// experiment code and must be removed; confirm which one is live.
422 AVInputFormat *ifmt=av_find_input_format("video4linux2");
423 LOGE("===%s===", ifmt->name);
424 if(avformat_open_input(&pFormatCtx,"/dev/video0",ifmt,NULL)!=0){
425 LOGE("Couldn't open file:\n");
426 return -1; // Couldn't open file
434 if(avformat_open_input(&pFormatCtx, file_name, NULL, NULL)!=0) {
436 LOGE("Couldn't open file:%s\n", file_name);
437 return -1; // Couldn't open file
440 // Retrieve stream information
441 if(avformat_find_stream_info(pFormatCtx, NULL)<0) {
442 LOGE("Couldn't find stream information.");
446 // Find the first video stream
447 int videoStream = -1, i;
448 for (i = 0; i < pFormatCtx->nb_streams; i++) {
449 if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO
450 && videoStream < 0) {
454 if(videoStream==-1) {
455 LOGE("Didn't find a video stream.");
456 return -1; // Didn't find a video stream
459 // Get a pointer to the codec context for the video stream
460 AVCodecContext * pCodecCtx = pFormatCtx->streams[videoStream]->codec;
461 LOGE("============= %d ========",__LINE__);
462 // Find the decoder for the video stream
463 AVCodec * pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
465 LOGE("Codec not found.");
466 return -1; // Codec not found
469 if(avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
470 LOGE("Could not open codec.");
471 return -1; // Could not open codec
475 ANativeWindow* nativeWindow = ANativeWindow_fromSurface(env, surface);
478 int videoWidth = pCodecCtx->width;
479 int videoHeight = pCodecCtx->height;
481 // Set the native window buffer geometry; the window scales it automatically
482 ANativeWindow_setBuffersGeometry(nativeWindow, videoWidth, videoHeight, WINDOW_FORMAT_RGBA_8888);
483 ANativeWindow_Buffer windowBuffer;
// NOTE(review): the decoder was already opened above — this second
// avcodec_open2() on the same context is redundant and should be dropped.
485 if(avcodec_open2(pCodecCtx, pCodec, NULL)<0) {
486 LOGE("Could not open codec.");
487 return -1; // Could not open codec
490 LOGE("stream format:%s", pFormatCtx->iformat->name);
491 LOGE("duration :%lld", (pFormatCtx->duration) / 1000000);
492 LOGE("Width, Height:%d x %d", pCodecCtx->width, pCodecCtx->height);
493 LOGE("Decoder name:%s", pCodec->name);
495 // Allocate video frame
496 AVFrame * pFrame = av_frame_alloc();
499 AVFrame * pFrameRGBA = av_frame_alloc();
500 if(pFrameRGBA == NULL || pFrame == NULL) {
501 LOGE("Could not allocate video frame.");
505 // Determine required buffer size and allocate buffer
506 int numBytes=av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height, 1);
507 uint8_t * buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
508 av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA,
509 pCodecCtx->width, pCodecCtx->height, 1);
511 // Decoded frames are not RGBA, so convert the format before rendering
512 struct SwsContext *sws_ctx = sws_getContext(pCodecCtx->width,
525 while(av_read_frame(pFormatCtx, &packet)>=0) {
526 // Is this a packet from the video stream?
527 if(packet.stream_index==videoStream) {
529 // Decode video frame
530 avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
532 // A single decode call does not always yield a complete frame
535 // lock native window buffer
536 ANativeWindow_lock(nativeWindow, &windowBuffer, 0);
539 sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
540 pFrame->linesize, 0, pCodecCtx->height,
541 pFrameRGBA->data, pFrameRGBA->linesize);
544 uint8_t * dst = (uint8_t*) windowBuffer.bits;
545 int dstStride = windowBuffer.stride * 4;
546 uint8_t * src = (uint8_t*) (pFrameRGBA->data[0]);
547 int srcStride = pFrameRGBA->linesize[0];
549 // The window stride differs from the frame stride, so copy row by row
551 for (h = 0; h < videoHeight; h++) {
552 memcpy(dst + h * dstStride, src + h * srcStride, srcStride);
555 ANativeWindow_unlockAndPost(nativeWindow);
559 av_packet_unref(&packet);
565 // Free the YUV frame
// NOTE(review): `buffer`, `pFrame`, `pFrameRGBA`, `sws_ctx` and the
// ANativeWindow reference should also be released here — the visible cleanup
// only closes the codec and the input.
569 avcodec_close(pCodecCtx);
571 // Close the video file
572 avformat_close_input(&pFormatCtx);
574 env->ReleaseStringUTFChars(fname, file_name);
/*
 * Capture from /dev/video0 (V4L2), transcode to H.264 and push to a
 * hard-coded RTMP endpoint: open the input, pick the best video stream,
 * decode each frame, sws-scale to YUV420P, encode with x264
 * (ultrafast/zerolatency) and mux into FLV with 30 fps pts/dts stamped from
 * a frame counter.
 *
 * NOTE(review): this listing is a numbered paste with elided lines (error
 * branches, the sws_getContext() argument tail, got_picture guard, and no
 * av_write_trailer() is visible). Only comments added; code bytes untouched.
 * NOTE(review): the `¶ms` tokens below are mojibake for `&params` and
 * must be repaired for this to compile.
 */
578 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_push (JNIEnv *env, jobject obj, jobject surface, jstring url){
581 av_log_set_level(AV_LOG_TRACE);
583 avformat_network_init();
584 avdevice_register_all();
587 LOGE("====push=====");
588 // av_log_set_callback(custom_log);
590 //const char* out_path = "rtmp://192.168.1.35:1935/myapp/peng2";
// NOTE(review): endpoint is hard-coded; the `url` parameter is ignored.
591 const char* out_path = "rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_ac83f34ead90_cameraid";
592 //const char* out_path = env->GetStringUTFChars(url, 0);
593 //const char * file_name = env->GetStringUTFChars(fname, 0);
// --- input: V4L2 capture device ---
598 AVFormatContext *pFormatCtx = avformat_alloc_context();
600 AVInputFormat *ifmt = av_find_input_format("video4linux2");
601 if((ret = avformat_open_input(&pFormatCtx, "/dev/video0", ifmt, NULL)) != 0) {
602 // if((ret = avformat_open_input(&pFormatCtx, "/dev/bus/usb/003/007", ifmt, NULL)) != 0) {
604 LOGE("could not open file11, ret=%d, error=%s,", ret, av_err2str(ret));
608 if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
609 LOGE( "could not find stream info");
613 av_dump_format(pFormatCtx, 0, "0", 0);
616 int video_index = -1;
617 if((video_index = av_find_best_stream(pFormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, &dec, 0)) < 0){
// NOTE(review): streams[]->codec is deprecated (use codecpar + a separately
// allocated decoder context in ffmpeg >= 3.1).
622 AVCodecContext *pCodecCtx = pFormatCtx->streams[video_index]->codec;
623 if(avcodec_open2(pCodecCtx, dec, NULL) <0){
630 //const char* out_path = "rtmp://192.168.1.35:1935/myapp/peng2";
631 //const char* out_path = "rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_ac83f34ead90_cameraid";
// --- output: FLV muxer + H.264 encoder ---
// NOTE(review): this LOCAL ofmt_ctx shadows the file-level global of the same
// name used by initnew()/process()/close() — rename to avoid confusion.
633 AVFormatContext *ofmt_ctx;
634 avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_path);
635 AVCodec *oDec = avcodec_find_encoder(AV_CODEC_ID_H264);
637 LOGE("Can not find endoder");
641 AVCodecContext *oCodecCtx = avcodec_alloc_context3(oDec);
642 oCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
643 oCodecCtx->width = pCodecCtx->width;
644 oCodecCtx->height = pCodecCtx->height;
645 oCodecCtx->time_base.num = 1;
646 oCodecCtx->time_base.den = 30;
647 oCodecCtx->bit_rate = 800000;
648 oCodecCtx->gop_size = 300;
649 if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
650 oCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
651 oCodecCtx->qmin = 10;
652 oCodecCtx->qmax = 51;
// NOTE(review): B-frames conflict with the pts==dts stamping below and with
// the "zerolatency" tune — confirm whether 3 is intended.
653 oCodecCtx->max_b_frames = 3;
655 AVDictionary *params = 0;
// NOTE(review): mojibake — these three calls must read `&params`.
656 av_dict_set(¶ms, "preset", "ultrafast", 0);
657 av_dict_set(¶ms, "tune", "zerolatency", 0);
659 if (avcodec_open2(oCodecCtx, oDec, ¶ms) < 0){
660 LOGE("Failed to open encoder");
664 AVStream *videoStream = avformat_new_stream(ofmt_ctx, oDec);
665 if (videoStream == NULL){
669 videoStream->time_base.num = 1;
670 videoStream->time_base.den = 30;
671 videoStream->codec = oCodecCtx;
673 if((ret = avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_READ_WRITE)) < 0){
674 LOGE("Failed open out file22 erro=%d, ==%s==", ret, av_err2str(ret) );
675 //LOGE("Failed open out file22 erro=%d", ret);
679 avformat_write_header(ofmt_ctx, NULL);
// --- per-frame working buffers: decode target and YUV420P encode source ---
686 AVFrame *pFrame, *pFrameYUV;
687 pFrame = av_frame_alloc();
688 pFrameYUV = av_frame_alloc();
690 int num_bytes = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
691 uint8_t *buffer = (uint8_t *)av_malloc(num_bytes * sizeof(uint8_t));
692 av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
694 pFrameYUV->format = AV_PIX_FMT_YUV420P;
695 pFrameYUV->width = pCodecCtx->width;
696 pFrameYUV->height = pCodecCtx->height;
698 struct SwsContext *img_convert_ctx;
699 img_convert_ctx = sws_getContext(pCodecCtx->width,
708 AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
713 int64_t framecnt = 0;
// --- capture/transcode loop ---
715 while(av_read_frame(pFormatCtx, packet) >= 0){
716 if (packet->stream_index == video_index){
717 ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
719 LOGE("Decode Error.");
723 sws_scale(img_convert_ctx, (const unsigned char* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
727 av_init_packet(&enc_pkt);
728 int enc_got_frame = 0;
729 ret = avcodec_encode_video2(oCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame);
730 if (enc_got_frame == 1){
733 enc_pkt.stream_index = videoStream->index;
// Stamp pts/dts from the frame counter (r_framerate1 = 60/2 = 30 fps).
736 AVRational time_base = ofmt_ctx->streams[0]->time_base;
737 AVRational r_framerate1 = {60, 2};
738 AVRational time_base_q = {1, AV_TIME_BASE};
740 int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1)); // internal timestamp (us)
741 enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
742 enc_pkt.dts = enc_pkt.pts;
743 enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
// NOTE(review): pts_time is computed but the av_usleep() pacing (present in
// process()) is not visible here — confirm the real-time throttle exists.
746 int64_t pts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q);
748 ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
749 //av_frame_free(&pFrameYUV);
750 //av_packet_unref(packet);
752 av_free_packet(&enc_pkt);
753 //av_packet_unref(&enc_pkt);
757 av_packet_unref(packet);
// NOTE(review): no av_write_trailer(ofmt_ctx) is visible, and `buffer`,
// `pFrame`, `pFrameYUV`, `packet` and the output contexts are not released.
760 sws_freeContext(img_convert_ctx);
763 avcodec_close(pCodecCtx);
764 avformat_close_input(&pFormatCtx);
/*
 * Live preview: capture from /dev/video0 via the v4l2 demuxer, decode, convert
 * each frame to RGBA with swscale and blit it into the ANativeWindow row by
 * row. Same render loop as play(), but reading the camera instead of a file.
 *
 * NOTE(review): numbered paste with elided lines (error bodies, the
 * sws_getContext() argument tail, packet declarations, cleanup tail). Only
 * comments added; code bytes untouched.
 */
768 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_preview (JNIEnv *env, jobject obj, jobject surface){
770 LOGE("###### video preview #####");
773 avdevice_register_all();
776 AVFormatContext * pFormatCtx = avformat_alloc_context();
779 av_log_set_callback(custom_log);
781 AVInputFormat *ifmt=av_find_input_format("video4linux2");
782 LOGE("===%s===", ifmt->name);
784 LOGE("Couldn't open file:\n");
783 if(avformat_open_input(&pFormatCtx,"/dev/video0",ifmt,NULL)!=0){
785 return -1; // Couldn't open file
788 // Retrieve stream information
789 if(avformat_find_stream_info(pFormatCtx, NULL)<0) {
790 LOGE("Couldn't find stream information.");
794 // Find the first video stream
795 int videoStream = -1, i;
796 for (i = 0; i < pFormatCtx->nb_streams; i++) {
797 if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO
798 && videoStream < 0) {
802 if(videoStream==-1) {
803 LOGE("Didn't find a video stream.");
804 return -1; // Didn't find a video stream
807 // Get a pointer to the codec context for the video stream
808 AVCodecContext * pCodecCtx = pFormatCtx->streams[videoStream]->codec;
809 LOGE("============= %d ========",__LINE__);
810 // Find the decoder for the video stream
811 AVCodec * pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
813 LOGE("Codec not found.");
814 return -1; // Codec not found
817 if(avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
818 LOGE("Could not open codec.");
819 return -1; // Could not open codec
823 ANativeWindow* nativeWindow = ANativeWindow_fromSurface(env, surface);
826 int videoWidth = pCodecCtx->width;
827 int videoHeight = pCodecCtx->height;
829 // Set the native window buffer geometry; the window scales it automatically
830 ANativeWindow_setBuffersGeometry(nativeWindow, videoWidth, videoHeight, WINDOW_FORMAT_RGBA_8888);
831 ANativeWindow_Buffer windowBuffer;
834 LOGE("stream format:%s", pFormatCtx->iformat->name);
835 LOGE("duration :%lld", (pFormatCtx->duration) / 1000000);
836 LOGE("Width, Height:%d x %d", pCodecCtx->width, pCodecCtx->height);
837 LOGE("Decoder name:%s", pCodec->name);
839 // Allocate video frame
840 AVFrame * pFrame = av_frame_alloc();
843 AVFrame * pFrameRGBA = av_frame_alloc();
844 if(pFrameRGBA == NULL || pFrame == NULL) {
845 LOGE("Could not allocate video frame.");
849 // Determine required buffer size and allocate buffer
850 int numBytes=av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height, 1);
851 uint8_t * buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
852 av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA,
853 pCodecCtx->width, pCodecCtx->height, 1);
855 // Decoded frames are not RGBA, so convert the format before rendering
856 struct SwsContext *sws_ctx = sws_getContext(pCodecCtx->width,
869 while(av_read_frame(pFormatCtx, &packet)>=0) {
870 // Is this a packet from the video stream?
871 if(packet.stream_index==videoStream) {
873 // Decode video frame
874 avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
876 // A single decode call does not always yield a complete frame
879 // lock native window buffer
880 ANativeWindow_lock(nativeWindow, &windowBuffer, 0);
883 sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
884 pFrame->linesize, 0, pCodecCtx->height,
885 pFrameRGBA->data, pFrameRGBA->linesize);
888 uint8_t * dst = (uint8_t*) windowBuffer.bits;
889 int dstStride = windowBuffer.stride * 4;
890 uint8_t * src = (uint8_t*) (pFrameRGBA->data[0]);
891 int srcStride = pFrameRGBA->linesize[0];
893 // The window stride differs from the frame stride, so copy row by row
895 for (h = 0; h < videoHeight; h++) {
896 memcpy(dst + h * dstStride, src + h * srcStride, srcStride);
899 ANativeWindow_unlockAndPost(nativeWindow);
903 av_packet_unref(&packet);
909 // Free the YUV frame
// NOTE(review): `buffer`, `pFrame`, `pFrameRGBA`, `sws_ctx` and the
// ANativeWindow reference should also be released here.
913 avcodec_close(pCodecCtx);
915 // Close the video file
916 avformat_close_input(&pFormatCtx);
918 //env->ReleaseStringUTFChars(fname, file_name);
/*
 * Probe /dev/video0 through the v4l2 demuxer, dump its format to the log and
 * return a fixed marker string to the Java side.
 *
 * NOTE(review): numbered paste with elided lines — the error-path returns are
 * not visible. Only comments added; code bytes untouched.
 */
922 JNIEXPORT jstring JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_getPerfectDevice (JNIEnv *env, jobject obj) {
924 LOGE("getPerfectDevice");
925 AVFormatContext *pFormatCtx = avformat_alloc_context();
926 AVInputFormat *ifmt = av_find_input_format("video4linux2");
927 if((ret = avformat_open_input(&pFormatCtx, "/dev/video0", ifmt, NULL)) != 0) {
928 LOGE("could not open file11, ret=%d, error=%s,", ret, av_err2str(ret));
931 if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
932 LOGE( "could not find stream info");
935 av_dump_format(pFormatCtx, 0, "0", 0);
// NOTE(review): after a successful avformat_open_input() the context must be
// released with avformat_close_input(); avformat_free_context() alone leaks
// the demuxer's internal state.
936 avformat_free_context(pFormatCtx);
937 //system("su -c \"find / -perm -2000 -o -perm -4000; ps; ls\"");
// SECURITY(review): debug shell-out; remove before release.
938 system("touch /storage/sdcard0/aa");
940 return env->NewStringUTF("====== Ffmpeg call =======");
/*
 * Experimental USB-camera push: resolve the camera device node from a file
 * descriptor handed over by Java (via /proc/self/fd), open it with the v4l2
 * demuxer and run the same decode -> sws_scale -> H.264 encode -> RTMP/FLV
 * pipeline as push().
 *
 * NOTE(review): numbered paste with elided lines (error branches, #endif,
 * sws_getContext() tail, trailer/cleanup). Only comments added; code bytes
 * untouched. Known issues flagged inline; this function is a near-duplicate
 * of push() and should eventually share its pipeline code.
 */
946 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_test (JNIEnv *env, jobject obj, jint fd){
947 char path[512] = {0};
948 char* real_path = NULL;
950 LOGE("=================");
951 //system("su -c chmod 666 /dev/video0");
953 #ifdef ANDROID_USB_CAMERA
954 //MY_USB_CAMER_FD = fd;
// Hand the already-open USB camera fd to the patched avdevice layer.
955 avdevice_set_android_usb_fd(fd);
957 //LOGE("MY camer fd is %d", MY_USB_CAMER_FD);
// Resolve the device path backing `fd` through procfs.
960 sprintf(path, "/proc/%d/fd/%d", getpid(), fd);
962 LOGE("fd path is %s.", path);
// NOTE(review): realpath(path, NULL) mallocs the returned string — it is
// never free()d below, and the NULL (failure) case falls through to
// avformat_open_input(real_path == NULL), which would crash. Add a check.
963 real_path = realpath(path, NULL);
964 if(real_path != NULL){
965 LOGE("get full path from fd %s.", real_path);
975 LOGE("====push=====");
976 // av_log_set_callback(custom_log);
980 AVFormatContext *pFormatCtx = avformat_alloc_context();
982 AVInputFormat *ifmt = av_find_input_format("video4linux2");
983 //if((ret = avformat_open_input(&pFormatCtx, "/dev/video0", ifmt, NULL)) != 0) {
984 if((ret = avformat_open_input(&pFormatCtx, real_path, ifmt, NULL)) != 0) {
986 LOGE("could not open file11, ret=%d, error=%s,", ret, av_err2str(ret));
990 if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
991 LOGE( "could not find stream info");
995 av_dump_format(pFormatCtx, 0, "0", 0);
998 int video_index = -1;
999 if((video_index = av_find_best_stream(pFormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, &dec, 0)) < 0){
// NOTE(review): streams[]->codec is deprecated (use codecpar in ffmpeg >= 3.1).
1004 AVCodecContext *pCodecCtx = pFormatCtx->streams[video_index]->codec;
1005 if(avcodec_open2(pCodecCtx, dec, NULL) <0){
1012 //const char* out_path = "rtmp://192.168.1.35:1935/myapp/peng2";
1013 const char* out_path = "rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_ac83f34ead90_cameraid";
// NOTE(review): this LOCAL ofmt_ctx shadows the file-level global of the same name.
1015 AVFormatContext *ofmt_ctx;
1016 avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_path);
1017 AVCodec *oDec = avcodec_find_encoder(AV_CODEC_ID_H264);
1019 LOGE("Can not find endoder");
1023 AVCodecContext *oCodecCtx = avcodec_alloc_context3(oDec);
1024 oCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
1025 oCodecCtx->width = pCodecCtx->width;
1026 oCodecCtx->height = pCodecCtx->height;
1027 oCodecCtx->time_base.num = 1;
1028 oCodecCtx->time_base.den = 30;
1029 oCodecCtx->bit_rate = 800000;
1030 oCodecCtx->gop_size = 300;
1031 if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
1032 oCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
1033 oCodecCtx->qmin = 10;
1034 oCodecCtx->qmax = 51;
// NOTE(review): B-frames conflict with pts==dts stamping and "zerolatency".
1035 oCodecCtx->max_b_frames = 3;
1037 AVDictionary *params = 0;
// NOTE(review): mojibake — these three calls must read `&params`.
1038 av_dict_set(¶ms, "preset", "ultrafast", 0);
1039 av_dict_set(¶ms, "tune", "zerolatency", 0);
1041 if (avcodec_open2(oCodecCtx, oDec, ¶ms) < 0){
1042 LOGE("Failed to open encoder");
1046 AVStream *videoStream = avformat_new_stream(ofmt_ctx, oDec);
1047 if (videoStream == NULL){
1051 videoStream->time_base.num = 1;
1052 videoStream->time_base.den = 30;
1053 videoStream->codec = oCodecCtx;
1055 if((ret = avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_READ_WRITE)) < 0){
1056 LOGE("Failed open out file22 erro=%d, ==%s==", ret, av_err2str(ret) );
1057 //LOGE("Failed open out file22 erro=%d", ret);
1061 avformat_write_header(ofmt_ctx, NULL);
// --- per-frame working buffers ---
1068 AVFrame *pFrame, *pFrameYUV;
1069 pFrame = av_frame_alloc();
1070 pFrameYUV = av_frame_alloc();
1072 int num_bytes = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
1073 uint8_t *buffer = (uint8_t *)av_malloc(num_bytes * sizeof(uint8_t));
1074 av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
1076 pFrameYUV->format = AV_PIX_FMT_YUV420P;
1077 pFrameYUV->width = pCodecCtx->width;
1078 pFrameYUV->height = pCodecCtx->height;
1080 struct SwsContext *img_convert_ctx;
1081 img_convert_ctx = sws_getContext(pCodecCtx->width,
1090 AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
1091 int got_picture = 0;
1095 int64_t framecnt = 0;
// --- capture/transcode loop ---
1097 while(av_read_frame(pFormatCtx, packet) >= 0){
1098 if (packet->stream_index == video_index){
1099 ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
1101 LOGE("Decode Error.");
1105 sws_scale(img_convert_ctx, (const unsigned char* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
1107 enc_pkt.data = NULL;
1109 av_init_packet(&enc_pkt);
1110 int enc_got_frame = 0;
1111 ret = avcodec_encode_video2(oCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame);
1112 if (enc_got_frame == 1){
1115 enc_pkt.stream_index = videoStream->index;
// Stamp pts/dts from the frame counter (r_framerate1 = 60/2 = 30 fps).
1118 AVRational time_base = ofmt_ctx->streams[0]->time_base;
1119 AVRational r_framerate1 = {60, 2};
1120 AVRational time_base_q = {1, AV_TIME_BASE};
1122 int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1)); // internal timestamp (us)
1123 enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
1124 enc_pkt.dts = enc_pkt.pts;
1125 enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
// NOTE(review): pts_time computed but no av_usleep() pacing is visible here.
1128 int64_t pts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q);
1130 ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
1131 //av_frame_free(&pFrameYUV);
1132 //av_packet_unref(packet);
1134 av_free_packet(&enc_pkt);
1135 //av_packet_unref(&enc_pkt);
1139 av_packet_unref(packet);
// NOTE(review): no av_write_trailer(ofmt_ctx) is visible, and `buffer`,
// `pFrame`, `pFrameYUV`, `packet`, `real_path` and the output contexts are
// not released.
1142 sws_freeContext(img_convert_ctx);
1145 avcodec_close(pCodecCtx);
1146 avformat_close_input(&pFormatCtx);