2 // Created by Peng Li on 30/4/2018.
4 #include "ai_suanzi_rtmpclient_Ffmpeg.h"
5 #include <android/native_window.h>
6 #include <android/native_window_jni.h>
15 #include "libavformat/avformat.h"
16 #include "libavcodec/avcodec.h"
17 #include "libswscale/swscale.h"
18 #include "libavutil/imgutils.h"
19 #include "libavutil/time.h"
20 #include "libavdevice/avdevice.h"
// File-scope muxer/encoder state shared by the init/process/flush/close JNI entry points.
// NOTE(review): global mutable state with no locking — assumes all JNI calls arrive on one thread; confirm.
24 AVFormatContext *ofmt_ctx;
// H.264 encoder context configured in the init functions and used by Ffmpeg_process.
26 AVCodecContext* pCodecCtx;
31 void javaPrint(JNIEnv *env, jobject obj, const char* str)
33 jclass clazz = (*env).GetObjectClass(obj);
34 jobject mobj = env->NewGlobalRef(obj);
35 jmethodID mmid = env->GetMethodID(clazz, "print", "(Ljava/lang/String;)V");
36 jstring jstr = env->NewStringUTF(str);
37 env->CallVoidMethod(mobj, mmid, jstr);
38 env->DeleteLocalRef(jstr);
// av_log callback that formats each FFmpeg log line and forwards messages at
// AV_LOG_WARNING severity or worse to the Java `print` callback.
// NOTE(review): relies on cached globals g_env/g_obj — a JNIEnv* is only valid
// on the thread that obtained it; confirm FFmpeg never logs from another thread.
44 void custom_log222 (void *ptr, int level, const char* fmt, va_list vl){
45     static int print_prefix = 1;
47     av_log_format_line(ptr, level, fmt, vl, line, sizeof(line), &print_prefix);
48     if (level <= AV_LOG_WARNING){
50         javaPrint(g_env, g_obj, line);
53     //javaPrint(g_env, g_obj, line);
// Default av_log callback: formats the message into `line`; the (elided)
// remainder of the body emits WARNING-and-worse lines to the Android log.
// The commented-out fopen path previously appended logs to a file on sdcard.
57 void custom_log(void *ptr, int level, const char* fmt, va_list vl){
59 /*FILE *fp=fopen("/storage/emulated/0/av_log.txt","a+");
67     static int print_prefix = 1;
68     //static char prev[1024];
71     av_log_format_line(ptr, level, fmt, vl, line, sizeof(line), &print_prefix);
74     //sanitize((uint8_t *)line);
76     if (level <= AV_LOG_WARNING){
// One-time FFmpeg initialization: logs linked library versions, registers
// devices/network, and installs the custom log callback.
92 JNIEXPORT void JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_init (JNIEnv *env, jobject obj ){
93 LOGE("########## Ffmpeg Init ##########");
94 unsigned int v = avutil_version();
95 LOGE("libavutil - %d.%d.%d", AV_VERSION_MAJOR(v), AV_VERSION_MINOR(v), AV_VERSION_MICRO(v));
96 v = avcodec_version();
97 LOGE("libavcodec - %d.%d.%d", AV_VERSION_MAJOR(v), AV_VERSION_MINOR(v), AV_VERSION_MICRO(v));
98 v = avformat_version();
99 LOGE("libavformat - %d.%d.%d", AV_VERSION_MAJOR(v), AV_VERSION_MINOR(v), AV_VERSION_MICRO(v));
100 v = avdevice_version();
101 LOGE("libavdevice - %d.%d.%d", AV_VERSION_MAJOR(v), AV_VERSION_MINOR(v), AV_VERSION_MICRO(v));
103 //system("su -c chmod 666 /dev/video0");
// NOTE(review): debug-only root shell commands — remove before release; system() with
// fixed strings is still a rooted-device hack and silently fails on non-rooted phones.
104 system("/system/xbin/su -c echo 'wowo' >> /data/local/test");
105 system("echo 'wowow' >> /sdcard/peng/test");
107 av_log_set_level(AV_LOG_TRACE);
109 avdevice_register_all();
110 avformat_network_init();
111 av_log_set_callback(custom_log);
// Returns a fixed marker string after logging the libavformat version; the
// commented-out block previously probed /dev/video0 via video4linux2.
// NOTE(review): system("su -c ...") is a rooted-device hack; remove for production.
115 JNIEXPORT jstring JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_getVersion (JNIEnv *env, jobject obj) {
116 jint v = avformat_version();
117 LOGE("######### Ffmpeg JNI version i= %d", v);
119 system("su -c chmod 666 /dev/video0");
121 LOGE("######### Ffmpeg JNI version i= %d", v);
124 /*AVFormatContext *pFormatCtx = avformat_alloc_context();
125 avdevice_register_all();
126 av_log_set_callback(custom_log);
127 AVInputFormat *ifmt=av_find_input_format("video4linux2");
128 LOGE("===%s===", ifmt->name);
129 if(avformat_open_input(&pFormatCtx,"/dev/video0",ifmt,NULL)!=0){
130 LOGE("Couldn't open input stream.\n");
131 return env->NewStringUTF("===== error =======");
136 return env->NewStringUTF("====== Ffmpeg call =======");
139 //const char* out_path;
// Intended to store the RTMP output URL; currently a no-op — the assignment
// to out_path is commented out and callers still rely on hard-coded URLs.
141 JNIEXPORT void JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_setRtmpUrl (JNIEnv *env, jobject obj, jstring url){
143 //const char* out_path = "rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_ac83f34ead90_cameraid";
144 //out_path = env->GetStringUTFChars(url, 0);
150 //#define JLOGE(s) javaPrint(env, obj, (s));
// Initializes the RTMP push pipeline for NV21 frames of width x height:
// allocates an FLV output context for `url`, configures a 30 fps H.264
// encoder (800 kbps, GOP 300, ultrafast/zerolatency), creates the output
// stream, opens the URL, and writes the muxer header.
// NOTE(review): "¶m" below is mojibake for "&param" — fix the file encoding
// before this will compile. CODEC_FLAG_GLOBAL_HEADER and assigning
// video_st->codec are deprecated in modern FFmpeg (use AV_CODEC_FLAG_GLOBAL_HEADER
// and avcodec_parameters_from_context).
153 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_initnew (JNIEnv *env, jobject obj, jint width, jint height, jstring url)
155 const char * out_path= env->GetStringUTFChars(url, 0);
156 LOGE("Ffmpeg init, width=%d, heigh=%d, url=%s", width, height, out_path);
157 javaPrint(env, obj, "Ffmpeg init");
// Cache plane sizes for the NV21 -> YUV420P copy done per frame in Ffmpeg_process.
161 y_length=width*height;
162 uv_length=width*height/4;
166 avformat_network_init();
169 av_log_set_callback(custom_log222);
173 avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_path);
174 //output encoder initialize
175 pCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
177 LOGE("Can not find encoder!\n");
178 javaPrint(env, obj, "Can not find encoder!");
181 pCodecCtx = avcodec_alloc_context3(pCodec);
182 pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
183 pCodecCtx->width = width;
184 pCodecCtx->height = height;
185 pCodecCtx->time_base.num = 1;
186 pCodecCtx->time_base.den = 30;
187 pCodecCtx->bit_rate = 800000;
188 pCodecCtx->gop_size = 300;
189 /* Some formats want stream headers to be separate. */
190 if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
191 pCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
194 //pCodecCtx->me_range = 16;
195 //pCodecCtx->max_qdiff = 4;
196 //pCodecCtx->qcompress = 0.6;
197 pCodecCtx->qmin = 10;
198 pCodecCtx->qmax = 51;
// NOTE(review): max_b_frames = 3 adds latency; 0 is typical for zerolatency streaming.
200 pCodecCtx->max_b_frames = 3;
201 // Set H264 preset and tune
202 AVDictionary *param = 0;
203 av_dict_set(¶m, "preset", "ultrafast", 0);
204 av_dict_set(¶m, "tune", "zerolatency", 0);
206 if (avcodec_open2(pCodecCtx, pCodec, ¶m) < 0){
207 LOGE("Failed to open encoder!\n");
208 javaPrint(env, obj, "Failed to open encoder!");
212 //Add a new stream to output,should be called by the user before avformat_write_header() for muxing
213 video_st = avformat_new_stream(ofmt_ctx, pCodec);
214 if (video_st == NULL){
217 video_st->time_base.num = 1;
218 video_st->time_base.den = 30;
219 video_st->codec = pCodecCtx;
221 //Open output URL,set before avformat_write_header() for muxing
223 if (( ret = avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_READ_WRITE)) < 0){
224 LOGE("Failed to open output file! return :%s(%d)\n", av_err2str(ret),ret);
225 javaPrint(env, obj, "Failed to open output file! return!");
230 avformat_write_header(ofmt_ctx, NULL);
// start_time anchors the pacing logic (av_usleep) in Ffmpeg_process.
232 start_time = av_gettime();
233 env->ReleaseStringUTFChars(url, out_path);
// Older variant of Ffmpeg_initnew with a hard-coded RTMP URL instead of a
// jstring parameter; otherwise sets up the identical FLV/H.264 pipeline.
// NOTE(review): near-duplicate of Ffmpeg_initnew — consider delegating to it.
// "¶m" below is mojibake for "&param"; CODEC_FLAG_GLOBAL_HEADER and
// video_st->codec are deprecated FFmpeg APIs.
238 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_inithaha (JNIEnv *env, jobject obj, jint width, jint height) {
240 //const char* out_path = "/storage/emulated/0/Movies/output.flv";
242 //const char* out_path = "rtmp://192.168.1.35:1935/myapp/suanzi";
243 const char* out_path = "rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_ac83f34ead90_cameraid";
245 // const char* out_path = "/storage/sdcard0/output.flv";
249 LOGE("Ffmpeg init, width=%d, heigh=%d", width, height);
// Plane sizes used by the per-frame NV21 -> YUV420P conversion.
253 y_length=width*height;
254 uv_length=width*height/4;
260 avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_path);
261 //output encoder initialize
262 pCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
264 LOGE("Can not find encoder!\n");
267 pCodecCtx = avcodec_alloc_context3(pCodec);
268 pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
269 pCodecCtx->width = width;
270 pCodecCtx->height = height;
271 pCodecCtx->time_base.num = 1;
272 pCodecCtx->time_base.den = 30;
273 pCodecCtx->bit_rate = 800000;
274 pCodecCtx->gop_size = 300;
275 /* Some formats want stream headers to be separate. */
276 if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
277 pCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
280 //pCodecCtx->me_range = 16;
281 //pCodecCtx->max_qdiff = 4;
282 //pCodecCtx->qcompress = 0.6;
283 pCodecCtx->qmin = 10;
284 pCodecCtx->qmax = 51;
286 pCodecCtx->max_b_frames = 3;
287 // Set H264 preset and tune
288 AVDictionary *param = 0;
289 av_dict_set(¶m, "preset", "ultrafast", 0);
290 av_dict_set(¶m, "tune", "zerolatency", 0);
292 if (avcodec_open2(pCodecCtx, pCodec, ¶m) < 0){
293 LOGE("Failed to open encoder!\n");
297 //Add a new stream to output,should be called by the user before avformat_write_header() for muxing
298 video_st = avformat_new_stream(ofmt_ctx, pCodec);
299 if (video_st == NULL){
302 video_st->time_base.num = 1;
303 video_st->time_base.den = 30;
304 video_st->codec = pCodecCtx;
306 //Open output URL,set before avformat_write_header() for muxing
308 if (( ret = avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_READ_WRITE)) < 0){
309 LOGE("Failed to open output file! return :%d\n", ret);
314 avformat_write_header(ofmt_ctx, NULL);
316 start_time = av_gettime();
// Drains buffered frames from the delayed encoder (NULL-frame encode loop),
// stamps synthetic 30 fps timestamps, muxes them, and writes the FLV trailer.
// NOTE(review): CODEC_CAP_DELAY / avcodec_encode_video2 are deprecated FFmpeg
// APIs (use AV_CODEC_CAP_DELAY and avcodec_send_frame/receive_packet).
320 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_flush (JNIEnv *env, jobject obj){
324 if (!(ofmt_ctx->streams[0]->codec->codec->capabilities & CODEC_CAP_DELAY))
329 av_init_packet(&enc_pkt);
330 ret = avcodec_encode_video2(ofmt_ctx->streams[0]->codec, &enc_pkt,
338 LOGE("Flush Encoder: Succeed to encode 1 frame!\tsize:%5d\n", enc_pkt.size);
341 AVRational time_base = ofmt_ctx->streams[0]->time_base;//{ 1, 1000 };
// 60/2 = effective 30 fps frame rate used to synthesize timestamps.
342 AVRational r_framerate1 = { 60, 2 };
343 AVRational time_base_q = { 1, AV_TIME_BASE };
344 //Duration between 2 frames (us)
345 int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1)); // internal (AV_TIME_BASE) timestamp units
347 enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
348 enc_pkt.dts = enc_pkt.pts;
349 enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base);
351 // Convert PTS/DTS
354 ofmt_ctx->duration = enc_pkt.duration * framecnt;
356 /* mux encoded frame */
357 ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
362 av_write_trailer(ofmt_ctx);
// Tears down the push pipeline: closes the encoder, the output I/O context,
// and frees the muxer context created by the init functions.
// NOTE(review): no NULL checks — calling close before init (or twice) would crash.
366 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_close (JNIEnv *env, jobject obj){
368 avcodec_close(video_st->codec);
369 avio_close(ofmt_ctx->pb);
370 avformat_free_context(ofmt_ctx);
// Newer frame-processing entry point; body elided in this listing — presumably
// mirrors Ffmpeg_process below. TODO confirm against the full source.
376 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_processnew (JNIEnv *env, jobject obj, jbyteArray yuv){
// Encodes one NV21 camera frame and muxes it to the RTMP output: converts
// NV21 -> YUV420P by hand, encodes with the global pCodecCtx, synthesizes
// 30 fps timestamps, paces output in real time, and writes the packet.
// NOTE(review): out_buffer (av_malloc) is allocated every frame and never
// freed — av_frame_free does not free avpicture_fill'd buffers; likewise the
// GetByteArrayElements pointer is never released. Both leak per frame.
382 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_process (JNIEnv *env, jobject obj, jbyteArray yuv){
387 //LOGE(" process data - ffmpeg");
388 pFrameYUV = av_frame_alloc();
389 uint8_t *out_buffer = (uint8_t *)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
390 avpicture_fill((AVPicture *)pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
392 // Android camera frames arrive as NV21; convert them to YUV420P here
393 jbyte* in= (jbyte*)env->GetByteArrayElements(yuv,0);
394 memcpy(pFrameYUV->data[0],in,y_length);
// De-interleave the NV21 VU plane: V samples go to data[2], U to data[1].
395 for(i=0;i<uv_length;i++)
397 *(pFrameYUV->data[2]+i)=*(in+y_length+i*2);
398 *(pFrameYUV->data[1]+i)=*(in+y_length+i*2+1);
401 pFrameYUV->format = AV_PIX_FMT_YUV420P;
402 pFrameYUV->width = yuv_width;
403 pFrameYUV->height = yuv_height;
407 av_init_packet(&enc_pkt);
408 ret = avcodec_encode_video2(pCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame);
409 av_frame_free(&pFrameYUV);
411 if (enc_got_frame == 1){
// Throttled progress log: once per 900 frames (~1 minute at 15 fps).
412 if (framecnt % (15 * 60) == 0){
413 LOGE("Succeed to encode frame: %5d\tsize:%5d\n", framecnt, enc_pkt.size);
414 javaPrint(env, obj, "Succeed to encode frame:");
418 enc_pkt.stream_index = video_st->index;
421 AVRational time_base = ofmt_ctx->streams[0]->time_base;//{ 1, 1000 };
422 AVRational r_framerate1 = {60, 2 };//{ 50, 2 };
423 AVRational time_base_q = { 1, AV_TIME_BASE };
424 //Duration between 2 frames (us)
425 int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1)); // internal (AV_TIME_BASE) timestamp units
427 //enc_pkt.pts = (double)(framecnt*calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
428 enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
429 enc_pkt.dts = enc_pkt.pts;
430 enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
// Real-time pacing: sleep until this packet's timestamp is due relative to start_time.
434 int64_t pts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q);
435 int64_t now_time = av_gettime() - start_time;
436 if (pts_time > now_time)
437 av_usleep(pts_time - now_time);
439 ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
440 av_free_packet(&enc_pkt);
// Decodes the video stream named by `fname` and renders frames into the given
// Android Surface: open input -> find video stream -> decode -> sws_scale to
// RGBA -> row-by-row copy into the ANativeWindow buffer.
// NOTE(review): avcodec_open2 is called twice on the same context (see below);
// the second call is redundant. nativeWindow is never released on the early
// -1 return paths — leaks a window reference.
446 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_play (JNIEnv *env, jobject obj, jobject surface, jstring fname){
452 LOGE("###### video play #####");
453 // char * file_name = "/storage/emulated/0/Movies/big_buck_bunny_720p_10mb.mp4";
454 const char * file_name = env->GetStringUTFChars(fname, 0);
457 avdevice_register_all();
460 AVFormatContext * pFormatCtx = avformat_alloc_context();
464 av_log_set_callback(custom_log);
// Dead probe of /dev/video0 — superseded by the file_name open below; candidate for removal.
466 AVInputFormat *ifmt=av_find_input_format("video4linux2");
467 LOGE("===%s===", ifmt->name);
468 if(avformat_open_input(&pFormatCtx,"/dev/video0",ifmt,NULL)!=0){
469 LOGE("Couldn't open file:\n");
470 return -1; // Couldn't open file
478 if(avformat_open_input(&pFormatCtx, file_name, NULL, NULL)!=0) {
480 LOGE("Couldn't open file:%s\n", file_name);
481 return -1; // Couldn't open file
484 // Retrieve stream information
485 if(avformat_find_stream_info(pFormatCtx, NULL)<0) {
486 LOGE("Couldn't find stream information.");
490 // Find the first video stream
491 int videoStream = -1, i;
492 for (i = 0; i < pFormatCtx->nb_streams; i++) {
493 if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO
494 && videoStream < 0) {
498 if(videoStream==-1) {
499 LOGE("Didn't find a video stream.");
500 return -1; // Didn't find a video stream
503 // Get a pointer to the codec context for the video stream
504 AVCodecContext * pCodecCtx = pFormatCtx->streams[videoStream]->codec;
505 LOGE("============= %d ========",__LINE__);
506 // Find the decoder for the video stream
507 AVCodec * pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
509 LOGE("Codec not found.");
510 return -1; // Codec not found
513 if(avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
514 LOGE("Could not open codec.");
515 return -1; // Could not open codec
519 ANativeWindow* nativeWindow = ANativeWindow_fromSurface(env, surface);
522 int videoWidth = pCodecCtx->width;
523 int videoHeight = pCodecCtx->height;
525 // Set the native window buffer size; scaling is automatic
526 ANativeWindow_setBuffersGeometry(nativeWindow, videoWidth, videoHeight, WINDOW_FORMAT_RGBA_8888);
527 ANativeWindow_Buffer windowBuffer;
// NOTE(review): duplicate avcodec_open2 — the codec was already opened above.
529 if(avcodec_open2(pCodecCtx, pCodec, NULL)<0) {
530 LOGE("Could not open codec.");
531 return -1; // Could not open codec
534 LOGE("stream format:%s", pFormatCtx->iformat->name);
535 LOGE("duration :%lld", (pFormatCtx->duration) / 1000000);
536 LOGE("Width, Height:%d x %d", pCodecCtx->width, pCodecCtx->height);
537 LOGE("Decoder name:%s", pCodec->name);
539 // Allocate video frame
540 AVFrame * pFrame = av_frame_alloc();
543 AVFrame * pFrameRGBA = av_frame_alloc();
544 if(pFrameRGBA == NULL || pFrame == NULL) {
545 LOGE("Could not allocate video frame.");
549 // Determine required buffer size and allocate buffer
550 int numBytes=av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height, 1);
551 uint8_t * buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
552 av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA,
553 pCodecCtx->width, pCodecCtx->height, 1);
555 // Decoded frames are not RGBA; convert before rendering
556 struct SwsContext *sws_ctx = sws_getContext(pCodecCtx->width,
// Main decode/render loop over demuxed packets.
569 while(av_read_frame(pFormatCtx, &packet)>=0) {
570 // Is this a packet from the video stream?
571 if(packet.stream_index==videoStream) {
573 // Decode video frame
574 avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
576 // A single decode call does not always yield a frame
579 // lock native window buffer
580 ANativeWindow_lock(nativeWindow, &windowBuffer, 0);
583 sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
584 pFrame->linesize, 0, pCodecCtx->height,
585 pFrameRGBA->data, pFrameRGBA->linesize);
588 uint8_t * dst = (uint8_t*) windowBuffer.bits;
// Window stride is in pixels; x4 converts to bytes for RGBA_8888.
589 int dstStride = windowBuffer.stride * 4;
590 uint8_t * src = (uint8_t*) (pFrameRGBA->data[0]);
591 int srcStride = pFrameRGBA->linesize[0];
593 // Window stride differs from frame stride, so copy row by row
595 for (h = 0; h < videoHeight; h++) {
596 memcpy(dst + h * dstStride, src + h * srcStride, srcStride);
599 ANativeWindow_unlockAndPost(nativeWindow);
603 av_packet_unref(&packet);
609 // Free the YUV frame
613 avcodec_close(pCodecCtx);
615 // Close the video file
616 avformat_close_input(&pFormatCtx);
618 env->ReleaseStringUTFChars(fname, file_name);
// Self-contained capture-and-push pipeline: opens /dev/video0 via
// video4linux2, decodes frames, converts them to YUV420P, re-encodes as
// H.264, and streams over RTMP to a hard-coded URL (the `url` parameter is
// currently ignored — see the commented-out GetStringUTFChars).
// NOTE(review): "¶ms" below is mojibake for "&params"; avcodec_encode_video2,
// avcodec_decode_video2, av_free_packet, stream->codec and
// CODEC_FLAG_GLOBAL_HEADER are all deprecated FFmpeg APIs.
622 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_push (JNIEnv *env, jobject obj, jobject surface, jstring url){
625 av_log_set_level(AV_LOG_TRACE);
627 avformat_network_init();
628 avdevice_register_all();
631 LOGE("====push=====");
632 // av_log_set_callback(custom_log);
634 //const char* out_path = "rtmp://192.168.1.35:1935/myapp/peng2";
635 const char* out_path = "rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_ac83f34ead90_cameraid";
636 //const char* out_path = env->GetStringUTFChars(url, 0);
637 //const char * file_name = env->GetStringUTFChars(fname, 0);
// ---- Input: open the V4L2 camera device and locate its video stream ----
642 AVFormatContext *pFormatCtx = avformat_alloc_context();
644 AVInputFormat *ifmt = av_find_input_format("video4linux2");
645 if((ret = avformat_open_input(&pFormatCtx, "/dev/video0", ifmt, NULL)) != 0) {
646 // if((ret = avformat_open_input(&pFormatCtx, "/dev/bus/usb/003/007", ifmt, NULL)) != 0) {
648 LOGE("could not open file11, ret=%d, error=%s,", ret, av_err2str(ret));
652 if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
653 LOGE( "could not find stream info");
657 av_dump_format(pFormatCtx, 0, "0", 0);
660 int video_index = -1;
661 if((video_index = av_find_best_stream(pFormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, &dec, 0)) < 0){
666 AVCodecContext *pCodecCtx = pFormatCtx->streams[video_index]->codec;
667 if(avcodec_open2(pCodecCtx, dec, NULL) <0){
674 //const char* out_path = "rtmp://192.168.1.35:1935/myapp/peng2";
675 //const char* out_path = "rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_ac83f34ead90_cameraid";
// ---- Output: FLV muxer + H.264 encoder (30 fps, 800 kbps, zerolatency) ----
677 AVFormatContext *ofmt_ctx;
678 avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_path);
679 AVCodec *oDec = avcodec_find_encoder(AV_CODEC_ID_H264);
681 LOGE("Can not find endoder");
685 AVCodecContext *oCodecCtx = avcodec_alloc_context3(oDec);
686 oCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
687 oCodecCtx->width = pCodecCtx->width;
688 oCodecCtx->height = pCodecCtx->height;
689 oCodecCtx->time_base.num = 1;
690 oCodecCtx->time_base.den = 30;
691 oCodecCtx->bit_rate = 800000;
692 oCodecCtx->gop_size = 300;
693 if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
694 oCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
695 oCodecCtx->qmin = 10;
696 oCodecCtx->qmax = 51;
697 oCodecCtx->max_b_frames = 3;
699 AVDictionary *params = 0;
700 av_dict_set(¶ms, "preset", "ultrafast", 0);
701 av_dict_set(¶ms, "tune", "zerolatency", 0);
703 if (avcodec_open2(oCodecCtx, oDec, ¶ms) < 0){
704 LOGE("Failed to open encoder");
708 AVStream *videoStream = avformat_new_stream(ofmt_ctx, oDec);
709 if (videoStream == NULL){
713 videoStream->time_base.num = 1;
714 videoStream->time_base.den = 30;
715 videoStream->codec = oCodecCtx;
717 if((ret = avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_READ_WRITE)) < 0){
718 LOGE("Failed open out file22 erro=%d, ==%s==", ret, av_err2str(ret) );
719 //LOGE("Failed open out file22 erro=%d", ret);
723 avformat_write_header(ofmt_ctx, NULL);
// ---- Frame buffers and colorspace converter ----
730 AVFrame *pFrame, *pFrameYUV;
731 pFrame = av_frame_alloc();
732 pFrameYUV = av_frame_alloc();
734 int num_bytes = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
735 uint8_t *buffer = (uint8_t *)av_malloc(num_bytes * sizeof(uint8_t));
736 av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
738 pFrameYUV->format = AV_PIX_FMT_YUV420P;
739 pFrameYUV->width = pCodecCtx->width;
740 pFrameYUV->height = pCodecCtx->height;
742 struct SwsContext *img_convert_ctx;
743 img_convert_ctx = sws_getContext(pCodecCtx->width,
752 AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
757 int64_t framecnt = 0;
// ---- Capture -> decode -> convert -> encode -> mux loop ----
759 while(av_read_frame(pFormatCtx, packet) >= 0){
760 if (packet->stream_index == video_index){
761 ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
763 LOGE("Decode Error.");
767 sws_scale(img_convert_ctx, (const unsigned char* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
771 av_init_packet(&enc_pkt);
772 int enc_got_frame = 0;
773 ret = avcodec_encode_video2(oCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame);
774 if (enc_got_frame == 1){
777 enc_pkt.stream_index = videoStream->index;
// Synthesize 30 fps (60/2) timestamps in the stream's time base.
780 AVRational time_base = ofmt_ctx->streams[0]->time_base;
781 AVRational r_framerate1 = {60, 2};
782 AVRational time_base_q = {1, AV_TIME_BASE};
784 int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1)); // internal (AV_TIME_BASE) timestamp units
785 enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
786 enc_pkt.dts = enc_pkt.pts;
787 enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
790 int64_t pts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q);
792 ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
793 //av_frame_free(&pFrameYUV);
794 //av_packet_unref(packet);
796 av_free_packet(&enc_pkt);
797 //av_packet_unref(&enc_pkt);
801 av_packet_unref(packet);
804 sws_freeContext(img_convert_ctx);
807 avcodec_close(pCodecCtx);
808 avformat_close_input(&pFormatCtx);
// Live camera preview: opens /dev/video0 via video4linux2, decodes frames,
// converts to RGBA with sws_scale, and blits them into the given Surface.
// NOTE(review): near-duplicate of Ffmpeg_play with the input fixed to the
// camera; consider sharing the decode/render loop between the two.
812 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_preview (JNIEnv *env, jobject obj, jobject surface){
814 LOGE("###### video preview #####");
817 avdevice_register_all();
820 AVFormatContext * pFormatCtx = avformat_alloc_context();
823 av_log_set_callback(custom_log);
825 AVInputFormat *ifmt=av_find_input_format("video4linux2");
826 LOGE("===%s===", ifmt->name);
827 if(avformat_open_input(&pFormatCtx,"/dev/video0",ifmt,NULL)!=0){
828 LOGE("Couldn't open file:\n");
829 return -1; // Couldn't open file
832 // Retrieve stream information
833 if(avformat_find_stream_info(pFormatCtx, NULL)<0) {
834 LOGE("Couldn't find stream information.");
838 // Find the first video stream
839 int videoStream = -1, i;
840 for (i = 0; i < pFormatCtx->nb_streams; i++) {
841 if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO
842 && videoStream < 0) {
846 if(videoStream==-1) {
847 LOGE("Didn't find a video stream.");
848 return -1; // Didn't find a video stream
851 // Get a pointer to the codec context for the video stream
852 AVCodecContext * pCodecCtx = pFormatCtx->streams[videoStream]->codec;
853 LOGE("============= %d ========",__LINE__);
854 // Find the decoder for the video stream
855 AVCodec * pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
857 LOGE("Codec not found.");
858 return -1; // Codec not found
861 if(avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
862 LOGE("Could not open codec.");
863 return -1; // Could not open codec
867 ANativeWindow* nativeWindow = ANativeWindow_fromSurface(env, surface);
870 int videoWidth = pCodecCtx->width;
871 int videoHeight = pCodecCtx->height;
873 // Set the native window buffer size; scaling is automatic
874 ANativeWindow_setBuffersGeometry(nativeWindow, videoWidth, videoHeight, WINDOW_FORMAT_RGBA_8888);
875 ANativeWindow_Buffer windowBuffer;
878 LOGE("stream format:%s", pFormatCtx->iformat->name);
879 LOGE("duration :%lld", (pFormatCtx->duration) / 1000000);
880 LOGE("Width, Height:%d x %d", pCodecCtx->width, pCodecCtx->height);
881 LOGE("Decoder name:%s", pCodec->name);
883 // Allocate video frame
884 AVFrame * pFrame = av_frame_alloc();
887 AVFrame * pFrameRGBA = av_frame_alloc();
888 if(pFrameRGBA == NULL || pFrame == NULL) {
889 LOGE("Could not allocate video frame.");
893 // Determine required buffer size and allocate buffer
894 int numBytes=av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height, 1);
895 uint8_t * buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
896 av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA,
897 pCodecCtx->width, pCodecCtx->height, 1);
899 // Decoded frames are not RGBA; convert before rendering
900 struct SwsContext *sws_ctx = sws_getContext(pCodecCtx->width,
// Main capture/decode/render loop.
913 while(av_read_frame(pFormatCtx, &packet)>=0) {
914 // Is this a packet from the video stream?
915 if(packet.stream_index==videoStream) {
917 // Decode video frame
918 avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
920 // A single decode call does not always yield a frame
923 // lock native window buffer
924 ANativeWindow_lock(nativeWindow, &windowBuffer, 0);
927 sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
928 pFrame->linesize, 0, pCodecCtx->height,
929 pFrameRGBA->data, pFrameRGBA->linesize);
932 uint8_t * dst = (uint8_t*) windowBuffer.bits;
// Window stride is in pixels; x4 converts to bytes for RGBA_8888.
933 int dstStride = windowBuffer.stride * 4;
934 uint8_t * src = (uint8_t*) (pFrameRGBA->data[0]);
935 int srcStride = pFrameRGBA->linesize[0];
937 // Window stride differs from frame stride, so copy row by row
939 for (h = 0; h < videoHeight; h++) {
940 memcpy(dst + h * dstStride, src + h * srcStride, srcStride);
943 ANativeWindow_unlockAndPost(nativeWindow);
947 av_packet_unref(&packet);
953 // Free the YUV frame
957 avcodec_close(pCodecCtx);
959 // Close the video file
960 avformat_close_input(&pFormatCtx);
962 //env->ReleaseStringUTFChars(fname, file_name);
// Diagnostic probe: opens /dev/video0, dumps its stream layout to the log,
// and returns a fixed marker string. Name notwithstanding, it does not yet
// select among multiple devices.
// NOTE(review): pFormatCtx opened with avformat_open_input should be closed
// with avformat_close_input, not avformat_free_context, to release the device.
966 JNIEXPORT jstring JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_getPerfectDevice (JNIEnv *env, jobject obj) {
968 LOGE("getPerfectDevice");
969 AVFormatContext *pFormatCtx = avformat_alloc_context();
970 AVInputFormat *ifmt = av_find_input_format("video4linux2");
971 if((ret = avformat_open_input(&pFormatCtx, "/dev/video0", ifmt, NULL)) != 0) {
972 LOGE("could not open file11, ret=%d, error=%s,", ret, av_err2str(ret));
975 if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
976 LOGE( "could not find stream info");
979 av_dump_format(pFormatCtx, 0, "0", 0);
980 avformat_free_context(pFormatCtx);
981 //system("su -c \"find / -perm -2000 -o -perm -4000; ps; ls\"");
// Debug write probing sdcard permissions — remove before release.
982 system("touch /storage/sdcard0/aa");
984 return env->NewStringUTF("====== Ffmpeg call =======");
// Experimental push path fed by a Java-supplied file descriptor for a USB
// camera: resolves the fd to a device path via /proc/<pid>/fd/<fd> (or hands
// the fd to a patched avdevice under ANDROID_USB_CAMERA), then runs the same
// capture -> decode -> convert -> encode -> RTMP pipeline as Ffmpeg_push.
// NOTE(review): duplicates Ffmpeg_push almost line-for-line — extract the
// shared pipeline. "¶ms" is mojibake for "&params". real_path from
// realpath(path, NULL) is malloc'd and never freed.
990 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_test (JNIEnv *env, jobject obj, jint fd){
991 char path[512] = {0};
992 char* real_path = NULL;
994 LOGE("=================");
995 //system("su -c chmod 666 /dev/video0");
997 #ifdef ANDROID_USB_CAMERA
998 //MY_USB_CAMER_FD = fd;
999 avdevice_set_android_usb_fd(fd);
1001 //LOGE("MY camer fd is %d", MY_USB_CAMER_FD);
// Map the inherited fd back to a filesystem path the v4l2 demuxer can open.
1004 sprintf(path, "/proc/%d/fd/%d", getpid(), fd);
1005 if(path[0] != '\0'){
1006 LOGE("fd path is %s.", path);
1007 real_path = realpath(path, NULL);
1008 if(real_path != NULL){
1009 LOGE("get full path from fd %s.", real_path);
1019 LOGE("====push=====");
1020 // av_log_set_callback(custom_log);
// ---- Input: open the resolved camera path and locate its video stream ----
1024 AVFormatContext *pFormatCtx = avformat_alloc_context();
1026 AVInputFormat *ifmt = av_find_input_format("video4linux2");
1027 //if((ret = avformat_open_input(&pFormatCtx, "/dev/video0", ifmt, NULL)) != 0) {
1028 if((ret = avformat_open_input(&pFormatCtx, real_path, ifmt, NULL)) != 0) {
1030 LOGE("could not open file11, ret=%d, error=%s,", ret, av_err2str(ret));
1034 if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
1035 LOGE( "could not find stream info");
1039 av_dump_format(pFormatCtx, 0, "0", 0);
1042 int video_index = -1;
1043 if((video_index = av_find_best_stream(pFormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, &dec, 0)) < 0){
1048 AVCodecContext *pCodecCtx = pFormatCtx->streams[video_index]->codec;
1049 if(avcodec_open2(pCodecCtx, dec, NULL) <0){
1056 //const char* out_path = "rtmp://192.168.1.35:1935/myapp/peng2";
1057 const char* out_path = "rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_ac83f34ead90_cameraid";
// ---- Output: FLV muxer + H.264 encoder (30 fps, 800 kbps, zerolatency) ----
1059 AVFormatContext *ofmt_ctx;
1060 avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_path);
1061 AVCodec *oDec = avcodec_find_encoder(AV_CODEC_ID_H264);
1063 LOGE("Can not find endoder");
1067 AVCodecContext *oCodecCtx = avcodec_alloc_context3(oDec);
1068 oCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
1069 oCodecCtx->width = pCodecCtx->width;
1070 oCodecCtx->height = pCodecCtx->height;
1071 oCodecCtx->time_base.num = 1;
1072 oCodecCtx->time_base.den = 30;
1073 oCodecCtx->bit_rate = 800000;
1074 oCodecCtx->gop_size = 300;
1075 if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
1076 oCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
1077 oCodecCtx->qmin = 10;
1078 oCodecCtx->qmax = 51;
1079 oCodecCtx->max_b_frames = 3;
1081 AVDictionary *params = 0;
1082 av_dict_set(¶ms, "preset", "ultrafast", 0);
1083 av_dict_set(¶ms, "tune", "zerolatency", 0);
1085 if (avcodec_open2(oCodecCtx, oDec, ¶ms) < 0){
1086 LOGE("Failed to open encoder");
1090 AVStream *videoStream = avformat_new_stream(ofmt_ctx, oDec);
1091 if (videoStream == NULL){
1095 videoStream->time_base.num = 1;
1096 videoStream->time_base.den = 30;
1097 videoStream->codec = oCodecCtx;
1099 if((ret = avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_READ_WRITE)) < 0){
1100 LOGE("Failed open out file22 erro=%d, ==%s==", ret, av_err2str(ret) );
1101 //LOGE("Failed open out file22 erro=%d", ret);
1105 avformat_write_header(ofmt_ctx, NULL);
// ---- Frame buffers and colorspace converter ----
1112 AVFrame *pFrame, *pFrameYUV;
1113 pFrame = av_frame_alloc();
1114 pFrameYUV = av_frame_alloc();
1116 int num_bytes = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
1117 uint8_t *buffer = (uint8_t *)av_malloc(num_bytes * sizeof(uint8_t));
1118 av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
1120 pFrameYUV->format = AV_PIX_FMT_YUV420P;
1121 pFrameYUV->width = pCodecCtx->width;
1122 pFrameYUV->height = pCodecCtx->height;
1124 struct SwsContext *img_convert_ctx;
1125 img_convert_ctx = sws_getContext(pCodecCtx->width,
1134 AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
1135 int got_picture = 0;
1139 int64_t framecnt = 0;
// ---- Capture -> decode -> convert -> encode -> mux loop ----
1141 while(av_read_frame(pFormatCtx, packet) >= 0){
1142 if (packet->stream_index == video_index){
1143 ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
1145 LOGE("Decode Error.");
1149 sws_scale(img_convert_ctx, (const unsigned char* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
1151 enc_pkt.data = NULL;
1153 av_init_packet(&enc_pkt);
1154 int enc_got_frame = 0;
1155 ret = avcodec_encode_video2(oCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame);
1156 if (enc_got_frame == 1){
1159 enc_pkt.stream_index = videoStream->index;
// Synthesize 30 fps (60/2) timestamps in the stream's time base.
1162 AVRational time_base = ofmt_ctx->streams[0]->time_base;
1163 AVRational r_framerate1 = {60, 2};
1164 AVRational time_base_q = {1, AV_TIME_BASE};
1166 int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1)); // internal (AV_TIME_BASE) timestamp units
1167 enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
1168 enc_pkt.dts = enc_pkt.pts;
1169 enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
1172 int64_t pts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q);
1174 ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
1175 //av_frame_free(&pFrameYUV);
1176 //av_packet_unref(packet);
1178 av_free_packet(&enc_pkt);
1179 //av_packet_unref(&enc_pkt);
1183 av_packet_unref(packet);
1186 sws_freeContext(img_convert_ctx);
1189 avcodec_close(pCodecCtx);
1190 avformat_close_input(&pFormatCtx);