2 // Created by Peng Li on 30/4/2018.
4 #include "ai_suanzi_rtmpclient_Ffmpeg.h"
5 #include <android/native_window.h>
6 #include <android/native_window_jni.h>
15 #include "libavformat/avformat.h"
16 #include "libavcodec/avcodec.h"
17 #include "libswscale/swscale.h"
18 #include "libavutil/imgutils.h"
19 #include "libavutil/time.h"
20 #include "libavdevice/avdevice.h"
// Global muxing/encoding state shared by the JNI entry points below.
// ofmt_ctx: FLV/RTMP output muxer context, created in initnew/inithaha,
// written by process/flush, released in close.
24 AVFormatContext *ofmt_ctx;
// pCodecCtx: H.264 encoder context paired with ofmt_ctx's single video stream.
26 AVCodecContext* pCodecCtx;
31 void javaPrint(JNIEnv *env, jobject obj, const char* str)
33 jclass clazz = (*env).GetObjectClass(obj);
34 jobject mobj = env->NewGlobalRef(obj);
35 jmethodID mmid = env->GetMethodID(clazz, "print", "(Ljava/lang/String;)V");
36 jstring jstr = env->NewStringUTF(str);
37 env->CallVoidMethod(mobj, mmid, jstr);
38 env->DeleteLocalRef(jstr);
// FFmpeg log callback that forwards warning-or-worse messages to the Java
// `print` callback via the cached g_env/g_obj globals (declared outside this
// excerpt, as is the `line` buffer passed to av_log_format_line).
// NOTE(review): calling JNI through a JNIEnv cached in a global is only valid
// on the thread that produced it — FFmpeg may log from other threads; confirm.
44 void custom_log222 (void *ptr, int level, const char* fmt, va_list vl){
45 static int print_prefix = 1;
// Format the message into `line` exactly as av_log's default output would.
47 av_log_format_line(ptr, level, fmt, vl, line, sizeof(line), &print_prefix);
// Only surface warnings and errors to Java to limit callback traffic.
48 if (level <= AV_LOG_WARNING){
50 javaPrint(g_env, g_obj, line);
53 //javaPrint(g_env, g_obj, line);
// FFmpeg log callback used by init/play/preview: formats the message and
// (in code outside this excerpt) emits it for levels <= AV_LOG_WARNING.
// NOTE(review): the `/*` opened below is closed outside the visible excerpt;
// the fopen-to-sdcard variant appears to be disabled debug code.
57 void custom_log(void *ptr, int level, const char* fmt, va_list vl){
59 /*FILE *fp=fopen("/storage/emulated/0/av_log.txt","a+");
67 static int print_prefix = 1;
68 //static char prev[1024];
// `line` is a buffer declared outside this excerpt.
71 av_log_format_line(ptr, level, fmt, vl, line, sizeof(line), &print_prefix);
74 //sanitize((uint8_t *)line);
76 if (level <= AV_LOG_WARNING){
// One-time FFmpeg initialisation: logs component versions, then registers
// devices, initialises networking and installs the custom log callback.
91 JNIEXPORT void JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_init (JNIEnv *env, jobject obj ){
92 LOGE("########## Ffmpeg Init ##########");
93 unsigned int v = avutil_version();
94 LOGE("libavutil - %d.%d.%d", AV_VERSION_MAJOR(v), AV_VERSION_MINOR(v), AV_VERSION_MICRO(v));
95 v = avcodec_version();
96 LOGE("libavcodec - %d.%d.%d", AV_VERSION_MAJOR(v), AV_VERSION_MINOR(v), AV_VERSION_MICRO(v));
97 v = avformat_version();
98 LOGE("libavformat - %d.%d.%d", AV_VERSION_MAJOR(v), AV_VERSION_MINOR(v), AV_VERSION_MICRO(v));
99 v = avdevice_version();
100 LOGE("libavdevice - %d.%d.%d", AV_VERSION_MAJOR(v), AV_VERSION_MINOR(v), AV_VERSION_MICRO(v));
102 //system("su -c chmod 666 /dev/video0");
// NOTE(review): the two system() calls below are leftover root-probing debug
// writes; they shell out with fixed strings and should be removed for release.
103 system("/system/xbin/su -c echo 'wowo' >> /data/local/test");
104 system("echo 'wowow' >> /sdcard/peng/test");
// Maximum log verbosity; custom_log filters what actually reaches Java.
106 av_log_set_level(AV_LOG_TRACE);
108 avdevice_register_all();
109 avformat_network_init();
110 av_log_set_callback(custom_log);
// Debug entry point: logs the libavformat version, tries (via su) to make the
// camera device world-accessible, and returns a fixed marker string.
114 JNIEXPORT jstring JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_getVersion (JNIEnv *env, jobject obj) {
115 jint v = avformat_version();
116 LOGE("######### Ffmpeg JNI version i= %d", v);
// NOTE(review): relies on a rooted device; fails silently otherwise.
118 system("su -c chmod 666 /dev/video0");
120 LOGE("######### Ffmpeg JNI version i= %d", v);
// Disabled experiment: open /dev/video0 via the v4l2 input device.
123 /*AVFormatContext *pFormatCtx = avformat_alloc_context();
124 avdevice_register_all();
125 av_log_set_callback(custom_log);
126 AVInputFormat *ifmt=av_find_input_format("video4linux2");
127 LOGE("===%s===", ifmt->name);
128 if(avformat_open_input(&pFormatCtx,"/dev/video0",ifmt,NULL)!=0){
129 LOGE("Couldn't open input stream.\n");
130 return env->NewStringUTF("===== error =======");
135 return env->NewStringUTF("====== Ffmpeg call =======");
138 //const char* out_path;
// Stub: intended to store the RTMP publish URL, but the body is entirely
// commented out — calling this currently has no effect. The URL is instead
// passed directly to initnew (or hard-coded in inithaha/push/test).
140 JNIEXPORT void JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_setRtmpUrl (JNIEnv *env, jobject obj, jstring url){
142 //const char* out_path = "rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_ac83f34ead90_cameraid";
143 //out_path = env->GetStringUTFChars(url, 0);
148 //#defind JLOGE(s) javaPrint(env, obj, (s));
151 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_initnew (JNIEnv *env, jobject obj, jint width, jint height, jstring url)
153 const char * out_path= env->GetStringUTFChars(url, 0);
154 LOGE("Ffmpeg init, width=%d, heigh=%d, url=%s", width, height, out_path);
155 javaPrint(env, obj, "Ffmpeg init");
159 y_length=width*height;
160 uv_length=width*height/4;
164 avformat_network_init();
167 av_log_set_callback(custom_log222);
171 avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_path);
172 //output encoder initialize
173 pCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
175 LOGE("Can not find encoder!\n");
176 javaPrint(env, obj, "Can not find encoder!");
179 pCodecCtx = avcodec_alloc_context3(pCodec);
180 pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
181 pCodecCtx->width = width;
182 pCodecCtx->height = height;
183 pCodecCtx->time_base.num = 1;
184 pCodecCtx->time_base.den = 30;
185 pCodecCtx->bit_rate = 800000;
186 pCodecCtx->gop_size = 300;
187 /* Some formats want stream headers to be separate. */
188 if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
189 pCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
192 //pCodecCtx->me_range = 16;
193 //pCodecCtx->max_qdiff = 4;
194 //pCodecCtx->qcompress = 0.6;
195 pCodecCtx->qmin = 10;
196 pCodecCtx->qmax = 51;
198 pCodecCtx->max_b_frames = 3;
199 // Set H264 preset and tune
200 AVDictionary *param = 0;
201 av_dict_set(¶m, "preset", "ultrafast", 0);
202 av_dict_set(¶m, "tune", "zerolatency", 0);
204 if (avcodec_open2(pCodecCtx, pCodec, ¶m) < 0){
205 LOGE("Failed to open encoder!\n");
206 javaPrint(env, obj, "Failed to open encoder!");
210 //Add a new stream to output,should be called by the user before avformat_write_header() for muxing
211 video_st = avformat_new_stream(ofmt_ctx, pCodec);
212 if (video_st == NULL){
215 video_st->time_base.num = 1;
216 video_st->time_base.den = 30;
217 video_st->codec = pCodecCtx;
219 //Open output URL,set before avformat_write_header() for muxing
221 if (( ret = avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_READ_WRITE)) < 0){
222 LOGE("Failed to open output file! return :%s(%d)\n", av_err2str(ret),ret);
223 javaPrint(env, obj, "Failed to open output file! return!");
228 avformat_write_header(ofmt_ctx, NULL);
230 start_time = av_gettime();
231 env->ReleaseStringUTFChars(url, out_path);
// Older variant of initnew with a hard-coded RTMP publish URL; same encoder
// and muxer setup (H.264 -> FLV -> RTMP). Kept for debugging.
// NOTE(review): "¶m" below is an HTML-entity corruption of "&param"
// ('&' became '&para;'); restore before compiling.
236 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_inithaha (JNIEnv *env, jobject obj, jint width, jint height) {
238 //const char* out_path = "/storage/emulated/0/Movies/output.flv";
240 //const char* out_path = "rtmp://192.168.1.35:1935/myapp/suanzi";
241 const char* out_path = "rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_ac83f34ead90_cameraid";
243 // const char* out_path = "/storage/sdcard0/output.flv";
247 LOGE("Ffmpeg init, width=%d, heigh=%d", width, height);
// Cache plane sizes for the NV21 -> YUV420P copy in process().
251 y_length=width*height;
252 uv_length=width*height/4;
// FLV container over RTMP.
258 avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_path);
259 //output encoder initialize
260 pCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
262 LOGE("Can not find encoder!\n");
265 pCodecCtx = avcodec_alloc_context3(pCodec);
266 pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
267 pCodecCtx->width = width;
268 pCodecCtx->height = height;
269 pCodecCtx->time_base.num = 1;
270 pCodecCtx->time_base.den = 30;
271 pCodecCtx->bit_rate = 800000;
272 pCodecCtx->gop_size = 300;
273 /* Some formats want stream headers to be separate. */
274 if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
// NOTE(review): CODEC_FLAG_GLOBAL_HEADER is the deprecated spelling of
// AV_CODEC_FLAG_GLOBAL_HEADER in newer FFmpeg.
275 pCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
278 //pCodecCtx->me_range = 16;
279 //pCodecCtx->max_qdiff = 4;
280 //pCodecCtx->qcompress = 0.6;
281 pCodecCtx->qmin = 10;
282 pCodecCtx->qmax = 51;
284 pCodecCtx->max_b_frames = 3;
285 // Set H264 preset and tune
286 AVDictionary *param = 0;
287 av_dict_set(¶m, "preset", "ultrafast", 0);
288 av_dict_set(¶m, "tune", "zerolatency", 0);
290 if (avcodec_open2(pCodecCtx, pCodec, ¶m) < 0){
291 LOGE("Failed to open encoder!\n");
295 //Add a new stream to output,should be called by the user before avformat_write_header() for muxing
296 video_st = avformat_new_stream(ofmt_ctx, pCodec);
297 if (video_st == NULL){
300 video_st->time_base.num = 1;
301 video_st->time_base.den = 30;
302 video_st->codec = pCodecCtx;
304 //Open output URL,set before avformat_write_header() for muxing
306 if (( ret = avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_READ_WRITE)) < 0){
307 LOGE("Failed to open output file! return :%d\n", ret);
312 avformat_write_header(ofmt_ctx, NULL);
// Wall-clock origin for the pacing logic in process().
314 start_time = av_gettime();
// Drain the encoder's delayed frames (B-frame lookahead) at end of stream:
// repeatedly encode with a NULL input frame, timestamp each packet from the
// global frame counter, mux it, then write the container trailer.
// The surrounding loop/brace structure is outside this excerpt.
318 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_flush (JNIEnv *env, jobject obj){
// Encoders without CODEC_CAP_DELAY buffer nothing, so there is nothing to drain.
322 if (!(ofmt_ctx->streams[0]->codec->codec->capabilities & CODEC_CAP_DELAY))
327 av_init_packet(&enc_pkt);
// NULL frame (arguments continue on a line outside this excerpt) = flush mode.
328 ret = avcodec_encode_video2(ofmt_ctx->streams[0]->codec, &enc_pkt,
336 LOGE("Flush Encoder: Succeed to encode 1 frame!\tsize:%5d\n", enc_pkt.size);
339 AVRational time_base = ofmt_ctx->streams[0]->time_base;//{ 1, 1000 };
// 60/2 = 30 fps, matching the encoder time base set at init.
340 AVRational r_framerate1 = { 60, 2 };
341 AVRational time_base_q = { 1, AV_TIME_BASE };
342 //Duration between 2 frames (us)
343 int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1)); // internal (microsecond) timestamp units
// Rescale the frame-counter clock into the stream time base.
345 enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
346 enc_pkt.dts = enc_pkt.pts;
347 enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base);
349 // Convert PTS/DTS
352 ofmt_ctx->duration = enc_pkt.duration * framecnt;
354 /* mux encoded frame */
355 ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
// Finalise the FLV stream once all buffered frames are written.
360 av_write_trailer(ofmt_ctx);
364 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_close (JNIEnv *env, jobject obj){
366 avcodec_close(video_st->codec);
367 avio_close(ofmt_ctx->pb);
368 avformat_free_context(ofmt_ctx);
374 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_processnew (JNIEnv *env, jobject obj, jbyteArray yuv){
// Encode one Android camera frame (NV21 byte array) to H.264 and mux it to
// the RTMP output opened by initnew/inithaha, pacing output to real time.
// NOTE(review): `out_buffer` is allocated per call but never av_free'd, and
// `in` is never released with ReleaseByteArrayElements — per-frame leaks.
380 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_process (JNIEnv *env, jobject obj, jbyteArray yuv){
385 //LOGE(" process data - ffmpeg");
// avpicture_get_size/avpicture_fill are the deprecated pre-4.0 API
// (av_image_get_buffer_size/av_image_fill_arrays elsewhere in this file).
386 pFrameYUV = av_frame_alloc();
387 uint8_t *out_buffer = (uint8_t *)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
388 avpicture_fill((AVPicture *)pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
390 // Android camera frames are NV21; convert to planar YUV420P here.
391 jbyte* in= (jbyte*)env->GetByteArrayElements(yuv,0);
// Y plane is identical in both formats; copy it wholesale.
392 memcpy(pFrameYUV->data[0],in,y_length);
// NV21 stores interleaved VU pairs after the Y plane; de-interleave into
// the separate V (data[2]) and U (data[1]) planes.
393 for(i=0;i<uv_length;i++)
395 *(pFrameYUV->data[2]+i)=*(in+y_length+i*2);
396 *(pFrameYUV->data[1]+i)=*(in+y_length+i*2+1);
399 pFrameYUV->format = AV_PIX_FMT_YUV420P;
400 pFrameYUV->width = yuv_width;
401 pFrameYUV->height = yuv_height;
405 av_init_packet(&enc_pkt);
406 ret = avcodec_encode_video2(pCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame);
407 av_frame_free(&pFrameYUV);
409 if (enc_got_frame == 1){
// Throttled progress log: roughly once per minute at 15 fps.
410 if (framecnt % (15 * 60) == 0){
411 LOGE("Succeed to encode frame: %5d\tsize:%5d\n", framecnt, enc_pkt.size);
412 javaPrint(env, obj, "Succeed to encode frame:");
416 enc_pkt.stream_index = video_st->index;
419 AVRational time_base = ofmt_ctx->streams[0]->time_base;//{ 1, 1000 };
// 60/2 = 30 fps, matching the encoder time base set at init.
420 AVRational r_framerate1 = {60, 2 };//{ 50, 2 };
421 AVRational time_base_q = { 1, AV_TIME_BASE };
422 //Duration between 2 frames (us)
423 int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1)); // internal (microsecond) timestamp units
425 //enc_pkt.pts = (double)(framecnt*calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
// Synthesise monotonic timestamps from the global frame counter.
426 enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
427 enc_pkt.dts = enc_pkt.pts;
428 enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
// Real-time pacing: sleep if the stream clock is ahead of wall time.
432 int64_t pts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q);
433 int64_t now_time = av_gettime() - start_time;
434 if (pts_time > now_time)
435 av_usleep(pts_time - now_time);
437 ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
438 av_free_packet(&enc_pkt);
// Decode a video file (path in `fname`) and render frames into an Android
// Surface: demux -> decode -> swscale to RGBA -> copy into the native window.
444 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_play (JNIEnv *env, jobject obj, jobject surface, jstring fname){
450 LOGE("###### video play #####");
451 // char * file_name = "/storage/emulated/0/Movies/big_buck_bunny_720p_10mb.mp4";
452 const char * file_name = env->GetStringUTFChars(fname, 0);
455 avdevice_register_all();
458 AVFormatContext * pFormatCtx = avformat_alloc_context();
462 av_log_set_callback(custom_log);
// NOTE(review): this v4l2 open of /dev/video0 and the file open below both
// target the same pFormatCtx — opening twice without closing leaks the first
// input; presumably one of the two is leftover debug code — confirm.
464 AVInputFormat *ifmt=av_find_input_format("video4linux2");
465 LOGE("===%s===", ifmt->name);
466 if(avformat_open_input(&pFormatCtx,"/dev/video0",ifmt,NULL)!=0){
467 LOGE("Couldn't open file:\n");
468 return -1; // Couldn't open file
476 if(avformat_open_input(&pFormatCtx, file_name, NULL, NULL)!=0) {
478 LOGE("Couldn't open file:%s\n", file_name);
479 return -1; // Couldn't open file
482 // Retrieve stream information
483 if(avformat_find_stream_info(pFormatCtx, NULL)<0) {
484 LOGE("Couldn't find stream information.");
488 // Find the first video stream
489 int videoStream = -1, i;
490 for (i = 0; i < pFormatCtx->nb_streams; i++) {
491 if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO
492 && videoStream < 0) {
496 if(videoStream==-1) {
497 LOGE("Didn't find a video stream.");
498 return -1; // Didn't find a video stream
501 // Get a pointer to the codec context for the video stream
502 AVCodecContext * pCodecCtx = pFormatCtx->streams[videoStream]->codec;
503 LOGE("============= %d ========",__LINE__);
504 // Find the decoder for the video stream
505 AVCodec * pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
507 LOGE("Codec not found.");
508 return -1; // Codec not found
511 if(avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
512 LOGE("Could not open codec.");
513 return -1; // Could not open codec
// Bind the Java Surface to a native window we can lock and blit into.
517 ANativeWindow* nativeWindow = ANativeWindow_fromSurface(env, surface);
520 int videoWidth = pCodecCtx->width;
521 int videoHeight = pCodecCtx->height;
523 // Size the native window buffers; the window scales automatically.
524 ANativeWindow_setBuffersGeometry(nativeWindow, videoWidth, videoHeight, WINDOW_FORMAT_RGBA_8888);
525 ANativeWindow_Buffer windowBuffer;
// NOTE(review): second avcodec_open2 on an already-opened context — the
// codec was opened above; this duplicate call is redundant.
527 if(avcodec_open2(pCodecCtx, pCodec, NULL)<0) {
528 LOGE("Could not open codec.");
529 return -1; // Could not open codec
532 LOGE("stream format:%s", pFormatCtx->iformat->name);
533 LOGE("duration :%lld", (pFormatCtx->duration) / 1000000);
534 LOGE("Width, Height:%d x %d", pCodecCtx->width, pCodecCtx->height);
535 LOGE("Decoder name:%s", pCodec->name);
537 // Allocate video frame
538 AVFrame * pFrame = av_frame_alloc();
541 AVFrame * pFrameRGBA = av_frame_alloc();
542 if(pFrameRGBA == NULL || pFrame == NULL) {
543 LOGE("Could not allocate video frame.");
547 // Determine required buffer size and allocate buffer
548 int numBytes=av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height, 1);
549 uint8_t * buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
550 av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA,
551 pCodecCtx->width, pCodecCtx->height, 1);
553 // Decoded frames are not RGBA, so convert before rendering.
554 struct SwsContext *sws_ctx = sws_getContext(pCodecCtx->width,
567 while(av_read_frame(pFormatCtx, &packet)>=0) {
568 // Is this a packet from the video stream?
569 if(packet.stream_index==videoStream) {
571 // Decode video frame
572 avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
574 // A single decode call does not necessarily yield a full frame.
577 // lock native window buffer
578 ANativeWindow_lock(nativeWindow, &windowBuffer, 0);
581 sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
582 pFrame->linesize, 0, pCodecCtx->height,
583 pFrameRGBA->data, pFrameRGBA->linesize);
586 uint8_t * dst = (uint8_t*) windowBuffer.bits;
// RGBA = 4 bytes per pixel.
587 int dstStride = windowBuffer.stride * 4;
588 uint8_t * src = (uint8_t*) (pFrameRGBA->data[0]);
589 int srcStride = pFrameRGBA->linesize[0];
591 // The window stride differs from the frame stride, so copy row by row.
593 for (h = 0; h < videoHeight; h++) {
594 memcpy(dst + h * dstStride, src + h * srcStride, srcStride);
597 ANativeWindow_unlockAndPost(nativeWindow);
601 av_packet_unref(&packet);
607 // Free the YUV frame
611 avcodec_close(pCodecCtx);
613 // Close the video file
614 avformat_close_input(&pFormatCtx);
616 env->ReleaseStringUTFChars(fname, file_name);
// Full capture-to-publish pipeline: open the v4l2 camera (/dev/video0),
// decode frames, swscale to YUV420P, re-encode as H.264 and push to a
// hard-coded RTMP URL (the `url` parameter is currently ignored).
// NOTE(review): "¶ms" below is an HTML-entity corruption of "&params";
// restore before compiling.
620 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_push (JNIEnv *env, jobject obj, jobject surface, jstring url){
623 av_log_set_level(AV_LOG_TRACE);
625 avformat_network_init();
626 avdevice_register_all();
629 LOGE("====push=====");
630 // av_log_set_callback(custom_log);
632 //const char* out_path = "rtmp://192.168.1.35:1935/myapp/peng2";
633 const char* out_path = "rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_ac83f34ead90_cameraid";
634 //const char* out_path = env->GetStringUTFChars(url, 0);
635 //const char * file_name = env->GetStringUTFChars(fname, 0);
// ---- Input side: v4l2 camera demuxer + decoder ----
640 AVFormatContext *pFormatCtx = avformat_alloc_context();
642 AVInputFormat *ifmt = av_find_input_format("video4linux2");
643 if((ret = avformat_open_input(&pFormatCtx, "/dev/video0", ifmt, NULL)) != 0) {
644 // if((ret = avformat_open_input(&pFormatCtx, "/dev/bus/usb/003/007", ifmt, NULL)) != 0) {
646 LOGE("could not open file11, ret=%d, error=%s,", ret, av_err2str(ret));
650 if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
651 LOGE( "could not find stream info");
655 av_dump_format(pFormatCtx, 0, "0", 0);
658 int video_index = -1;
// `dec` (decoder) is declared outside this excerpt.
659 if((video_index = av_find_best_stream(pFormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, &dec, 0)) < 0){
664 AVCodecContext *pCodecCtx = pFormatCtx->streams[video_index]->codec;
665 if(avcodec_open2(pCodecCtx, dec, NULL) <0){
672 //const char* out_path = "rtmp://192.168.1.35:1935/myapp/peng2";
673 //const char* out_path = "rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_ac83f34ead90_cameraid";
// ---- Output side: local ofmt_ctx (shadows the file-scope global) ----
675 AVFormatContext *ofmt_ctx;
676 avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_path);
677 AVCodec *oDec = avcodec_find_encoder(AV_CODEC_ID_H264);
679 LOGE("Can not find endoder");
683 AVCodecContext *oCodecCtx = avcodec_alloc_context3(oDec);
684 oCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
685 oCodecCtx->width = pCodecCtx->width;
686 oCodecCtx->height = pCodecCtx->height;
687 oCodecCtx->time_base.num = 1;
688 oCodecCtx->time_base.den = 30;
689 oCodecCtx->bit_rate = 800000;
690 oCodecCtx->gop_size = 300;
691 if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
692 oCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
693 oCodecCtx->qmin = 10;
694 oCodecCtx->qmax = 51;
695 oCodecCtx->max_b_frames = 3;
// x264 low-latency options ("¶ms" = corrupted "&params", see note above).
697 AVDictionary *params = 0;
698 av_dict_set(¶ms, "preset", "ultrafast", 0);
699 av_dict_set(¶ms, "tune", "zerolatency", 0);
701 if (avcodec_open2(oCodecCtx, oDec, ¶ms) < 0){
702 LOGE("Failed to open encoder");
706 AVStream *videoStream = avformat_new_stream(ofmt_ctx, oDec);
707 if (videoStream == NULL){
711 videoStream->time_base.num = 1;
712 videoStream->time_base.den = 30;
713 videoStream->codec = oCodecCtx;
715 if((ret = avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_READ_WRITE)) < 0){
716 LOGE("Failed open out file22 erro=%d, ==%s==", ret, av_err2str(ret) );
717 //LOGE("Failed open out file22 erro=%d", ret);
721 avformat_write_header(ofmt_ctx, NULL);
// ---- Frame buffers and colour conversion ----
728 AVFrame *pFrame, *pFrameYUV;
729 pFrame = av_frame_alloc();
730 pFrameYUV = av_frame_alloc();
732 int num_bytes = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
733 uint8_t *buffer = (uint8_t *)av_malloc(num_bytes * sizeof(uint8_t));
734 av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
736 pFrameYUV->format = AV_PIX_FMT_YUV420P;
737 pFrameYUV->width = pCodecCtx->width;
738 pFrameYUV->height = pCodecCtx->height;
740 struct SwsContext *img_convert_ctx;
741 img_convert_ctx = sws_getContext(pCodecCtx->width,
750 AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
755 int64_t framecnt = 0;
// ---- Main loop: read -> decode -> convert -> encode -> mux ----
757 while(av_read_frame(pFormatCtx, packet) >= 0){
758 if (packet->stream_index == video_index){
759 ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
761 LOGE("Decode Error.");
765 sws_scale(img_convert_ctx, (const unsigned char* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
769 av_init_packet(&enc_pkt);
770 int enc_got_frame = 0;
771 ret = avcodec_encode_video2(oCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame);
772 if (enc_got_frame == 1){
775 enc_pkt.stream_index = videoStream->index;
// Synthesise timestamps from the local frame counter (60/2 = 30 fps).
778 AVRational time_base = ofmt_ctx->streams[0]->time_base;
779 AVRational r_framerate1 = {60, 2};
780 AVRational time_base_q = {1, AV_TIME_BASE};
782 int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1)); // internal (microsecond) timestamp units
783 enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
784 enc_pkt.dts = enc_pkt.pts;
785 enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
788 int64_t pts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q);
790 ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
791 //av_frame_free(&pFrameYUV);
792 //av_packet_unref(packet);
794 av_free_packet(&enc_pkt);
795 //av_packet_unref(&enc_pkt);
799 av_packet_unref(packet);
// ---- Cleanup ----
802 sws_freeContext(img_convert_ctx);
805 avcodec_close(pCodecCtx);
806 avformat_close_input(&pFormatCtx);
// Live camera preview: open /dev/video0 via v4l2, decode frames, convert to
// RGBA with swscale and blit them into the given Android Surface.
810 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_preview (JNIEnv *env, jobject obj, jobject surface){
812 LOGE("###### video preview #####");
815 avdevice_register_all();
818 AVFormatContext * pFormatCtx = avformat_alloc_context();
821 av_log_set_callback(custom_log);
823 AVInputFormat *ifmt=av_find_input_format("video4linux2");
824 LOGE("===%s===", ifmt->name);
825 if(avformat_open_input(&pFormatCtx,"/dev/video0",ifmt,NULL)!=0){
826 LOGE("Couldn't open file:\n");
827 return -1; // Couldn't open file
830 // Retrieve stream information
831 if(avformat_find_stream_info(pFormatCtx, NULL)<0) {
832 LOGE("Couldn't find stream information.");
836 // Find the first video stream
837 int videoStream = -1, i;
838 for (i = 0; i < pFormatCtx->nb_streams; i++) {
839 if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO
840 && videoStream < 0) {
844 if(videoStream==-1) {
845 LOGE("Didn't find a video stream.");
846 return -1; // Didn't find a video stream
849 // Get a pointer to the codec context for the video stream
850 AVCodecContext * pCodecCtx = pFormatCtx->streams[videoStream]->codec;
851 LOGE("============= %d ========",__LINE__);
852 // Find the decoder for the video stream
853 AVCodec * pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
855 LOGE("Codec not found.");
856 return -1; // Codec not found
859 if(avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
860 LOGE("Could not open codec.");
861 return -1; // Could not open codec
// Bind the Java Surface to a native window we can lock and blit into.
865 ANativeWindow* nativeWindow = ANativeWindow_fromSurface(env, surface);
868 int videoWidth = pCodecCtx->width;
869 int videoHeight = pCodecCtx->height;
871 // Size the native window buffers; the window scales automatically.
872 ANativeWindow_setBuffersGeometry(nativeWindow, videoWidth, videoHeight, WINDOW_FORMAT_RGBA_8888);
873 ANativeWindow_Buffer windowBuffer;
876 LOGE("stream format:%s", pFormatCtx->iformat->name);
877 LOGE("duration :%lld", (pFormatCtx->duration) / 1000000);
878 LOGE("Width, Height:%d x %d", pCodecCtx->width, pCodecCtx->height);
879 LOGE("Decoder name:%s", pCodec->name);
881 // Allocate video frame
882 AVFrame * pFrame = av_frame_alloc();
885 AVFrame * pFrameRGBA = av_frame_alloc();
886 if(pFrameRGBA == NULL || pFrame == NULL) {
887 LOGE("Could not allocate video frame.");
891 // Determine required buffer size and allocate buffer
892 int numBytes=av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height, 1);
893 uint8_t * buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
894 av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA,
895 pCodecCtx->width, pCodecCtx->height, 1);
897 // Decoded frames are not RGBA, so convert before rendering.
898 struct SwsContext *sws_ctx = sws_getContext(pCodecCtx->width,
911 while(av_read_frame(pFormatCtx, &packet)>=0) {
912 // Is this a packet from the video stream?
913 if(packet.stream_index==videoStream) {
915 // Decode video frame
916 avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
918 // A single decode call does not necessarily yield a full frame.
921 // lock native window buffer
922 ANativeWindow_lock(nativeWindow, &windowBuffer, 0);
925 sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
926 pFrame->linesize, 0, pCodecCtx->height,
927 pFrameRGBA->data, pFrameRGBA->linesize);
930 uint8_t * dst = (uint8_t*) windowBuffer.bits;
// RGBA = 4 bytes per pixel.
931 int dstStride = windowBuffer.stride * 4;
932 uint8_t * src = (uint8_t*) (pFrameRGBA->data[0]);
933 int srcStride = pFrameRGBA->linesize[0];
935 // The window stride differs from the frame stride, so copy row by row.
937 for (h = 0; h < videoHeight; h++) {
938 memcpy(dst + h * dstStride, src + h * srcStride, srcStride);
941 ANativeWindow_unlockAndPost(nativeWindow);
945 av_packet_unref(&packet);
951 // Free the YUV frame
955 avcodec_close(pCodecCtx);
957 // Close the video file
958 avformat_close_input(&pFormatCtx);
960 //env->ReleaseStringUTFChars(fname, file_name);
// Probe the v4l2 camera at /dev/video0: open it, dump its stream layout to
// the log, then free the context. Returns a fixed marker string; the error
// branches and their returns are outside this excerpt.
964 JNIEXPORT jstring JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_getPerfectDevice (JNIEnv *env, jobject obj) {
966 LOGE("getPerfectDevice");
967 AVFormatContext *pFormatCtx = avformat_alloc_context();
968 AVInputFormat *ifmt = av_find_input_format("video4linux2");
969 if((ret = avformat_open_input(&pFormatCtx, "/dev/video0", ifmt, NULL)) != 0) {
970 LOGE("could not open file11, ret=%d, error=%s,", ret, av_err2str(ret));
973 if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
974 LOGE( "could not find stream info");
977 av_dump_format(pFormatCtx, 0, "0", 0);
// NOTE(review): after a successful open, avformat_close_input (not
// avformat_free_context) is the matching release call — confirm.
978 avformat_free_context(pFormatCtx);
979 //system("su -c \"find / -perm -2000 -o -perm -4000; ps; ls\"");
// Leftover debug write probing sdcard permissions.
980 system("touch /storage/sdcard0/aa");
982 return env->NewStringUTF("====== Ffmpeg call =======");
// Experimental push path that opens the camera through a Java-supplied file
// descriptor: resolves /proc/<pid>/fd/<fd> to a device path, then runs the
// same decode -> swscale -> H.264 encode -> RTMP pipeline as push().
// NOTE(review): "¶ms" below is an HTML-entity corruption of "&params";
// restore before compiling. `real_path` from realpath(..., NULL) is
// malloc'd and never free'd.
988 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_test (JNIEnv *env, jobject obj, jint fd){
989 char path[512] = {0};
990 char* real_path = NULL;
992 LOGE("=================");
993 //system("su -c chmod 666 /dev/video0");
// Optional custom-build hook: hand the raw fd straight to avdevice.
995 #ifdef ANDROID_USB_CAMERA
996 //MY_USB_CAMER_FD = fd;
997 avdevice_set_android_usb_fd(fd);
999 //LOGE("MY camer fd is %d", MY_USB_CAMER_FD);
// Map the Java fd back to its device node via procfs.
1002 sprintf(path, "/proc/%d/fd/%d", getpid(), fd);
1003 if(path[0] != '\0'){
1004 LOGE("fd path is %s.", path);
1005 real_path = realpath(path, NULL);
1006 if(real_path != NULL){
1007 LOGE("get full path from fd %s.", real_path);
1017 LOGE("====push=====");
1018 // av_log_set_callback(custom_log);
// ---- Input side: open the resolved device via the v4l2 demuxer ----
1022 AVFormatContext *pFormatCtx = avformat_alloc_context();
1024 AVInputFormat *ifmt = av_find_input_format("video4linux2");
1025 //if((ret = avformat_open_input(&pFormatCtx, "/dev/video0", ifmt, NULL)) != 0) {
1026 if((ret = avformat_open_input(&pFormatCtx, real_path, ifmt, NULL)) != 0) {
1028 LOGE("could not open file11, ret=%d, error=%s,", ret, av_err2str(ret));
1032 if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
1033 LOGE( "could not find stream info");
1037 av_dump_format(pFormatCtx, 0, "0", 0);
1040 int video_index = -1;
// `dec` (decoder) is declared outside this excerpt.
1041 if((video_index = av_find_best_stream(pFormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, &dec, 0)) < 0){
1046 AVCodecContext *pCodecCtx = pFormatCtx->streams[video_index]->codec;
1047 if(avcodec_open2(pCodecCtx, dec, NULL) <0){
1054 //const char* out_path = "rtmp://192.168.1.35:1935/myapp/peng2";
1055 const char* out_path = "rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_ac83f34ead90_cameraid";
// ---- Output side: local ofmt_ctx (shadows the file-scope global) ----
1057 AVFormatContext *ofmt_ctx;
1058 avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_path);
1059 AVCodec *oDec = avcodec_find_encoder(AV_CODEC_ID_H264);
1061 LOGE("Can not find endoder");
1065 AVCodecContext *oCodecCtx = avcodec_alloc_context3(oDec);
1066 oCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
1067 oCodecCtx->width = pCodecCtx->width;
1068 oCodecCtx->height = pCodecCtx->height;
1069 oCodecCtx->time_base.num = 1;
1070 oCodecCtx->time_base.den = 30;
1071 oCodecCtx->bit_rate = 800000;
1072 oCodecCtx->gop_size = 300;
1073 if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
1074 oCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
1075 oCodecCtx->qmin = 10;
1076 oCodecCtx->qmax = 51;
1077 oCodecCtx->max_b_frames = 3;
// x264 low-latency options ("¶ms" = corrupted "&params", see note above).
1079 AVDictionary *params = 0;
1080 av_dict_set(¶ms, "preset", "ultrafast", 0);
1081 av_dict_set(¶ms, "tune", "zerolatency", 0);
1083 if (avcodec_open2(oCodecCtx, oDec, ¶ms) < 0){
1084 LOGE("Failed to open encoder");
1088 AVStream *videoStream = avformat_new_stream(ofmt_ctx, oDec);
1089 if (videoStream == NULL){
1093 videoStream->time_base.num = 1;
1094 videoStream->time_base.den = 30;
1095 videoStream->codec = oCodecCtx;
1097 if((ret = avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_READ_WRITE)) < 0){
1098 LOGE("Failed open out file22 erro=%d, ==%s==", ret, av_err2str(ret) );
1099 //LOGE("Failed open out file22 erro=%d", ret);
1103 avformat_write_header(ofmt_ctx, NULL);
// ---- Frame buffers and colour conversion ----
1110 AVFrame *pFrame, *pFrameYUV;
1111 pFrame = av_frame_alloc();
1112 pFrameYUV = av_frame_alloc();
1114 int num_bytes = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
1115 uint8_t *buffer = (uint8_t *)av_malloc(num_bytes * sizeof(uint8_t));
1116 av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
1118 pFrameYUV->format = AV_PIX_FMT_YUV420P;
1119 pFrameYUV->width = pCodecCtx->width;
1120 pFrameYUV->height = pCodecCtx->height;
1122 struct SwsContext *img_convert_ctx;
1123 img_convert_ctx = sws_getContext(pCodecCtx->width,
1132 AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
1133 int got_picture = 0;
1137 int64_t framecnt = 0;
// ---- Main loop: read -> decode -> convert -> encode -> mux ----
1139 while(av_read_frame(pFormatCtx, packet) >= 0){
1140 if (packet->stream_index == video_index){
1141 ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
1143 LOGE("Decode Error.");
1147 sws_scale(img_convert_ctx, (const unsigned char* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
1149 enc_pkt.data = NULL;
1151 av_init_packet(&enc_pkt);
1152 int enc_got_frame = 0;
1153 ret = avcodec_encode_video2(oCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame);
1154 if (enc_got_frame == 1){
1157 enc_pkt.stream_index = videoStream->index;
// Synthesise timestamps from the local frame counter (60/2 = 30 fps).
1160 AVRational time_base = ofmt_ctx->streams[0]->time_base;
1161 AVRational r_framerate1 = {60, 2};
1162 AVRational time_base_q = {1, AV_TIME_BASE};
1164 int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1)); // internal (microsecond) timestamp units
1165 enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
1166 enc_pkt.dts = enc_pkt.pts;
1167 enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
1170 int64_t pts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q);
1172 ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
1173 //av_frame_free(&pFrameYUV);
1174 //av_packet_unref(packet);
1176 av_free_packet(&enc_pkt);
1177 //av_packet_unref(&enc_pkt);
1181 av_packet_unref(packet);
// ---- Cleanup ----
1184 sws_freeContext(img_convert_ctx);
1187 avcodec_close(pCodecCtx);
1188 avformat_close_input(&pFormatCtx);