2 // Created by Peng Li on 30/4/2018.
4 #include "ai_suanzi_rtmpclient_Ffmpeg.h"
5 #include <android/native_window.h>
6 #include <android/native_window_jni.h>
10 #include "libavformat/avformat.h"
11 #include "libavcodec/avcodec.h"
12 #include "libswscale/swscale.h"
13 #include "libavutil/imgutils.h"
14 #include "libavutil/time.h"
15 #include "libavdevice/avdevice.h"
// Global muxer context for the FLV/RTMP output stream
// (allocated in init(), written by process()/flush(), freed in close()).
19 AVFormatContext *ofmt_ctx;
// Global H.264 encoder context shared by init()/process()/flush().
21 AVCodecContext* pCodecCtx;
// Custom FFmpeg log callback: formats each message with av_log_format_line()
// and keeps only entries at warning severity or higher (level <= AV_LOG_WARNING).
// Presumably the kept lines are forwarded to the Android log — the forwarding
// statement is not visible in this chunk; confirm against the full file.
27 void custom_log(void *ptr, int level, const char* fmt, va_list vl){
31 /*FILE *fp=fopen("/storage/emulated/0/av_log.txt","a+");
// print_prefix is persistent state av_log_format_line() uses to decide whether
// the next chunk starts a new log line; it must survive between calls.
43 static int print_prefix = 1;
45 static char prev[1024];
// NOTE(review): `line` is not declared in the visible lines — its declaration
// is presumably in an elided line above; verify buffer size vs. sizeof(line).
49 av_log_format_line(ptr, level, fmt, vl, line, sizeof(line), &print_prefix);
52 //sanitize((uint8_t *)line);
// Drop verbose/info/debug output; only warnings and errors pass through.
54 if (level <= AV_LOG_WARNING)
// JNI: returns a fixed marker string and logs the linked libavformat version.
// Mainly a smoke test that the native library loaded and FFmpeg is callable.
76 JNIEXPORT jstring JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_getVersion (JNIEnv *env, jobject obj) {
// avformat_version() packs major/minor/micro into one int (AV_VERSION_INT).
77 jint v = avformat_version();
78 LOGE("######### Ffmpeg JNI version i= %d", v);
// The block below is commented out (opening /* here; the closing */ is in an
// elided line before the final return): an experiment opening /dev/video0
// through the v4l2 input device.
81 /*AVFormatContext *pFormatCtx = avformat_alloc_context();
82 avdevice_register_all();
83 av_log_set_callback(custom_log);
84 AVInputFormat *ifmt=av_find_input_format("video4linux2");
85 LOGE("===%s===", ifmt->name);
86 if(avformat_open_input(&pFormatCtx,"/dev/video0",ifmt,NULL)!=0){
87 LOGE("Couldn't open input stream.\n");
88 return env->NewStringUTF("===== error =======");
93 return env->NewStringUTF("====== Ffmpeg call =======");
// JNI: set up the RTMP push pipeline for a width x height camera feed:
// allocate the FLV muxer, configure and open an H.264 encoder tuned for low
// latency, create the output stream, open the RTMP URL and write the header.
// Returns 0 on success (error paths are in elided lines).
96 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_init (JNIEnv *env, jobject obj, jint width, jint height) {
98 //const char* out_path = "/storage/emulated/0/Movies/output.flv";
// NOTE(review): hard-coded LAN RTMP endpoint — consider passing the URL in
// from Java instead of baking it into the native library.
100 const char* out_path = "rtmp://192.168.1.35:1935/myapp/suanzi";
101 // const char* out_path = "/storage/sdcard0/output.flv";
105 LOGE("Ffmpeg init, width=%d, heigh=%d", width, height);
// Cache plane sizes for the per-frame NV21 -> YUV420P copy in process():
// Y plane is width*height bytes; each chroma plane is a quarter of that.
109 y_length=width*height;
110 uv_length=width*height/4;
116 avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_path);
117 //output encoder initialize
118 pCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
120 LOGE("Can not find encoder!\n");
123 pCodecCtx = avcodec_alloc_context3(pCodec);
124 pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
125 pCodecCtx->width = width;
126 pCodecCtx->height = height;
// Encoder time base 1/30 -> 30 fps; matches the stream time base set below.
127 pCodecCtx->time_base.num = 1;
128 pCodecCtx->time_base.den = 30;
129 pCodecCtx->bit_rate = 800000;
130 pCodecCtx->gop_size = 300;
131 /* Some formats want stream headers to be separate. */
132 if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
133 pCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
136 //pCodecCtx->me_range = 16;
137 //pCodecCtx->max_qdiff = 4;
138 //pCodecCtx->qcompress = 0.6;
139 pCodecCtx->qmin = 10;
140 pCodecCtx->qmax = 51;
142 pCodecCtx->max_b_frames = 3;
143 // Set H264 preset and tune
144 AVDictionary *param = 0;
// FIX: "&param" had been mojibake-corrupted to "¶m" (U+00B6 pilcrow, the
// classic "&para;" entity corruption) on the next three lines — that does not
// compile. Restored the address-of expressions.
145 av_dict_set(&param, "preset", "ultrafast", 0);
146 av_dict_set(&param, "tune", "zerolatency", 0);
148 if (avcodec_open2(pCodecCtx, pCodec, &param) < 0){
149 LOGE("Failed to open encoder!\n");
153 //Add a new stream to output,should be called by the user before avformat_write_header() for muxing
154 video_st = avformat_new_stream(ofmt_ctx, pCodec);
155 if (video_st == NULL){
158 video_st->time_base.num = 1;
159 video_st->time_base.den = 30;
// NOTE(review): AVStream.codec is deprecated in newer FFmpeg; this code is
// written against the legacy (pre-avcodec_send_frame) API throughout.
160 video_st->codec = pCodecCtx;
162 //Open output URL,set before avformat_write_header() for muxing
164 if (( ret = avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_READ_WRITE)) < 0){
165 LOGE("Failed to open output file! return :%d\n", ret);
170 avformat_write_header(ofmt_ctx, NULL);
// Wall-clock start of the session; process() paces packets against this.
172 start_time = av_gettime();
// JNI: drain frames still buffered inside the encoder (B-frame/lookahead
// delay) by calling avcodec_encode_video2() with a NULL frame, timestamp and
// mux each drained packet, then finalize the stream with av_write_trailer().
176 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_flush (JNIEnv *env, jobject obj){
// Encoders without CODEC_CAP_DELAY buffer nothing, so there is nothing to drain.
180 if (!(ofmt_ctx->streams[0]->codec->codec->capabilities & CODEC_CAP_DELAY))
185 av_init_packet(&enc_pkt);
// NULL input frame (in the elided argument lines) signals end-of-stream drain.
186 ret = avcodec_encode_video2(ofmt_ctx->streams[0]->codec, &enc_pkt,
194 LOGE("Flush Encoder: Succeed to encode 1 frame!\tsize:%5d\n", enc_pkt.size);
197 AVRational time_base = ofmt_ctx->streams[0]->time_base;//{ 1, 1000 }
198 AVRational r_framerate1 = { 60, 2 };
199 AVRational time_base_q = { 1, AV_TIME_BASE };
200 //Duration between 2 frames (us)
// {60,2} == 30 fps, so calc_duration is the per-frame duration in AV_TIME_BASE
// (microsecond) units.
201 int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1)); // internal timestamp
// Synthesize pts/dts from the running frame counter rather than encoder output.
203 enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
204 enc_pkt.dts = enc_pkt.pts;
205 enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base);
207 // Convert PTS/DTS
210 ofmt_ctx->duration = enc_pkt.duration * framecnt;
212 /* mux encoded frame */
213 ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
// Write FLV trailer / flush muxer buffers; ends the RTMP session cleanly.
218 av_write_trailer(ofmt_ctx);
// JNI: tear down the push pipeline opened by init(): close the encoder,
// close the output I/O context (RTMP connection), and free the muxer.
// Call flush() first so buffered frames and the trailer are written.
222 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_close (JNIEnv *env, jobject obj){
224 avcodec_close(video_st->codec);
225 avio_close(ofmt_ctx->pb);
226 avformat_free_context(ofmt_ctx);
// JNI: encode and push one camera frame. Takes an NV21 byte[] from the Android
// camera, repacks it into a YUV420P AVFrame, encodes it with the global H.264
// encoder, stamps pts/dts from the frame counter, paces output against
// wall-clock time, and muxes the packet to the RTMP output.
230 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_process (JNIEnv *env, jobject obj, jbyteArray yuv){
235 //LOGE(" process data - ffmpeg");
236 pFrameYUV = av_frame_alloc();
// NOTE(review): out_buffer is av_malloc'd on every call but no matching
// av_free is visible here (av_frame_free below does not free this external
// buffer) — looks like a per-frame leak; confirm no free exists in elided
// lines, or hoist the allocation into init().
237 uint8_t *out_buffer = (uint8_t *)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
238 avpicture_fill((AVPicture *)pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
240 // Android camera data is NV21; convert it to YUV420P here.
241 jbyte* in= (jbyte*)env->GetByteArrayElements(yuv,0);
// NV21 layout: full Y plane, then interleaved V,U pairs. Copy Y wholesale,
// then de-interleave: even offsets -> V (data[2]), odd offsets -> U (data[1]).
242 memcpy(pFrameYUV->data[0],in,y_length);
243 for(i=0;i<uv_length;i++)
245 *(pFrameYUV->data[2]+i)=*(in+y_length+i*2);
246 *(pFrameYUV->data[1]+i)=*(in+y_length+i*2+1);
249 pFrameYUV->format = AV_PIX_FMT_YUV420P;
250 pFrameYUV->width = yuv_width;
251 pFrameYUV->height = yuv_height;
255 av_init_packet(&enc_pkt);
256 ret = avcodec_encode_video2(pCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame);
257 av_frame_free(&pFrameYUV);
259 if (enc_got_frame == 1){
260 //LOGE("Succeed to encode frame: %5d\tsize:%5d\n", framecnt, enc_pkt.size);
262 enc_pkt.stream_index = video_st->index;
265 AVRational time_base = ofmt_ctx->streams[0]->time_base;//{ 1, 1000 }
// {60,2} == 30 fps; calc_duration is the per-frame duration in microseconds.
266 AVRational r_framerate1 = {60, 2 };//{ 50, 2 };
267 AVRational time_base_q = { 1, AV_TIME_BASE };
268 //Duration between 2 frames (us)
269 int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1)); // internal timestamp
271 //enc_pkt.pts = (double)(framecnt*calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
// Timestamps are synthesized from the frame counter, not taken from the encoder.
272 enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
273 enc_pkt.dts = enc_pkt.pts;
274 enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
// Pace the push: if this packet's timestamp is ahead of wall-clock time since
// start_time, sleep the difference so the stream is sent in real time.
278 int64_t pts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q);
279 int64_t now_time = av_gettime() - start_time;
280 if (pts_time > now_time)
281 av_usleep(pts_time - now_time);
283 ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
284 av_free_packet(&enc_pkt);
// JNI: decode a video source and render it into an Android Surface.
// Opens the input, finds the first video stream, decodes frames, converts
// them to RGBA with swscale, and blits row-by-row into the ANativeWindow.
// Returns -1 on any setup failure.
289 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_play (JNIEnv *env, jobject obj, jobject surface, jstring fname){
295 LOGE("###### video play #####");
296 // char * file_name = "/storage/emulated/0/Movies/big_buck_bunny_720p_10mb.mp4";
// file_name is a UTF-8 view owned by the JVM; released at the end of the function.
297 const char * file_name = env->GetStringUTFChars(fname, 0);
300 avdevice_register_all();
303 AVFormatContext * pFormatCtx = avformat_alloc_context();
307 av_log_set_callback(custom_log);
// NOTE(review): the input appears to be opened twice — /dev/video0 via v4l2
// here, then file_name below. One of the two is presumably disabled by
// elided preprocessor lines or comments; confirm against the full file.
309 AVInputFormat *ifmt=av_find_input_format("video4linux2");
310 LOGE("===%s===", ifmt->name);
311 if(avformat_open_input(&pFormatCtx,"/dev/video0",ifmt,NULL)!=0){
312 LOGE("Couldn't open file:\n");
313 return -1; // Couldn't open file
321 if(avformat_open_input(&pFormatCtx, file_name, NULL, NULL)!=0) {
323 LOGE("Couldn't open file:%s\n", file_name);
324 return -1; // Couldn't open file
327 // Retrieve stream information
328 if(avformat_find_stream_info(pFormatCtx, NULL)<0) {
329 LOGE("Couldn't find stream information.");
333 // Find the first video stream
334 int videoStream = -1, i;
335 for (i = 0; i < pFormatCtx->nb_streams; i++) {
336 if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO
337 && videoStream < 0) {
341 if(videoStream==-1) {
342 LOGE("Didn't find a video stream.");
343 return -1; // Didn't find a video stream
346 // Get a pointer to the codec context for the video stream
347 AVCodecContext * pCodecCtx = pFormatCtx->streams[videoStream]->codec;
348 LOGE("============= %d ========",__LINE__);
349 // Find the decoder for the video stream
350 AVCodec * pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
352 LOGE("Codec not found.");
353 return -1; // Codec not found
356 if(avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
357 LOGE("Could not open codec.");
358 return -1; // Could not open codec
362 ANativeWindow* nativeWindow = ANativeWindow_fromSurface(env, surface);
365 int videoWidth = pCodecCtx->width;
366 int videoHeight = pCodecCtx->height;
368 // Set the native window buffer size; the window scales content automatically.
369 ANativeWindow_setBuffersGeometry(nativeWindow, videoWidth, videoHeight, WINDOW_FORMAT_RGBA_8888);
370 ANativeWindow_Buffer windowBuffer;
// NOTE(review): avcodec_open2 is called a second time here (already opened
// above) — redundant and likely an oversight; left untouched pending review.
372 if(avcodec_open2(pCodecCtx, pCodec, NULL)<0) {
373 LOGE("Could not open codec.");
374 return -1; // Could not open codec
377 LOGE("stream format:%s", pFormatCtx->iformat->name);
378 LOGE("duration :%lld", (pFormatCtx->duration) / 1000000);
379 LOGE("Width, Height:%d x %d", pCodecCtx->width, pCodecCtx->height);
380 LOGE("Decoder name:%s", pCodec->name);
382 // Allocate video frame
383 AVFrame * pFrame = av_frame_alloc();
386 AVFrame * pFrameRGBA = av_frame_alloc();
387 if(pFrameRGBA == NULL || pFrame == NULL) {
388 LOGE("Could not allocate video frame.");
392 // Determine required buffer size and allocate buffer
393 int numBytes=av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height, 1);
394 uint8_t * buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
395 av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA,
396 pCodecCtx->width, pCodecCtx->height, 1);
398 // Decoded frames are not RGBA, so convert the format before rendering.
399 struct SwsContext *sws_ctx = sws_getContext(pCodecCtx->width,
// Main decode loop: read packets until EOF, decode video packets, render.
412 while(av_read_frame(pFormatCtx, &packet)>=0) {
413 // Is this a packet from the video stream?
414 if(packet.stream_index==videoStream) {
416 // Decode video frame
417 avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
419 // A single decode call does not necessarily yield a complete frame.
422 // lock native window buffer
423 ANativeWindow_lock(nativeWindow, &windowBuffer, 0);
426 sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
427 pFrame->linesize, 0, pCodecCtx->height,
428 pFrameRGBA->data, pFrameRGBA->linesize);
// RGBA = 4 bytes per pixel, so the window stride (in pixels) becomes bytes *4.
431 uint8_t * dst = (uint8_t*) windowBuffer.bits;
432 int dstStride = windowBuffer.stride * 4;
433 uint8_t * src = (uint8_t*) (pFrameRGBA->data[0]);
434 int srcStride = pFrameRGBA->linesize[0];
436 // The window stride differs from the frame stride, so copy row by row.
438 for (h = 0; h < videoHeight; h++) {
439 memcpy(dst + h * dstStride, src + h * srcStride, srcStride);
442 ANativeWindow_unlockAndPost(nativeWindow);
446 av_packet_unref(&packet);
452 // Free the YUV frame
456 avcodec_close(pCodecCtx);
458 // Close the video file
459 avformat_close_input(&pFormatCtx);
// Release the JVM-owned UTF-8 string obtained at the top of the function.
461 env->ReleaseStringUTFChars(fname, file_name);
// JNI: full capture -> transcode -> push loop in one function. Opens the v4l2
// camera, decodes its frames, rescales to YUV420P, re-encodes as low-latency
// H.264, and muxes to a hard-coded RTMP URL until the input ends.
465 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_push (JNIEnv *env, jobject obj, jobject surface) {
467 av_log_set_level(AV_LOG_TRACE);
469 avformat_network_init();
470 avdevice_register_all();
472 LOGE("====push=====");
473 av_log_set_callback(custom_log);
477 AVFormatContext *pFormatCtx = avformat_alloc_context();
// --- Input side: v4l2 camera device ---
479 AVInputFormat *ifmt = av_find_input_format("video4linux2");
480 if((ret = avformat_open_input(&pFormatCtx, "/dev/video0", ifmt, NULL)) != 0) {
481 LOGE("could not open file11, ret=%d, error=%s,", ret, av_err2str(ret));
485 if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
486 LOGE( "could not find stream info");
490 av_dump_format(pFormatCtx, 0, "0", 0);
493 int video_index = -1;
494 if((video_index = av_find_best_stream(pFormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, &dec, 0)) < 0){
499 AVCodecContext *pCodecCtx = pFormatCtx->streams[video_index]->codec;
500 if(avcodec_open2(pCodecCtx, dec, NULL) <0){
// --- Output side: FLV muxer over RTMP ---
// NOTE(review): hard-coded LAN endpoint; consider passing it in from Java.
507 const char* out_path = "rtmp://192.168.1.35:1935/myapp/peng2";
// NOTE(review): this local ofmt_ctx shadows the file-scope global of the same
// name used by init()/process()/flush(); intentional here since push() is
// self-contained, but easy to confuse.
509 AVFormatContext *ofmt_ctx;
510 avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_path);
511 AVCodec *oDec = avcodec_find_encoder(AV_CODEC_ID_H264);
513 LOGE("Can not find endoder");
517 AVCodecContext *oCodecCtx = avcodec_alloc_context3(oDec);
518 oCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
519 oCodecCtx->width = pCodecCtx->width;
520 oCodecCtx->height = pCodecCtx->height;
// Encoder time base 1/30 -> 30 fps; matches the stream time base set below.
521 oCodecCtx->time_base.num = 1;
522 oCodecCtx->time_base.den = 30;
523 oCodecCtx->bit_rate = 800000;
524 oCodecCtx->gop_size = 300;
525 if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
526 oCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
527 oCodecCtx->qmin = 10;
528 oCodecCtx->qmax = 51;
529 oCodecCtx->max_b_frames = 3;
531 AVDictionary *params = 0;
// FIX: "&params" had been mojibake-corrupted to "¶ms" (U+00B6 pilcrow, the
// classic "&para;" entity corruption) on the next three lines — that does not
// compile. Restored the address-of expressions.
532 av_dict_set(&params, "preset", "ultrafast", 0);
533 av_dict_set(&params, "tune", "zerolatency", 0);
535 if (avcodec_open2(oCodecCtx, oDec, &params) < 0){
536 LOGE("Failed to open encoder");
540 AVStream *videoStream = avformat_new_stream(ofmt_ctx, oDec);
541 if (videoStream == NULL){
545 videoStream->time_base.num = 1;
546 videoStream->time_base.den = 30;
// NOTE(review): AVStream.codec is deprecated in newer FFmpeg; this code uses
// the legacy (pre-avcodec_send_frame) API throughout.
547 videoStream->codec = oCodecCtx;
549 if((ret = avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_READ_WRITE)) < 0){
550 LOGE("Failed open out file22 erro=%d, ==%s==", ret, av_err2str(ret) );
551 //LOGE("Failed open out file22 erro=%d", ret);
555 avformat_write_header(ofmt_ctx, NULL);
// --- Conversion buffers: decoded frame -> YUV420P frame for the encoder ---
562 AVFrame *pFrame, *pFrameYUV;
563 pFrame = av_frame_alloc();
564 pFrameYUV = av_frame_alloc();
566 int num_bytes = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
567 uint8_t *buffer = (uint8_t *)av_malloc(num_bytes * sizeof(uint8_t));
568 av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
570 pFrameYUV->format = AV_PIX_FMT_YUV420P;
571 pFrameYUV->width = pCodecCtx->width;
572 pFrameYUV->height = pCodecCtx->height;
574 struct SwsContext *img_convert_ctx;
575 img_convert_ctx = sws_getContext(pCodecCtx->width,
584 AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
589 int64_t framecnt = 0;
// Main loop: read camera packets, decode, rescale, re-encode, mux.
591 while(av_read_frame(pFormatCtx, packet) >= 0){
592 if (packet->stream_index == video_index){
593 ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
595 LOGE("Decode Error.");
599 sws_scale(img_convert_ctx, (const unsigned char* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
603 av_init_packet(&enc_pkt);
604 int enc_got_frame = 0;
605 ret = avcodec_encode_video2(oCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame);
606 if (enc_got_frame == 1){
609 enc_pkt.stream_index = videoStream->index;
612 AVRational time_base = ofmt_ctx->streams[0]->time_base;
// {60,2} == 30 fps; calc_duration is the per-frame duration in microseconds.
613 AVRational r_framerate1 = {60, 2};
614 AVRational time_base_q = {1, AV_TIME_BASE};
616 int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1)); // internal timestamp
// Timestamps are synthesized from the frame counter, not taken from the encoder.
617 enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
618 enc_pkt.dts = enc_pkt.pts;
619 enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
622 int64_t pts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q);
624 ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
625 //av_frame_free(&pFrameYUV);
626 //av_packet_unref(packet);
628 av_free_packet(&enc_pkt);
629 //av_packet_unref(&enc_pkt);
637 av_packet_unref(packet);
// Teardown of the input/conversion side (output teardown is in elided lines).
640 sws_freeContext(img_convert_ctx);
643 avcodec_close(pCodecCtx);
644 avformat_close_input(&pFormatCtx);
// JNI: local camera preview — same render path as play(), but the input is
// fixed to the v4l2 device /dev/video0 instead of a caller-supplied file.
// Decodes camera frames, converts to RGBA, and blits into the Surface.
// Returns -1 on any setup failure.
648 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_preview (JNIEnv *env, jobject obj, jobject surface){
650 LOGE("###### video preview #####");
653 avdevice_register_all();
656 AVFormatContext * pFormatCtx = avformat_alloc_context();
659 av_log_set_callback(custom_log);
661 AVInputFormat *ifmt=av_find_input_format("video4linux2");
662 LOGE("===%s===", ifmt->name);
663 if(avformat_open_input(&pFormatCtx,"/dev/video0",ifmt,NULL)!=0){
664 LOGE("Couldn't open file:\n");
665 return -1; // Couldn't open file
668 // Retrieve stream information
669 if(avformat_find_stream_info(pFormatCtx, NULL)<0) {
670 LOGE("Couldn't find stream information.");
674 // Find the first video stream
675 int videoStream = -1, i;
676 for (i = 0; i < pFormatCtx->nb_streams; i++) {
677 if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO
678 && videoStream < 0) {
682 if(videoStream==-1) {
683 LOGE("Didn't find a video stream.");
684 return -1; // Didn't find a video stream
687 // Get a pointer to the codec context for the video stream
688 AVCodecContext * pCodecCtx = pFormatCtx->streams[videoStream]->codec;
689 LOGE("============= %d ========",__LINE__);
690 // Find the decoder for the video stream
691 AVCodec * pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
693 LOGE("Codec not found.");
694 return -1; // Codec not found
697 if(avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
698 LOGE("Could not open codec.");
699 return -1; // Could not open codec
703 ANativeWindow* nativeWindow = ANativeWindow_fromSurface(env, surface);
706 int videoWidth = pCodecCtx->width;
707 int videoHeight = pCodecCtx->height;
709 // Set the native window buffer size; the window scales content automatically.
710 ANativeWindow_setBuffersGeometry(nativeWindow, videoWidth, videoHeight, WINDOW_FORMAT_RGBA_8888);
711 ANativeWindow_Buffer windowBuffer;
714 LOGE("stream format:%s", pFormatCtx->iformat->name);
715 LOGE("duration :%lld", (pFormatCtx->duration) / 1000000);
716 LOGE("Width, Height:%d x %d", pCodecCtx->width, pCodecCtx->height);
717 LOGE("Decoder name:%s", pCodec->name);
719 // Allocate video frame
720 AVFrame * pFrame = av_frame_alloc();
723 AVFrame * pFrameRGBA = av_frame_alloc();
724 if(pFrameRGBA == NULL || pFrame == NULL) {
725 LOGE("Could not allocate video frame.");
729 // Determine required buffer size and allocate buffer
730 int numBytes=av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height, 1);
731 uint8_t * buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
732 av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA,
733 pCodecCtx->width, pCodecCtx->height, 1);
735 // Decoded frames are not RGBA, so convert the format before rendering.
736 struct SwsContext *sws_ctx = sws_getContext(pCodecCtx->width,
// Main decode loop: read packets until the device stops, decode, render.
749 while(av_read_frame(pFormatCtx, &packet)>=0) {
750 // Is this a packet from the video stream?
751 if(packet.stream_index==videoStream) {
753 // Decode video frame
754 avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
756 // A single decode call does not necessarily yield a complete frame.
759 // lock native window buffer
760 ANativeWindow_lock(nativeWindow, &windowBuffer, 0);
763 sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
764 pFrame->linesize, 0, pCodecCtx->height,
765 pFrameRGBA->data, pFrameRGBA->linesize);
// RGBA = 4 bytes per pixel, so the window stride (in pixels) becomes bytes *4.
768 uint8_t * dst = (uint8_t*) windowBuffer.bits;
769 int dstStride = windowBuffer.stride * 4;
770 uint8_t * src = (uint8_t*) (pFrameRGBA->data[0]);
771 int srcStride = pFrameRGBA->linesize[0];
773 // The window stride differs from the frame stride, so copy row by row.
775 for (h = 0; h < videoHeight; h++) {
776 memcpy(dst + h * dstStride, src + h * srcStride, srcStride);
779 ANativeWindow_unlockAndPost(nativeWindow);
783 av_packet_unref(&packet);
789 // Free the YUV frame
793 avcodec_close(pCodecCtx);
795 // Close the video file
796 avformat_close_input(&pFormatCtx);
798 //env->ReleaseStringUTFChars(fname, file_name);
802 JNIEXPORT jstring JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_getPerfectDevice (JNIEnv *env, jobject obj) {