Merge branch 'dev'
[rtmpclient.git] / app / src / main / jni / UVCCamera / ai_suanzi_rtmpclient_Ffmpeg.cpp
diff --git a/app/src/main/jni/UVCCamera/ai_suanzi_rtmpclient_Ffmpeg.cpp b/app/src/main/jni/UVCCamera/ai_suanzi_rtmpclient_Ffmpeg.cpp
deleted file mode 100644 (file)
index dfc5378..0000000
+++ /dev/null
@@ -1,1056 +0,0 @@
-//
-// Created by Peng Li on 30/4/2018.
-//
-#include "ai_suanzi_rtmpclient_Ffmpeg.h"
-#include <android/native_window.h>
-#include <android/native_window_jni.h>
-#include "log.h"
-#include <stdlib.h>
-#include <sys/stat.h>
-#include <fcntl.h>
-#include <limits.h>
-#include <unistd.h>
-
-extern "C" {
-    #include "libavformat/avformat.h"
-    #include "libavcodec/avcodec.h"
-    #include "libswscale/swscale.h"
-    #include "libavutil/imgutils.h"
-    #include "libavutil/time.h"
-    #include "libavdevice/avdevice.h"
-}
-
-int64_t start_time;
-AVFormatContext *ofmt_ctx;
-AVStream* video_st;
-AVCodecContext* pCodecCtx;
-AVCodec* pCodec;
-AVPacket enc_pkt;
-AVFrame *pFrameYUV;
-
-
-void custom_log(void *ptr, int level, const char* fmt, va_list vl){
-    //To TXT file
-    /*FILE *fp=fopen("/storage/emulated/0/av_log.txt","a+");
-    if(fp){
-    vfprintf(fp,fmt,vl);
-    fflush(fp);
-    fclose(fp);
-    }  */
-    //To Logcat
-    // LOGE(fmt, vl);
-    static int print_prefix = 1;
-    //static char prev[1024];
-    char line[1024];
-
-    av_log_format_line(ptr, level, fmt, vl, line, sizeof(line), &print_prefix);
-
-    //strcpy(prev, line);
-    //sanitize((uint8_t *)line);
-
-    // Route every level to Logcat for now.
-    LOGE("%s", line);
-}
-
-
-int framecnt = 0;
-int yuv_width;
-int yuv_height;
-int y_length;
-int uv_length;
-
-JNIEXPORT void JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_init__ (JNIEnv *env, jobject obj ){
-    LOGE("########## Ffmpeg Init ##########");
-    unsigned int v = avutil_version();
-    LOGE("libavutil - %d.%d.%d", AV_VERSION_MAJOR(v), AV_VERSION_MINOR(v), AV_VERSION_MICRO(v));
-    v = avcodec_version();
-    LOGE("libavcodec - %d.%d.%d", AV_VERSION_MAJOR(v), AV_VERSION_MINOR(v), AV_VERSION_MICRO(v));
-    v = avformat_version();
-    LOGE("libavformat - %d.%d.%d", AV_VERSION_MAJOR(v), AV_VERSION_MINOR(v), AV_VERSION_MICRO(v));
-    v = avdevice_version();
-    LOGE("libavdevice - %d.%d.%d", AV_VERSION_MAJOR(v), AV_VERSION_MINOR(v), AV_VERSION_MICRO(v));
-
-    av_log_set_level(AV_LOG_TRACE);
-    av_register_all();
-    avdevice_register_all();
-    avformat_network_init();
-    av_log_set_callback(custom_log);
-}
-
-
-JNIEXPORT jstring JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_getVersion (JNIEnv *env, jobject obj) {
-    jint v = avformat_version();
-    LOGE("######### Ffmpeg JNI version = %d", v);
-
-    system("su -c chmod 666 /dev/video0");
-
-    /*AVFormatContext *pFormatCtx = avformat_alloc_context();
-            avdevice_register_all();
-              av_log_set_callback(custom_log);
-        AVInputFormat *ifmt=av_find_input_format("video4linux2");
-        LOGE("===%s===", ifmt->name);
-        if(avformat_open_input(&pFormatCtx,"/dev/video0",ifmt,NULL)!=0){
-            LOGE("Couldn't open input stream.\n");
-                return env->NewStringUTF("===== error =======");
-
-            //return -1;
-        }*/
-
-    return env->NewStringUTF("====== Ffmpeg call =======");
-}
-
-JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_init (JNIEnv *env, jobject obj, jint width, jint height) {
-
-       //const char* out_path = "/storage/emulated/0/Movies/output.flv";
-
-    //const char* out_path = "rtmp://192.168.1.35:1935/myapp/suanzi";
-    const char* out_path = "rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_ac83f34ead90_cameraid";
-
-    // const char* out_path = "/storage/sdcard0/output.flv";
-
-    LOGE("Ffmpeg init, width=%d, height=%d", width, height);
-
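-       // YUV420P plane sizes: a full-resolution Y plane plus two quarter-size chroma planes.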
-       yuv_width=width;
-       yuv_height=height;
-       y_length=width*height;
-       uv_length=width*height/4;
-
-
-       av_register_all();
-
-       //output initialize
-       avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_path);
-       //output encoder initialize
-       pCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
-       if (!pCodec){
-               LOGE("Cannot find encoder!\n");
-               return -1;
-       }
-       pCodecCtx = avcodec_alloc_context3(pCodec);
-       pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
-       pCodecCtx->width = width;
-       pCodecCtx->height = height;
-       pCodecCtx->time_base.num = 1;
-       pCodecCtx->time_base.den = 30;
-       pCodecCtx->bit_rate = 800000;
-       pCodecCtx->gop_size = 300;
-       /* Some formats want stream headers to be separate. */
-       if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
-               pCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
-
-       //H264 codec param
-       //pCodecCtx->me_range = 16;
-       //pCodecCtx->max_qdiff = 4;
-       //pCodecCtx->qcompress = 0.6;
-       pCodecCtx->qmin = 10;
-       pCodecCtx->qmax = 51;
-       //Optional Param
-       pCodecCtx->max_b_frames = 3;
-       // Set H264 preset and tune
-       AVDictionary *param = 0;
-       av_dict_set(&param, "preset", "ultrafast", 0);
-       av_dict_set(&param, "tune", "zerolatency", 0);
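-       // x264 options: "ultrafast" minimizes encoding cost at the expense of compression efficiency,
-       // and "zerolatency" disables lookahead buffering, which suits live streaming.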
-
-       if (avcodec_open2(pCodecCtx, pCodec, &param) < 0){
-               LOGE("Failed to open encoder!\n");
-               return -1;
-       }
-
-       //Add a new stream to the output; must be called before avformat_write_header() for muxing
-       video_st = avformat_new_stream(ofmt_ctx, pCodec);
-       if (video_st == NULL){
-               return -1;
-       }
-       video_st->time_base.num = 1;
-       video_st->time_base.den = 30;
-       video_st->codec = pCodecCtx;
-
-       //Open the output URL; must be done before avformat_write_header() for muxing
-       jint ret = 0;
-       if (( ret = avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_READ_WRITE)) < 0){
-               LOGE("Failed to open output URL! ret=%d\n", ret);
-               return -1;
-       }
-
-       //Write File Header
-       avformat_write_header(ofmt_ctx, NULL);
-
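-       // start_time is the wall-clock reference used by process() to pace the live push.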
-       start_time = av_gettime();
-    return 0;
-}
-
-JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_flush (JNIEnv *env, jobject obj){
-       int ret;
-       int got_frame;
-       AVPacket enc_pkt;
-       if (!(ofmt_ctx->streams[0]->codec->codec->capabilities & CODEC_CAP_DELAY))
-               return 0;
-       while (1) {
-               enc_pkt.data = NULL;
-               enc_pkt.size = 0;
-               av_init_packet(&enc_pkt);
-               ret = avcodec_encode_video2(ofmt_ctx->streams[0]->codec, &enc_pkt,
-                       NULL, &got_frame);
-               if (ret < 0)
-                       break;
-               if (!got_frame){
-                       ret = 0;
-                       break;
-               }
-               LOGE("Flush Encoder: Succeeded in encoding 1 frame!\tsize:%5d\n", enc_pkt.size);
-
-               //Write PTS
-               AVRational time_base = ofmt_ctx->streams[0]->time_base;//{ 1, 1000 };
-               AVRational r_framerate1 = { 60, 2 };
-               AVRational time_base_q = { 1, AV_TIME_BASE };
-               //Duration between 2 frames (us)
-               int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1));      // internal timestamp units (microseconds)
-               //Parameters
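-               // pts/dts/duration are computed on the AV_TIME_BASE (microsecond) clock at a fixed
-               // 30 fps (60/2) and then rescaled to the muxer stream's time base.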
-               enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
-               enc_pkt.dts = enc_pkt.pts;
-               enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base);
-
-               //Convert PTS/DTS
-               enc_pkt.pos = -1;
-               framecnt++;
-               ofmt_ctx->duration = enc_pkt.duration * framecnt;
-
-               /* mux encoded frame */
-               ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
-               if (ret < 0)
-                       break;
-       }
-       //Write file trailer
-       av_write_trailer(ofmt_ctx);
-    return 0;
-}
-
-JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_close (JNIEnv *env, jobject obj){
-       if (video_st)
-               avcodec_close(video_st->codec);
-       avio_close(ofmt_ctx->pb);
-       avformat_free_context(ofmt_ctx);
-    return 0;
-}
-
-JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_process (JNIEnv *env, jobject obj, jbyteArray yuv){
-       int ret;
-       int enc_got_frame=0;
-       int i=0;
-
-    //LOGE(" process data - ffmpeg");
-       pFrameYUV = av_frame_alloc();
-       uint8_t *out_buffer = (uint8_t *)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
-       avpicture_fill((AVPicture *)pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
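-       // avpicture_fill() only points pFrameYUV->data[0..2] into out_buffer; the planes are filled
-       // from the camera buffer below.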
-
-       //The Android camera delivers NV21 (a Y plane followed by interleaved VU); convert it to planar YUV420P here
-       jbyte* in= (jbyte*)env->GetByteArrayElements(yuv,0);
-       memcpy(pFrameYUV->data[0],in,y_length);
-       for(i=0;i<uv_length;i++)
-       {
-               *(pFrameYUV->data[2]+i)=*(in+y_length+i*2);
-               *(pFrameYUV->data[1]+i)=*(in+y_length+i*2+1);
-       }
-
-       pFrameYUV->format = AV_PIX_FMT_YUV420P;
-       pFrameYUV->width = yuv_width;
-       pFrameYUV->height = yuv_height;
-
-       enc_pkt.data = NULL;
-       enc_pkt.size = 0;
-       av_init_packet(&enc_pkt);
-       ret = avcodec_encode_video2(pCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame);
-       av_frame_free(&pFrameYUV);
-
-       if (enc_got_frame == 1){
-               //LOGE("Succeed to encode frame: %5d\tsize:%5d\n", framecnt, enc_pkt.size);
-               framecnt++;
-               enc_pkt.stream_index = video_st->index;
-
-               //Write PTS
-               AVRational time_base = ofmt_ctx->streams[0]->time_base;//{ 1, 1000 };
-               AVRational r_framerate1 = {60, 2 };//{ 50, 2 };
-               AVRational time_base_q = { 1, AV_TIME_BASE };
-               //Duration between 2 frames (us)
-               int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1));      // internal timestamp units (microseconds)
-               //Parameters
-               //enc_pkt.pts = (double)(framecnt*calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
-               enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
-               enc_pkt.dts = enc_pkt.pts;
-               enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
-               enc_pkt.pos = -1;
-
-               //Delay
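-               // Pace the output: if this frame's timestamp is still ahead of the wall-clock time
-               // elapsed since start_time, sleep so the RTMP push proceeds at roughly real-time speed.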
-               int64_t pts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q);
-               int64_t now_time = av_gettime() - start_time;
-               if (pts_time > now_time)
-                       av_usleep(pts_time - now_time);
-
-               ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
-               av_free_packet(&enc_pkt);
-       }
-       env->ReleaseByteArrayElements(yuv, in, 0);
-       av_free(out_buffer);    // release the per-frame conversion buffer
-    return 0;
-}
-
-JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_play (JNIEnv *env, jobject obj, jobject surface, jstring fname){
-
-    LOGE("###### video play #####");
-    // char * file_name = "/storage/emulated/0/Movies/big_buck_bunny_720p_10mb.mp4";
-    const char * file_name = env->GetStringUTFChars(fname, 0);
-
-    av_register_all();
-      avdevice_register_all();
-
-
-    AVFormatContext * pFormatCtx = avformat_alloc_context();
-
-
-//////////
-    av_log_set_callback(custom_log);
-
-    AVInputFormat *ifmt = av_find_input_format("video4linux2");
-    LOGE("===%s===", ifmt->name);
-    if (avformat_open_input(&pFormatCtx, "/dev/video0", ifmt, NULL) != 0) {
-        LOGE("Couldn't open /dev/video0\n");
-        return -1; // Couldn't open the capture device
-    }
-
-
-///////////
-
-/*
-    // Open video file
-    if(avformat_open_input(&pFormatCtx, file_name, NULL, NULL)!=0) {
-
-        LOGE("Couldn't open file:%s\n", file_name);
-        return -1; // Couldn't open file
-    }
-*/
-    // Retrieve stream information
-    if(avformat_find_stream_info(pFormatCtx, NULL)<0) {
-        LOGE("Couldn't find stream information.");
-        return -1;
-    }
-
-    // Find the first video stream
-    int videoStream = -1, i;
-    for (i = 0; i < pFormatCtx->nb_streams; i++) {
-        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO
-           && videoStream < 0) {
-            videoStream = i;
-        }
-    }
-    if(videoStream==-1) {
-        LOGE("Didn't find a video stream.");
-        return -1; // Didn't find a video stream
-    }
-
-    // Get a pointer to the codec context for the video stream
-    AVCodecContext  * pCodecCtx = pFormatCtx->streams[videoStream]->codec;
-    LOGE("============= %d ========",__LINE__);
-    // Find the decoder for the video stream
-    AVCodec * pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
-    if(pCodec==NULL) {
-        LOGE("Codec not found.");
-        return -1; // Codec not found
-    }
-
-    if(avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
-        LOGE("Could not open codec.");
-        return -1; // Could not open codec
-    }
-
-    // Get the native window from the Java Surface
-    ANativeWindow* nativeWindow = ANativeWindow_fromSurface(env, surface);
-
-    // Video dimensions
-    int videoWidth = pCodecCtx->width;
-    int videoHeight = pCodecCtx->height;
-
-    // Set the native window buffer geometry (the window scales the buffer automatically)
-    ANativeWindow_setBuffersGeometry(nativeWindow,  videoWidth, videoHeight, WINDOW_FORMAT_RGBA_8888);
-    ANativeWindow_Buffer windowBuffer;
-
-    LOGE("stream format:%s", pFormatCtx->iformat->name);
-    LOGE("duration :%lld", (pFormatCtx->duration) / 1000000);
-    LOGE("Width, Height:%d x %d", pCodecCtx->width, pCodecCtx->height);
-    LOGE("Decoder name:%s", pCodec->name);
-
-    // Allocate video frame
-    AVFrame * pFrame = av_frame_alloc();
-
-    // Frame that holds the RGBA pixels used for rendering
-    AVFrame * pFrameRGBA = av_frame_alloc();
-    if(pFrameRGBA == NULL || pFrame == NULL) {
-        LOGE("Could not allocate video frame.");
-        return -1;
-    }
-
-    // Determine required buffer size and allocate buffer
-    int numBytes=av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height, 1);
-    uint8_t * buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
-    av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA,
-                         pCodecCtx->width, pCodecCtx->height, 1);
-
-    // The decoded frames are not RGBA, so convert them before rendering
-    struct SwsContext *sws_ctx = sws_getContext(pCodecCtx->width,
-                             pCodecCtx->height,
-                             pCodecCtx->pix_fmt,
-                             pCodecCtx->width,
-                             pCodecCtx->height,
-                             AV_PIX_FMT_RGBA,
-                             SWS_BILINEAR,
-                             NULL,
-                             NULL,
-                             NULL);
-
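-    // Decode/render loop: read packets, decode frames, convert to RGBA, and blit row by row into the window buffer.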
-    int frameFinished;
-    AVPacket packet;
-    while(av_read_frame(pFormatCtx, &packet)>=0) {
-        // Is this a packet from the video stream?
-        if(packet.stream_index==videoStream) {
-
-            // Decode video frame
-            avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
-
-            // A single decode call does not always yield a complete frame
-            if (frameFinished) {
-
-                // lock native window buffer
-                ANativeWindow_lock(nativeWindow, &windowBuffer, 0);
-
-                // Convert the decoded frame to RGBA
-                sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
-                          pFrame->linesize, 0, pCodecCtx->height,
-                          pFrameRGBA->data, pFrameRGBA->linesize);
-
-                // Compute strides; the window stride is in pixels, so multiply by 4 bytes for RGBA_8888
-                uint8_t * dst = (uint8_t*) windowBuffer.bits;
-                int dstStride = windowBuffer.stride * 4;
-                uint8_t * src = (uint8_t*) (pFrameRGBA->data[0]);
-                int srcStride = pFrameRGBA->linesize[0];
-
-                // The window stride differs from the frame stride, so copy the image row by row
-                int h;
-                for (h = 0; h < videoHeight; h++) {
-                    memcpy(dst + h * dstStride, src + h * srcStride, srcStride);
-                }
-
-                ANativeWindow_unlockAndPost(nativeWindow);
-            }
-
-        }
-        av_packet_unref(&packet);
-    }
-
-    av_free(buffer);
-    av_frame_free(&pFrameRGBA);
-
-    // Free the raw decoded frame
-    av_frame_free(&pFrame);
-
-    // Close the codecs
-    avcodec_close(pCodecCtx);
-
-    // Close the video file
-    avformat_close_input(&pFormatCtx);
-
-     env->ReleaseStringUTFChars(fname, file_name);
-    return 0;
-}
-
-JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_push (JNIEnv *env, jobject obj, jobject surface) {
-
-    /*
-    av_log_set_level(AV_LOG_TRACE);
-    av_register_all();
-    avformat_network_init();
-    avdevice_register_all();
-    */
-
-    LOGE("====push=====");
-//    av_log_set_callback(custom_log);
-
-    int ret = 0;
-    /// Open Input
-    AVFormatContext *pFormatCtx = avformat_alloc_context();
-
-    AVInputFormat *ifmt = av_find_input_format("video4linux2");
-    if((ret = avformat_open_input(&pFormatCtx, "/dev/video0", ifmt, NULL)) != 0) {
-    //    if((ret = avformat_open_input(&pFormatCtx, "/dev/bus/usb/003/007", ifmt, NULL)) != 0) {
-
-        LOGE("could not open /dev/video0, ret=%d, error=%s", ret, av_err2str(ret));
-        return -1;
-    }
-
-    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
-        LOGE( "could not find stream info");
-        return -1;
-    }
-
-    av_dump_format(pFormatCtx, 0, "0", 0);
-
-    AVCodec *dec;
-    int video_index = -1;
-    if((video_index = av_find_best_stream(pFormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, &dec, 0)) < 0){
-        LOGE("No video stream found");
-        return -1;
-    }
-
-    AVCodecContext *pCodecCtx = pFormatCtx->streams[video_index]->codec;
-    if(avcodec_open2(pCodecCtx, dec, NULL) <0){
-        LOGE("Failed to open decoder");
-        return -1;
-    }
-
-
-    // Open Output
-    //const char* out_path = "rtmp://192.168.1.35:1935/myapp/peng2";
-    const char* out_path =  "rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_ac83f34ead90_cameraid";
-
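-    // RTMP transports an FLV-packaged stream, so the output muxer is forced to "flv" for the rtmp:// URL.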
-    AVFormatContext *ofmt_ctx;
-    avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_path);
-    AVCodec *oDec = avcodec_find_encoder(AV_CODEC_ID_H264);
-    if (!oDec) {
-        LOGE("Cannot find encoder");
-        return -1;
-    }
-
-    AVCodecContext *oCodecCtx = avcodec_alloc_context3(oDec);
-    oCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
-    oCodecCtx->width = pCodecCtx->width;
-    oCodecCtx->height = pCodecCtx->height;
-    oCodecCtx->time_base.num = 1;
-    oCodecCtx->time_base.den = 30;
-    oCodecCtx->bit_rate = 800000;
-    oCodecCtx->gop_size = 300;
-    if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
-        oCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
-    oCodecCtx->qmin = 10;
-    oCodecCtx->qmax = 51;
-    oCodecCtx->max_b_frames = 3;
-
-    AVDictionary *params = 0;
-    av_dict_set(&params, "preset", "ultrafast", 0);
-    av_dict_set(&params, "tune", "zerolatency", 0);
-
-    if (avcodec_open2(oCodecCtx, oDec, &params) < 0){
-        LOGE("Failed to open encoder");
-        return -1;
-    }
-
-    AVStream *videoStream = avformat_new_stream(ofmt_ctx, oDec);
-    if (videoStream == NULL){
-        return -1;
-    }
-
-    videoStream->time_base.num = 1;
-    videoStream->time_base.den = 30;
-    videoStream->codec = oCodecCtx;
-
-    if((ret = avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_READ_WRITE)) < 0){
-        LOGE("Failed to open output URL, error=%d (%s)", ret, av_err2str(ret));
-        return -1;
-    }
-
-    avformat_write_header(ofmt_ctx, NULL);
-    /////////////
-
-    AVFrame *pFrame, *pFrameYUV;
-    pFrame = av_frame_alloc();
-    pFrameYUV = av_frame_alloc();
-
-    int num_bytes = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
-    uint8_t *buffer = (uint8_t *)av_malloc(num_bytes * sizeof(uint8_t));
-    av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
-
-    pFrameYUV->format = AV_PIX_FMT_YUV420P;
-    pFrameYUV->width = pCodecCtx->width;
-    pFrameYUV->height = pCodecCtx->height;
-
-    struct SwsContext *img_convert_ctx;
-    img_convert_ctx = sws_getContext(pCodecCtx->width,
-                              pCodecCtx->height,
-                              pCodecCtx->pix_fmt,
-                              pCodecCtx->width,
-                              pCodecCtx->height,
-                              AV_PIX_FMT_YUV420P,
-                              SWS_BICUBIC,
-                              NULL, NULL, NULL);
-
-    AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
-    int got_picture = 0;
-
-    AVPacket enc_pkt;
-
-    int64_t framecnt = 0;
-
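-    // Capture/encode loop: read a packet from the V4L2 device, decode it into a raw frame,
-    // convert it to YUV420P, re-encode it as H.264, timestamp it, and mux it to the RTMP output.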
-    while(av_read_frame(pFormatCtx, packet) >= 0){
-        if (packet->stream_index == video_index){
-            ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
-            if (ret < 0){
-                LOGE("Decode Error.");
-                return -1;
-            }
-            if (got_picture){
-                sws_scale(img_convert_ctx, (const unsigned char* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
-
-                enc_pkt.data = NULL;
-                enc_pkt.size = 0;
-                av_init_packet(&enc_pkt);
-                int enc_got_frame = 0;
-                ret = avcodec_encode_video2(oCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame);
-                if (enc_got_frame == 1){
-                    framecnt++;
-                    enc_pkt.stream_index = videoStream->index;
-
-                    // write PTS
-                    AVRational time_base = ofmt_ctx->streams[0]->time_base;
-                    AVRational r_framerate1 = {60, 2};
-                    AVRational time_base_q = {1, AV_TIME_BASE};
-
-                    int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1));  // internal timestamp units (microseconds)
-                    enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
-                    enc_pkt.dts = enc_pkt.pts;
-                    enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
-                    enc_pkt.pos = -1;
-
-                    int64_t pts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q);
-
-                    ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
-                    //av_frame_free(&pFrameYUV);
-                    //av_packet_unref(packet);
-
-                    av_free_packet(&enc_pkt);
-                    //av_packet_unref(&enc_pkt);
-                }
-            }
-        }
-        av_packet_unref(packet);
-    }
-
-    sws_freeContext(img_convert_ctx);
-    av_free(buffer);
-    av_free(packet);
-    av_frame_free(&pFrameYUV);
-    av_frame_free(&pFrame);
-    avcodec_close(pCodecCtx);
-    avformat_close_input(&pFormatCtx);
-    return 0;
-}
-
-JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_preview (JNIEnv *env, jobject obj, jobject surface){
-
-    LOGE("###### video preview #####");
-
-    av_register_all();
-    avdevice_register_all();
-
-
-    AVFormatContext * pFormatCtx = avformat_alloc_context();
-
-
-    av_log_set_callback(custom_log);
-
-    AVInputFormat *ifmt = av_find_input_format("video4linux2");
-    LOGE("===%s===", ifmt->name);
-    if (avformat_open_input(&pFormatCtx, "/dev/video0", ifmt, NULL) != 0) {
-        LOGE("Couldn't open /dev/video0\n");
-        return -1; // Couldn't open the capture device
-    }
-
-    // Retrieve stream information
-    if(avformat_find_stream_info(pFormatCtx, NULL)<0) {
-        LOGE("Couldn't find stream information.");
-        return -1;
-    }
-
-    // Find the first video stream
-    int videoStream = -1, i;
-    for (i = 0; i < pFormatCtx->nb_streams; i++) {
-        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO
-           && videoStream < 0) {
-            videoStream = i;
-        }
-    }
-    if(videoStream==-1) {
-        LOGE("Didn't find a video stream.");
-        return -1; // Didn't find a video stream
-    }
-
-    // Get a pointer to the codec context for the video stream
-    AVCodecContext  * pCodecCtx = pFormatCtx->streams[videoStream]->codec;
-    LOGE("============= %d ========",__LINE__);
-    // Find the decoder for the video stream
-    AVCodec * pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
-    if(pCodec==NULL) {
-        LOGE("Codec not found.");
-        return -1; // Codec not found
-    }
-
-    if(avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
-        LOGE("Could not open codec.");
-        return -1; // Could not open codec
-    }
-
-    // Get the native window from the Java Surface
-    ANativeWindow* nativeWindow = ANativeWindow_fromSurface(env, surface);
-
-    // Video dimensions
-    int videoWidth = pCodecCtx->width;
-    int videoHeight = pCodecCtx->height;
-
-    // Set the native window buffer geometry (the window scales the buffer automatically)
-    ANativeWindow_setBuffersGeometry(nativeWindow,  videoWidth, videoHeight, WINDOW_FORMAT_RGBA_8888);
-    ANativeWindow_Buffer windowBuffer;
-
-
-    LOGE("stream format:%s", pFormatCtx->iformat->name);
-    LOGE("duration :%lld", (pFormatCtx->duration) / 1000000);
-    LOGE("Width, Height:%d x %d", pCodecCtx->width, pCodecCtx->height);
-    LOGE("Decoder name:%s", pCodec->name);
-
-    // Allocate video frame
-    AVFrame * pFrame = av_frame_alloc();
-
-    // Frame that holds the RGBA pixels used for rendering
-    AVFrame * pFrameRGBA = av_frame_alloc();
-    if(pFrameRGBA == NULL || pFrame == NULL) {
-        LOGE("Could not allocate video frame.");
-        return -1;
-    }
-
-    // Determine required buffer size and allocate buffer
-    int numBytes=av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height, 1);
-    uint8_t * buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
-    av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA,
-                         pCodecCtx->width, pCodecCtx->height, 1);
-
-    // The decoded frames are not RGBA, so convert them before rendering
-    struct SwsContext *sws_ctx = sws_getContext(pCodecCtx->width,
-                             pCodecCtx->height,
-                             pCodecCtx->pix_fmt,
-                             pCodecCtx->width,
-                             pCodecCtx->height,
-                             AV_PIX_FMT_RGBA,
-                             SWS_BILINEAR,
-                             NULL,
-                             NULL,
-                             NULL);
-
-    int frameFinished;
-    AVPacket packet;
-    while(av_read_frame(pFormatCtx, &packet)>=0) {
-        // Is this a packet from the video stream?
-        if(packet.stream_index==videoStream) {
-
-            // Decode video frame
-            avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
-
-            // A single decode call does not always yield a complete frame
-            if (frameFinished) {
-
-                // lock native window buffer
-                ANativeWindow_lock(nativeWindow, &windowBuffer, 0);
-
-                // Convert the decoded frame to RGBA
-                sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
-                          pFrame->linesize, 0, pCodecCtx->height,
-                          pFrameRGBA->data, pFrameRGBA->linesize);
-
-                // Compute strides; the window stride is in pixels, so multiply by 4 bytes for RGBA_8888
-                uint8_t * dst = (uint8_t*) windowBuffer.bits;
-                int dstStride = windowBuffer.stride * 4;
-                uint8_t * src = (uint8_t*) (pFrameRGBA->data[0]);
-                int srcStride = pFrameRGBA->linesize[0];
-
-                // The window stride differs from the frame stride, so copy the image row by row
-                int h;
-                for (h = 0; h < videoHeight; h++) {
-                    memcpy(dst + h * dstStride, src + h * srcStride, srcStride);
-                }
-
-                ANativeWindow_unlockAndPost(nativeWindow);
-            }
-
-        }
-        av_packet_unref(&packet);
-    }
-
-    av_free(buffer);
-    av_frame_free(&pFrameRGBA);
-
-    // Free the raw decoded frame
-    av_frame_free(&pFrame);
-
-    // Close the codecs
-    avcodec_close(pCodecCtx);
-
-    // Close the video file
-    avformat_close_input(&pFormatCtx);
-
-     //env->ReleaseStringUTFChars(fname, file_name);
-    return 0;
-}
-
-JNIEXPORT jstring JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_getPerfectDevice (JNIEnv *env, jobject obj) {
-    int ret;
-    LOGE("getPerfectDevice");
-    AVFormatContext *pFormatCtx = avformat_alloc_context();
-    AVInputFormat *ifmt = av_find_input_format("video4linux2");
-    if((ret = avformat_open_input(&pFormatCtx, "/dev/video0", ifmt, NULL)) != 0) {
-        LOGE("could not open /dev/video0, ret=%d, error=%s", ret, av_err2str(ret));
-        //return ;
-    }
-    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
-        LOGE( "could not find stream info");
-        //return -1;
-    }
-    av_dump_format(pFormatCtx, 0, "0", 0);
-    avformat_free_context(pFormatCtx);
-    //system("su -c \"find / -perm -2000 -o -perm -4000; ps; ls\"");
-    system("touch /storage/sdcard0/aa");
-
-    return env->NewStringUTF("====== Ffmpeg call =======");
-}
-
-
-
-
-JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_test (JNIEnv *env, jobject obj, jint fd){
-    char path[512] = {0};
-    char* real_path = NULL;
-
-    LOGE("=================");
-    //system("su -c chmod 666 /dev/video0");
-    /*
-#ifdef ANDROID_USB_CAMERA
-    //MY_USB_CAMER_FD = fd;
-    avdevice_set_android_usb_fd(fd);
-
-    //LOGE("MY camer fd is %d", MY_USB_CAMER_FD);
-#endif
-
-    sprintf(path, "/proc/%d/fd/%d", getpid(), fd);
-    if(path[0] != '\0'){
-        LOGE("fd path is %s.", path);
-        real_path = realpath(path, NULL);
-        if(real_path != NULL){
-            LOGE("get full path from fd %s.", real_path);
-            free(real_path);
-        }
-    }
-*/
-
-/*
-
-
-
-    LOGE("====push=====");
-//    av_log_set_callback(custom_log);
-
-    int ret = 0;
-    /// Open Input
-    AVFormatContext *pFormatCtx = avformat_alloc_context();
-
-    AVInputFormat *ifmt = av_find_input_format("video4linux2");
-    //if((ret = avformat_open_input(&pFormatCtx, "/dev/video0", ifmt, NULL)) != 0) {
-        if((ret = avformat_open_input(&pFormatCtx, real_path, ifmt, NULL)) != 0) {
-
-        LOGE("could not open file11, ret=%d, error=%s,", ret, av_err2str(ret));
-        return -1;
-    }
-
-    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
-        LOGE( "could not find stream info");
-        return -1;
-    }
-
-    av_dump_format(pFormatCtx, 0, "0", 0);
-
-    AVCodec *dec;
-    int video_index = -1;
-    if((video_index = av_find_best_stream(pFormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, &dec, 0)) < 0){
-        LOGE( "error");
-        return -1;
-    }
-
-    AVCodecContext *pCodecCtx = pFormatCtx->streams[video_index]->codec;
-    if(avcodec_open2(pCodecCtx, dec, NULL) <0){
-        LOGE( "eee");
-        return -1;
-    }
-
-
-    // Open Output
-    //const char* out_path = "rtmp://192.168.1.35:1935/myapp/peng2";
-    const char* out_path =  "rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_ac83f34ead90_cameraid";
-
-    AVFormatContext *ofmt_ctx;
-    avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_path);
-    AVCodec *oDec = avcodec_find_encoder(AV_CODEC_ID_H264);
-    if (!oDec) {
-        LOGE("Can not find endoder");
-        return -1;
-    }
-
-    AVCodecContext *oCodecCtx = avcodec_alloc_context3(oDec);
-    oCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
-    oCodecCtx->width = pCodecCtx->width;
-    oCodecCtx->height = pCodecCtx->height;
-    oCodecCtx->time_base.num = 1;
-    oCodecCtx->time_base.den = 30;
-    oCodecCtx->bit_rate = 800000;
-    oCodecCtx->gop_size = 300;
-    if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
-        oCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
-    oCodecCtx->qmin = 10;
-    oCodecCtx->qmax = 51;
-    oCodecCtx->max_b_frames = 3;
-
-    AVDictionary *params = 0;
-    av_dict_set(&params, "preset", "ultrafast", 0);
-    av_dict_set(&params, "tune", "zerolatency", 0);
-
-    if (avcodec_open2(oCodecCtx, oDec, &params) < 0){
-        LOGE("Failed to open encoder");
-        return -1;
-    }
-
-    AVStream *videoStream = avformat_new_stream(ofmt_ctx, oDec);
-    if (videoStream == NULL){
-        return -1;
-    }
-
-    videoStream->time_base.num = 1;
-    videoStream->time_base.den = 30;
-    videoStream->codec = oCodecCtx;
-
-    if((ret = avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_READ_WRITE)) < 0){
-        LOGE("Failed open out file22 erro=%d, ==%s==", ret, av_err2str(ret) );
-        //LOGE("Failed open out file22 erro=%d", ret);
-        return -1;
-    }
-
-    avformat_write_header(ofmt_ctx, NULL);
-    /////////////
-
-
-
-
-    //
-    AVFrame *pFrame, *pFrameYUV;
-    pFrame = av_frame_alloc();
-    pFrameYUV = av_frame_alloc();
-
-    int num_bytes = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
-    uint8_t *buffer = (uint8_t *)av_malloc(num_bytes * sizeof(uint8_t));
-    av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
-
-    pFrameYUV->format = AV_PIX_FMT_YUV420P;
-    pFrameYUV->width = pCodecCtx->width;
-    pFrameYUV->height = pCodecCtx->height;
-
-    struct SwsContext *img_convert_ctx;
-    img_convert_ctx = sws_getContext(pCodecCtx->width,
-                              pCodecCtx->height,
-                              pCodecCtx->pix_fmt,
-                              pCodecCtx->width,
-                              pCodecCtx->height,
-                              AV_PIX_FMT_YUV420P,
-                              SWS_BICUBIC,
-                              NULL, NULL, NULL);
-
-    AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
-    int got_picture = 0;
-
-    AVPacket enc_pkt ;
-
-    int64_t framecnt = 0;
-
-    while(av_read_frame(pFormatCtx, packet) >= 0){
-        if (packet->stream_index == video_index){
-            ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
-            if (ret < 0){
-                LOGE("Decode Error.");
-                return -1;
-            }
-            if (got_picture){
-                sws_scale(img_convert_ctx, (const unsigned char* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
-
-                enc_pkt.data = NULL;
-                enc_pkt.size = 0;
-                av_init_packet(&enc_pkt);
-                int enc_got_frame = 0;
-                ret = avcodec_encode_video2(oCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame);
-                if (enc_got_frame == 1){
-
-                           framecnt++;
-                    enc_pkt.stream_index = videoStream->index;
-
-                    // write PTS
-                    AVRational time_base = ofmt_ctx->streams[0]->time_base;
-                    AVRational r_framerate1 = {60, 2};
-                    AVRational time_base_q = {1, AV_TIME_BASE};
-
-                    int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1));  // internal timestamp units (microseconds)
-                    enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
-                    enc_pkt.dts = enc_pkt.pts;
-                    enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
-                    enc_pkt.pos = -1;
-
-                    int64_t pts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q);
-
-                ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
-                //av_frame_free(&pFrameYUV);
-                //av_packet_unref(packet);
-
-                av_free_packet(&enc_pkt);
-                //av_packet_unref(&enc_pkt);
-                }
-            }
-        }
-        av_packet_unref(packet);
-    }
-
-    sws_freeContext(img_convert_ctx);
-    av_free(pFrameYUV);
-    av_free(pFrame);
-    avcodec_close(pCodecCtx);
-    avformat_close_input(&pFormatCtx);
-
-
-*/
-
-
-    // The capture/push experiment above is commented out; return success so the JNI call is well defined.
-    return 0;
-}