Capture video from a USB camera and encode it with libx264
[rtmpclient.git] / app / src / main / jni / ai_suanzi_rtmpclient_Ffmpeg.cpp
index 485afea..e87fab1 100644 (file)
@@ -12,6 +12,7 @@ extern "C" {
     #include "libswscale/swscale.h"
     #include "libavutil/imgutils.h"
     #include "libavutil/time.h"
+    #include "libavdevice/avdevice.h"
 }
 
 int64_t start_time;
@@ -23,15 +24,72 @@ AVPacket enc_pkt;
 AVFrame *pFrameYUV;
 
 
+// Forward FFmpeg's av_log output to Android logcat so library messages show up
+// next to the app's own logs. (The formatted line could also be appended to a
+// file on external storage for offline debugging.)
+void custom_log(void *ptr, int level, const char* fmt, va_list vl){
+    static int print_prefix = 1;
+    char line[1024];
+
+    av_log_format_line(ptr, level, fmt, vl, line, sizeof(line), &print_prefix);
+    LOGE("%s", line);
+}
+
+
 int framecnt = 0;
 int yuv_width;
 int yuv_height;
 int y_length;
 int uv_length;
 
+
+
 JNIEXPORT jstring JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_getVersion (JNIEnv *env, jobject obj) {
     jint v = avformat_version();
     LOGE("######### Ffmpeg JNI version i= %d", v);
     return env->NewStringUTF("====== Ffmpeg call =======");
 }
 
@@ -39,11 +97,12 @@ JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_init (JNIEnv *env, jobje
 
        //const char* out_path = "/storage/emulated/0/Movies/output.flv";
 
-    const char* out_path = "rtmp://192.168.0.101:1935/myapp/suanzi";
+    const char* out_path = "rtmp://192.168.1.35:1935/myapp/suanzi";
+    // const char* out_path = "/storage/sdcard0/output.flv";
 
 
-    LOGE("Ffmpeg init, width=%d, heigh=%d", width, height);
 
+    LOGE("Ffmpeg init, width=%d, heigh=%d", width, height);
 
        yuv_width=width;
        yuv_height=height;
@@ -227,6 +286,366 @@ JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_process (JNIEnv *env, jo
     return 0;
 }
 
-JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_play (JNIEnv *env, jobject obj, jobject obj2, jstring fname){
+JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_play (JNIEnv *env, jobject obj, jobject surface, jstring fname){
+
+    LOGE("###### video play #####");
+    // char * file_name = "/storage/emulated/0/Movies/big_buck_bunny_720p_10mb.mp4";
+    const char * file_name = env->GetStringUTFChars(fname, 0);
+
+    av_register_all();
+      avdevice_register_all();
+
+
+    AVFormatContext * pFormatCtx = avformat_alloc_context();
+
+
+    // Capture from the USB camera through the video4linux2 device demuxer
+    // instead of opening the file passed in fname.
+    av_log_set_callback(custom_log);
+
+    AVInputFormat *ifmt = av_find_input_format("video4linux2");
+    LOGE("===%s===", ifmt->name);
+    if (avformat_open_input(&pFormatCtx, "/dev/video0", ifmt, NULL) != 0) {
+        LOGE("Couldn't open /dev/video0\n");
+        return -1; // Couldn't open the capture device
+    }
+
+
+/*
+    // Open video file
+    if(avformat_open_input(&pFormatCtx, file_name, NULL, NULL)!=0) {
+
+        LOGE("Couldn't open file:%s\n", file_name);
+        return -1; // Couldn't open file
+    }
+*/
+    // Retrieve stream information
+    if(avformat_find_stream_info(pFormatCtx, NULL)<0) {
+        LOGE("Couldn't find stream information.");
+        return -1;
+    }
+
+    // Find the first video stream
+    int videoStream = -1, i;
+    for (i = 0; i < pFormatCtx->nb_streams; i++) {
+        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO
+           && videoStream < 0) {
+            videoStream = i;
+        }
+    }
+    if(videoStream==-1) {
+        LOGE("Didn't find a video stream.");
+        return -1; // Didn't find a video stream
+    }
+
+    // Get a pointer to the codec context for the video stream
+    AVCodecContext  * pCodecCtx = pFormatCtx->streams[videoStream]->codec;
+LOGE("============= %d ========",__LINE__);
+    // Find the decoder for the video stream
+    AVCodec * pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
+    if(pCodec==NULL) {
+        LOGE("Codec not found.");
+        return -1; // Codec not found
+    }
+
+    if(avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
+        LOGE("Could not open codec.");
+        return -1; // Could not open codec
+    }
+
+    // Get the native window from the Java Surface
+    ANativeWindow* nativeWindow = ANativeWindow_fromSurface(env, surface);
+
+    // Video dimensions
+    int videoWidth = pCodecCtx->width;
+    int videoHeight = pCodecCtx->height;
+
+    // Set the native window's buffer geometry; the window scales the buffer automatically
+    ANativeWindow_setBuffersGeometry(nativeWindow,  videoWidth, videoHeight, WINDOW_FORMAT_RGBA_8888);
+    ANativeWindow_Buffer windowBuffer;
+
+    LOGE("stream format:%s", pFormatCtx->iformat->name);
+    LOGE("duration :%lld", (pFormatCtx->duration) / 1000000);
+    LOGE("Width, Height:%d x %d", pCodecCtx->width, pCodecCtx->height);
+    LOGE("Decoder name:%s", pCodec->name);
+
+    // Allocate video frame
+    AVFrame * pFrame = av_frame_alloc();
+
+    // RGBA frame used for rendering
+    AVFrame * pFrameRGBA = av_frame_alloc();
+    if(pFrameRGBA == NULL || pFrame == NULL) {
+        LOGE("Could not allocate video frame.");
+        return -1;
+    }
+
+    // Determine required buffer size and allocate buffer
+    int numBytes=av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height, 1);
+    uint8_t * buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
+    av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA,
+                         pCodecCtx->width, pCodecCtx->height, 1);
+
+    // The decoded frames are not RGBA, so convert them before rendering
+    struct SwsContext *sws_ctx = sws_getContext(pCodecCtx->width,
+                             pCodecCtx->height,
+                             pCodecCtx->pix_fmt,
+                             pCodecCtx->width,
+                             pCodecCtx->height,
+                             AV_PIX_FMT_RGBA,
+                             SWS_BILINEAR,
+                             NULL,
+                             NULL,
+                             NULL);
+
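+    // Decode loop: read packets from the capture device, decode each video
+    // packet, convert the frame to RGBA and blit it into the window buffer.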
+    int frameFinished;
+    AVPacket packet;
+    while(av_read_frame(pFormatCtx, &packet)>=0) {
+        // Is this a packet from the video stream?
+        if(packet.stream_index==videoStream) {
+
+            // Decode video frame
+            avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
+
+            // A single decode call does not always yield a complete frame
+            if (frameFinished) {
+
+                // lock native window buffer
+                ANativeWindow_lock(nativeWindow, &windowBuffer, 0);
+
+                // Convert the frame to RGBA
+                sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
+                          pFrame->linesize, 0, pCodecCtx->height,
+                          pFrameRGBA->data, pFrameRGBA->linesize);
+
+                // Source and destination strides
+                uint8_t * dst = (uint8_t*) windowBuffer.bits;
+                int dstStride = windowBuffer.stride * 4;
+                uint8_t * src = (uint8_t*) (pFrameRGBA->data[0]);
+                int srcStride = pFrameRGBA->linesize[0];
+
+                // The window stride differs from the frame stride, so copy row by row
+                int h;
+                for (h = 0; h < videoHeight; h++) {
+                    memcpy(dst + h * dstStride, src + h * srcStride, srcStride);
+                }
+
+                ANativeWindow_unlockAndPost(nativeWindow);
+            }
+
+        }
+        av_packet_unref(&packet);
+    }
+
+    av_free(buffer);
+    av_free(pFrameRGBA);
+
+    // Free the YUV frame
+    av_free(pFrame);
+
+    // Close the codecs
+    avcodec_close(pCodecCtx);
+
+    // Close the video file
+    avformat_close_input(&pFormatCtx);
+
+     env->ReleaseStringUTFChars(fname, file_name);
     return 0;
+}
+
+JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_push (JNIEnv *env, jobject obj, jobject surface) {
+
+    av_log_set_level(AV_LOG_TRACE);
+    av_register_all();
+    avformat_network_init();
+    avdevice_register_all();
+
+    LOGE("====push=====");
+    av_log_set_callback(custom_log);
+
+    int ret = 0;
+    /// Open Input
+    AVFormatContext *pFormatCtx = avformat_alloc_context();
+
+    AVInputFormat *ifmt = av_find_input_format("video4linux2");
+    if(avformat_open_input(&pFormatCtx, "/dev/video0", ifmt, NULL) != 0) {
+        LOGE("could not open file11");
+        return -1;
+    }
+
+    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
+        LOGE( "could not find stream info");
+        return -1;
+    }
+
+    av_dump_format(pFormatCtx, 0, "0", 0);
+
+    AVCodec *dec;
+    int video_index = -1;
+    if((video_index = av_find_best_stream(pFormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, &dec, 0)) < 0){
+        LOGE( "error");
+        return -1;
+    }
+
+    // Uses the old streams[i]->codec context; newer FFmpeg would allocate one
+    // with avcodec_alloc_context3() and fill it from the stream's codecpar.
+    AVCodecContext *pCodecCtx = pFormatCtx->streams[video_index]->codec;
+    if(avcodec_open2(pCodecCtx, dec, NULL) <0){
+        LOGE( "eee");
+        return -1;
+    }
+
+
+    // Open Output
+    //const char* out_path = "./abc.flv";
+    const char* out_path = "rtmp://192.168.1.35:1935/myapp/peng2";
+    //     const char* out_path = "/storage/sdcard0/output222.flv";
+
+    AVFormatContext *ofmt_ctx;
+    avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_path);
+    AVCodec *oDec = avcodec_find_encoder(AV_CODEC_ID_H264);
+    if (!oDec) {
+        LOGE("Can not find endoder");
+        return -1;
+    }
+
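+    // Encoder settings: same resolution as the camera, YUV420P input,
+    // 1/30 time base (30 fps), 800 kbit/s target bitrate, GOP of 300 frames.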
+    AVCodecContext *oCodecCtx = avcodec_alloc_context3(oDec);
+    oCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
+    oCodecCtx->width = pCodecCtx->width;
+    oCodecCtx->height = pCodecCtx->height;
+    oCodecCtx->time_base.num = 1;
+    oCodecCtx->time_base.den = 30;
+    oCodecCtx->bit_rate = 800000;
+    oCodecCtx->gop_size = 300;
+    if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
+        oCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
+    oCodecCtx->qmin = 10;
+    oCodecCtx->qmax = 51;
+    oCodecCtx->max_b_frames = 3;
+
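+    // libx264 private options: "ultrafast" keeps the encoding cost low on the
+    // device and "zerolatency" disables frame buffering for live streaming.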
+    AVDictionary *params = 0;
+    av_dict_set(&params, "preset", "ultrafast", 0);
+    av_dict_set(&params, "tune", "zerolatency", 0);
+
+    if (avcodec_open2(oCodecCtx, oDec, &params) < 0){
+        LOGE("Failed to open encoder");
+        return -1;
+    }
+
+    AVStream *videoStream = avformat_new_stream(ofmt_ctx, oDec);
+    if (videoStream == NULL){
+        return -1;
+    }
+
+    videoStream->time_base.num = 1;
+    videoStream->time_base.den = 30;
+    videoStream->codec = oCodecCtx;
+
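+    // Open the RTMP URL for writing and emit the FLV header before streaming.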
+    if((ret = avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_READ_WRITE)) < 0){
+        LOGE("Failed open out file22 erro=%d, ==%s==", ret, av_err2str(ret) );
+        //LOGE("Failed open out file22 erro=%d", ret);
+        return -1;
+    }
+
+    avformat_write_header(ofmt_ctx, NULL);
+
+    // Allocate frames: pFrame receives the raw camera frame, pFrameYUV holds
+    // the YUV420P conversion that is fed to the encoder.
+    AVFrame *pFrame, *pFrameYUV;
+    pFrame = av_frame_alloc();
+    pFrameYUV = av_frame_alloc();
+
+    int num_bytes = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
+    uint8_t *buffer = (uint8_t *)av_malloc(num_bytes * sizeof(uint8_t));
+    av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
+
+    pFrameYUV->format = AV_PIX_FMT_YUV420P;
+    pFrameYUV->width = pCodecCtx->width;
+    pFrameYUV->height = pCodecCtx->height;
+
+    struct SwsContext *img_convert_ctx;
+    img_convert_ctx = sws_getContext(pCodecCtx->width,
+                              pCodecCtx->height,
+                              pCodecCtx->pix_fmt,
+                              pCodecCtx->width,
+                              pCodecCtx->height,
+                              AV_PIX_FMT_YUV420P,
+                              SWS_BICUBIC,
+                              NULL, NULL, NULL);
+
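+    // Capture loop: read a raw packet from the camera, decode it, convert the
+    // frame to YUV420P, encode it with libx264 and mux the result to RTMP.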
+    AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
+    int got_picture = 0;
+
+    AVPacket enc_pkt;
+
+    int64_t framecnt = 0;
+
+    while(av_read_frame(pFormatCtx, packet) >= 0){
+        if (packet->stream_index == video_index){
+            ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
+            if (ret < 0){
+                LOGE("Decode Error.");
+                return -1;
+            }
+            if (got_picture){
+                sws_scale(img_convert_ctx, (const unsigned char* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
+
+                enc_pkt.data = NULL;
+                enc_pkt.size = 0;
+                av_init_packet(&enc_pkt);
+                int enc_got_frame = 0;
+                ret = avcodec_encode_video2(oCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame);
+                if (enc_got_frame == 1){
+
+                    framecnt++;
+                    enc_pkt.stream_index = videoStream->index;
+
+                    // write PTS
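+                    // The nominal frame rate is 60/2 = 30 fps, so one frame
+                    // lasts AV_TIME_BASE/30 internal ticks; rescale that
+                    // duration and the frame count into the stream time base.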
+                    AVRational time_base = ofmt_ctx->streams[0]->time_base;
+                    AVRational r_framerate1 = {60, 2};
+                    AVRational time_base_q = {1, AV_TIME_BASE};
+
+                    int64_t calc_duration = (double)(AV_TIME_BASE) * (1 / av_q2d(r_framerate1)); // frame duration in internal AV_TIME_BASE units
+                    enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
+                    enc_pkt.dts = enc_pkt.pts;
+                    enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
+                    enc_pkt.pos = -1;
+
+                    int64_t pts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q);
+
+                    // Send the packet only when the encoder produced output.
+                    ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
+                }
+
+
+                /*
+                int y_size = pCodecCtx->width * pCodecCtx->height;
+                fwrite(pFrameYUV->data[0], 1, y_size, fp);      // Y
+                fwrite(pFrameYUV->data[1], 1, y_size / 4, fp);  // U
+                fwrite(pFrameYUV->data[2], 1, y_size / 4, fp);  // V
+                */
+            }
+        }
+        av_packet_unref(packet);
+    }
+
+    av_write_trailer(ofmt_ctx);
+    sws_freeContext(img_convert_ctx);
+    av_free(buffer);
+    av_free(pFrameYUV);
+    av_free(pFrame);
+    avcodec_close(pCodecCtx);
+    avio_close(ofmt_ctx->pb);
+    avformat_close_input(&pFormatCtx);
+
+    return 0;
+}