X-Git-Url: http://47.100.26.94:8080/?a=blobdiff_plain;f=app%2Fsrc%2Fmain%2Fjni%2Fai_suanzi_rtmpclient_Ffmpeg.cpp;h=e6c8d158382d33318ed01ac71280df8aaff34eed;hb=831cc09829bc6e18d8d0d8bb78063e89ea565ce9;hp=7b16b491bfe7ab36d8c96adbc5be2687d27d0c15;hpb=6d410bf5e67288660c675d0aa76891eb8367e7cc;p=rtmpclient.git diff --git a/app/src/main/jni/ai_suanzi_rtmpclient_Ffmpeg.cpp b/app/src/main/jni/ai_suanzi_rtmpclient_Ffmpeg.cpp index 7b16b49..e6c8d15 100644 --- a/app/src/main/jni/ai_suanzi_rtmpclient_Ffmpeg.cpp +++ b/app/src/main/jni/ai_suanzi_rtmpclient_Ffmpeg.cpp @@ -5,6 +5,11 @@ #include #include #include "log.h" +#include +#include +#include +#include +#include extern "C" { #include "libavformat/avformat.h" @@ -25,19 +30,29 @@ AVFrame *pFrameYUV; void custom_log(void *ptr, int level, const char* fmt, va_list vl){ - //To TXT file - /*FILE *fp=fopen("/storage/emulated/0/av_log.txt","a+"); if(fp){ - vfprintf(fp,fmt,vl); - fflush(fp); - fclose(fp); + vfprintf(fp,fmt,vl); + fflush(fp); + fclose(fp); } */ + //To Logcat + // LOGE(fmt, vl); + static int print_prefix = 1; + //static char prev[1024]; + char line[1024]; + av_log_format_line(ptr, level, fmt, vl, line, sizeof(line), &print_prefix); - //To Logcat - LOGE(fmt, vl); + //strcpy(prev, line); + //sanitize((uint8_t *)line); + + if (level <= AV_LOG_WARNING){ + LOGE("%s", line); + } else { + LOGE("%s", line); + } } @@ -47,10 +62,35 @@ int yuv_height; int y_length; int uv_length; +JNIEXPORT void JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_init (JNIEnv *env, jobject obj ){ + LOGE("########## Ffmpeg Init ##########"); + unsigned int v = avutil_version(); + LOGE("libavutil - %d.%d.%d", AV_VERSION_MAJOR(v), AV_VERSION_MINOR(v), AV_VERSION_MICRO(v)); + v = avcodec_version(); + LOGE("libavcodec - %d.%d.%d", AV_VERSION_MAJOR(v), AV_VERSION_MINOR(v), AV_VERSION_MICRO(v)); + v = avformat_version(); + LOGE("libavformat - %d.%d.%d", AV_VERSION_MAJOR(v), AV_VERSION_MINOR(v), AV_VERSION_MICRO(v)); + v = avdevice_version(); + LOGE("libavdevice - %d.%d.%d", AV_VERSION_MAJOR(v), AV_VERSION_MINOR(v), AV_VERSION_MICRO(v)); + + //system("su -c chmod 666 /dev/video0"); + system("/system/xbin/su -c echo 'wowo' >> /data/local/test"); + system("echo 'wowow' >> /sdcard/peng/test"); + + av_log_set_level(AV_LOG_TRACE); + av_register_all(); + avdevice_register_all(); + avformat_network_init(); + av_log_set_callback(custom_log); +} JNIEXPORT jstring JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_getVersion (JNIEnv *env, jobject obj) { jint v = avformat_version(); + LOGE("######### Ffmpeg JNI version i= %d", v); + + system("su -c chmod 666 /dev/video0"); + LOGE("######### Ffmpeg JNI version i= %d", v); @@ -69,12 +109,14 @@ JNIEXPORT jstring JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_getVersion (JNIEnv *e return env->NewStringUTF("====== Ffmpeg call ======="); } -JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_init (JNIEnv *env, jobject obj, jint width, jint height) { +JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_inithaha (JNIEnv *env, jobject obj, jint width, jint height) { //const char* out_path = "/storage/emulated/0/Movies/output.flv"; //const char* out_path = "rtmp://192.168.1.35:1935/myapp/suanzi"; - const char* out_path = "/storage/sdcard0/output.flv"; + const char* out_path = "rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_ac83f34ead90_cameraid"; + + // const char* out_path = "/storage/sdcard0/output.flv"; @@ -208,7 +250,7 @@ JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_process (JNIEnv *env, jo int enc_got_frame=0; int i=0; - //LOGE(" process data - ffmpeg"); + 
LOGE(" process data - ffmpeg"); pFrameYUV = av_frame_alloc(); uint8_t *out_buffer = (uint8_t *)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height)); avpicture_fill((AVPicture *)pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height); @@ -321,7 +363,7 @@ JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_play (JNIEnv *env, jobje // Get a pointer to the codec context for the video stream AVCodecContext * pCodecCtx = pFormatCtx->streams[videoStream]->codec; -LOGE("============= %d ========",__LINE__); + LOGE("============= %d ========",__LINE__); // Find the decoder for the video stream AVCodec * pCodec = avcodec_find_decoder(pCodecCtx->codec_id); if(pCodec==NULL) { @@ -436,4 +478,589 @@ LOGE("============= %d ========",__LINE__); env->ReleaseStringUTFChars(fname, file_name); return 0; -} \ No newline at end of file +} + +JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_push (JNIEnv *env, jobject obj, jobject surface, jstring url){ + + /* + av_log_set_level(AV_LOG_TRACE); + av_register_all(); + avformat_network_init(); + avdevice_register_all(); + */ + + LOGE("====push====="); +// av_log_set_callback(custom_log); + // Open Output + //const char* out_path = "rtmp://192.168.1.35:1935/myapp/peng2"; + //const char* out_path = "rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_ac83f34ead90_cameraid"; + const char* out_path = env->GetStringUTFChars(url, 0); + //const char * file_name = env->GetStringUTFChars(fname, 0); + + + int ret = 0; + /// Open Input + AVFormatContext *pFormatCtx = avformat_alloc_context(); + + AVInputFormat *ifmt = av_find_input_format("video4linux2"); + if((ret = avformat_open_input(&pFormatCtx, "/dev/video0", ifmt, NULL)) != 0) { + // if((ret = avformat_open_input(&pFormatCtx, "/dev/bus/usb/003/007", ifmt, NULL)) != 0) { + + LOGE("could not open file11, ret=%d, error=%s,", ret, av_err2str(ret)); + return -1; + } + + if (avformat_find_stream_info(pFormatCtx, NULL) < 0) { + LOGE( "could not find stream info"); + return -1; + } + + av_dump_format(pFormatCtx, 0, "0", 0); + + AVCodec *dec; + int video_index = -1; + if((video_index = av_find_best_stream(pFormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, &dec, 0)) < 0){ + LOGE( "error"); + return -1; + } + + AVCodecContext *pCodecCtx = pFormatCtx->streams[video_index]->codec; + if(avcodec_open2(pCodecCtx, dec, NULL) <0){ + LOGE( "eee"); + return -1; + } + + + // Open Output + //const char* out_path = "rtmp://192.168.1.35:1935/myapp/peng2"; + //const char* out_path = "rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_ac83f34ead90_cameraid"; + + AVFormatContext *ofmt_ctx; + avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_path); + AVCodec *oDec = avcodec_find_encoder(AV_CODEC_ID_H264); + if (!oDec) { + LOGE("Can not find endoder"); + return -1; + } + + AVCodecContext *oCodecCtx = avcodec_alloc_context3(oDec); + oCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P; + oCodecCtx->width = pCodecCtx->width; + oCodecCtx->height = pCodecCtx->height; + oCodecCtx->time_base.num = 1; + oCodecCtx->time_base.den = 30; + oCodecCtx->bit_rate = 800000; + oCodecCtx->gop_size = 300; + if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER) + oCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER; + oCodecCtx->qmin = 10; + oCodecCtx->qmax = 51; + oCodecCtx->max_b_frames = 3; + + AVDictionary *params = 0; + av_dict_set(¶ms, "preset", "ultrafast", 0); + av_dict_set(¶ms, "tune", "zerolatency", 0); + + if (avcodec_open2(oCodecCtx, oDec, ¶ms) < 0){ + LOGE("Failed to open encoder"); + return -1; + } + + AVStream *videoStream = 
avformat_new_stream(ofmt_ctx, oDec); + if (videoStream == NULL){ + return -1; + } + + videoStream->time_base.num = 1; + videoStream->time_base.den = 30; + videoStream->codec = oCodecCtx; + + if((ret = avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_READ_WRITE)) < 0){ + LOGE("Failed open out file22 erro=%d, ==%s==", ret, av_err2str(ret) ); + //LOGE("Failed open out file22 erro=%d", ret); + return -1; + } + + avformat_write_header(ofmt_ctx, NULL); + ///////////// + + + + + // + AVFrame *pFrame, *pFrameYUV; + pFrame = av_frame_alloc(); + pFrameYUV = av_frame_alloc(); + + int num_bytes = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1); + uint8_t *buffer = (uint8_t *)av_malloc(num_bytes * sizeof(uint8_t)); + av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1); + + pFrameYUV->format = AV_PIX_FMT_YUV420P; + pFrameYUV->width = pCodecCtx->width; + pFrameYUV->height = pCodecCtx->height; + + struct SwsContext *img_convert_ctx; + img_convert_ctx = sws_getContext(pCodecCtx->width, + pCodecCtx->height, + pCodecCtx->pix_fmt, + pCodecCtx->width, + pCodecCtx->height, + AV_PIX_FMT_YUV420P, + SWS_BICUBIC, + NULL, NULL, NULL); + + AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket)); + int got_picture = 0; + + AVPacket enc_pkt ; + + int64_t framecnt = 0; + + while(av_read_frame(pFormatCtx, packet) >= 0){ + if (packet->stream_index == video_index){ + ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet); + if (ret < 0){ + LOGE("Decode Error."); + return -1; + } + if (got_picture){ + sws_scale(img_convert_ctx, (const unsigned char* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize); + + enc_pkt.data = NULL; + enc_pkt.size = 0; + av_init_packet(&enc_pkt); + int enc_got_frame = 0; + ret = avcodec_encode_video2(oCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame); + if (enc_got_frame == 1){ + + framecnt++; + enc_pkt.stream_index = videoStream->index; + + // write PTS + AVRational time_base = ofmt_ctx->streams[0]->time_base; + AVRational r_framerate1 = {60, 2}; + AVRational time_base_q = {1, AV_TIME_BASE}; + + int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1)); //内部时间戳 + enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base); + enc_pkt.dts = enc_pkt.pts; + enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base)); + enc_pkt.pos = -1; + + int64_t pts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q); + + ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt); + //av_frame_free(&pFrameYUV); + //av_packet_unref(packet); + + av_free_packet(&enc_pkt); + //av_packet_unref(&enc_pkt); + } + } + } + av_packet_unref(packet); + } + + sws_freeContext(img_convert_ctx); + av_free(pFrameYUV); + av_free(pFrame); + avcodec_close(pCodecCtx); + avformat_close_input(&pFormatCtx); + return 0; +} + +JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_preview (JNIEnv *env, jobject obj, jobject surface){ + + LOGE("###### video preview #####"); + + av_register_all(); + avdevice_register_all(); + + + AVFormatContext * pFormatCtx = avformat_alloc_context(); + + + av_log_set_callback(custom_log); + + AVInputFormat *ifmt=av_find_input_format("video4linux2"); + LOGE("===%s===", ifmt->name); + if(avformat_open_input(&pFormatCtx,"/dev/video0",ifmt,NULL)!=0){ + LOGE("Couldn't open file:\n"); + return -1; // Couldn't open file + 
} + + // Retrieve stream information + if(avformat_find_stream_info(pFormatCtx, NULL)<0) { + LOGE("Couldn't find stream information."); + return -1; + } + + // Find the first video stream + int videoStream = -1, i; + for (i = 0; i < pFormatCtx->nb_streams; i++) { + if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO + && videoStream < 0) { + videoStream = i; + } + } + if(videoStream==-1) { + LOGE("Didn't find a video stream."); + return -1; // Didn't find a video stream + } + + // Get a pointer to the codec context for the video stream + AVCodecContext * pCodecCtx = pFormatCtx->streams[videoStream]->codec; + LOGE("============= %d ========",__LINE__); + // Find the decoder for the video stream + AVCodec * pCodec = avcodec_find_decoder(pCodecCtx->codec_id); + if(pCodec==NULL) { + LOGE("Codec not found."); + return -1; // Codec not found + } + + if(avcodec_open2(pCodecCtx, pCodec, NULL) < 0) { + LOGE("Could not open codec."); + return -1; // Could not open codec + } + + // 获取native window + ANativeWindow* nativeWindow = ANativeWindow_fromSurface(env, surface); + + // 获取视频宽高 + int videoWidth = pCodecCtx->width; + int videoHeight = pCodecCtx->height; + + // 设置native window的buffer大小,可自动拉伸 + ANativeWindow_setBuffersGeometry(nativeWindow, videoWidth, videoHeight, WINDOW_FORMAT_RGBA_8888); + ANativeWindow_Buffer windowBuffer; + + + LOGE("stream format:%s", pFormatCtx->iformat->name); + LOGE("duration :%lld", (pFormatCtx->duration) / 1000000); + LOGE("Width, Height:%d x %d", pCodecCtx->width, pCodecCtx->height); + LOGE("Decoder name:%s", pCodec->name); + + // Allocate video frame + AVFrame * pFrame = av_frame_alloc(); + + // 用于渲染 + AVFrame * pFrameRGBA = av_frame_alloc(); + if(pFrameRGBA == NULL || pFrame == NULL) { + LOGE("Could not allocate video frame."); + return -1; + } + + // Determine required buffer size and allocate buffer + int numBytes=av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height, 1); + uint8_t * buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t)); + av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA, + pCodecCtx->width, pCodecCtx->height, 1); + + // 由于解码出来的帧格式不是RGBA的,在渲染之前需要进行格式转换 + struct SwsContext *sws_ctx = sws_getContext(pCodecCtx->width, + pCodecCtx->height, + pCodecCtx->pix_fmt, + pCodecCtx->width, + pCodecCtx->height, + AV_PIX_FMT_RGBA, + SWS_BILINEAR, + NULL, + NULL, + NULL); + + int frameFinished; + AVPacket packet; + while(av_read_frame(pFormatCtx, &packet)>=0) { + // Is this a packet from the video stream? 
+ if(packet.stream_index==videoStream) { + + // Decode video frame + avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet); + + // 并不是decode一次就可解码出一帧 + if (frameFinished) { + + // lock native window buffer + ANativeWindow_lock(nativeWindow, &windowBuffer, 0); + + // 格式转换 + sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data, + pFrame->linesize, 0, pCodecCtx->height, + pFrameRGBA->data, pFrameRGBA->linesize); + + // 获取stride + uint8_t * dst = (uint8_t*) windowBuffer.bits; + int dstStride = windowBuffer.stride * 4; + uint8_t * src = (uint8_t*) (pFrameRGBA->data[0]); + int srcStride = pFrameRGBA->linesize[0]; + + // 由于window的stride和帧的stride不同,因此需要逐行复制 + int h; + for (h = 0; h < videoHeight; h++) { + memcpy(dst + h * dstStride, src + h * srcStride, srcStride); + } + + ANativeWindow_unlockAndPost(nativeWindow); + } + + } + av_packet_unref(&packet); + } + + av_free(buffer); + av_free(pFrameRGBA); + + // Free the YUV frame + av_free(pFrame); + + // Close the codecs + avcodec_close(pCodecCtx); + + // Close the video file + avformat_close_input(&pFormatCtx); + + //env->ReleaseStringUTFChars(fname, file_name); + return 0; +} + +JNIEXPORT jstring JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_getPerfectDevice (JNIEnv *env, jobject obj) { + int ret; + LOGE("getPerfectDevice"); + AVFormatContext *pFormatCtx = avformat_alloc_context(); + AVInputFormat *ifmt = av_find_input_format("video4linux2"); + if((ret = avformat_open_input(&pFormatCtx, "/dev/video0", ifmt, NULL)) != 0) { + LOGE("could not open file11, ret=%d, error=%s,", ret, av_err2str(ret)); + //return ; + } + if (avformat_find_stream_info(pFormatCtx, NULL) < 0) { + LOGE( "could not find stream info"); + //return -1; + } + av_dump_format(pFormatCtx, 0, "0", 0); + avformat_free_context(pFormatCtx); + //system("su -c \"find / -perm -2000 -o -perm -4000; ps; ls\""); + system("touch /storage/sdcard0/aa"); + + return env->NewStringUTF("====== Ffmpeg call ======="); +} + + + + +JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_test (JNIEnv *env, jobject obj, jint fd){ + char path[512] = {0}; + char* real_path = NULL; + + LOGE("================="); + //system("su -c chmod 666 /dev/video0"); + /* +#ifdef ANDROID_USB_CAMERA + //MY_USB_CAMER_FD = fd; + avdevice_set_android_usb_fd(fd); + + //LOGE("MY camer fd is %d", MY_USB_CAMER_FD); +#endif + + sprintf(path, "/proc/%d/fd/%d", getpid(), fd); + if(path[0] != '\0'){ + LOGE("fd path is %s.", path); + real_path = realpath(path, NULL); + if(real_path != NULL){ + LOGE("get full path from fd %s.", real_path); + free(real_path); + } + } +*/ + +/* + + + + LOGE("====push====="); +// av_log_set_callback(custom_log); + + int ret = 0; + /// Open Input + AVFormatContext *pFormatCtx = avformat_alloc_context(); + + AVInputFormat *ifmt = av_find_input_format("video4linux2"); + //if((ret = avformat_open_input(&pFormatCtx, "/dev/video0", ifmt, NULL)) != 0) { + if((ret = avformat_open_input(&pFormatCtx, real_path, ifmt, NULL)) != 0) { + + LOGE("could not open file11, ret=%d, error=%s,", ret, av_err2str(ret)); + return -1; + } + + if (avformat_find_stream_info(pFormatCtx, NULL) < 0) { + LOGE( "could not find stream info"); + return -1; + } + + av_dump_format(pFormatCtx, 0, "0", 0); + + AVCodec *dec; + int video_index = -1; + if((video_index = av_find_best_stream(pFormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, &dec, 0)) < 0){ + LOGE( "error"); + return -1; + } + + AVCodecContext *pCodecCtx = pFormatCtx->streams[video_index]->codec; + if(avcodec_open2(pCodecCtx, dec, NULL) <0){ + LOGE( "eee"); + return -1; + } + + + // 
Open Output + //const char* out_path = "rtmp://192.168.1.35:1935/myapp/peng2"; + const char* out_path = "rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_ac83f34ead90_cameraid"; + + AVFormatContext *ofmt_ctx; + avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_path); + AVCodec *oDec = avcodec_find_encoder(AV_CODEC_ID_H264); + if (!oDec) { + LOGE("Can not find endoder"); + return -1; + } + + AVCodecContext *oCodecCtx = avcodec_alloc_context3(oDec); + oCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P; + oCodecCtx->width = pCodecCtx->width; + oCodecCtx->height = pCodecCtx->height; + oCodecCtx->time_base.num = 1; + oCodecCtx->time_base.den = 30; + oCodecCtx->bit_rate = 800000; + oCodecCtx->gop_size = 300; + if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER) + oCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER; + oCodecCtx->qmin = 10; + oCodecCtx->qmax = 51; + oCodecCtx->max_b_frames = 3; + + AVDictionary *params = 0; + av_dict_set(¶ms, "preset", "ultrafast", 0); + av_dict_set(¶ms, "tune", "zerolatency", 0); + + if (avcodec_open2(oCodecCtx, oDec, ¶ms) < 0){ + LOGE("Failed to open encoder"); + return -1; + } + + AVStream *videoStream = avformat_new_stream(ofmt_ctx, oDec); + if (videoStream == NULL){ + return -1; + } + + videoStream->time_base.num = 1; + videoStream->time_base.den = 30; + videoStream->codec = oCodecCtx; + + if((ret = avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_READ_WRITE)) < 0){ + LOGE("Failed open out file22 erro=%d, ==%s==", ret, av_err2str(ret) ); + //LOGE("Failed open out file22 erro=%d", ret); + return -1; + } + + avformat_write_header(ofmt_ctx, NULL); + ///////////// + + + + + // + AVFrame *pFrame, *pFrameYUV; + pFrame = av_frame_alloc(); + pFrameYUV = av_frame_alloc(); + + int num_bytes = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1); + uint8_t *buffer = (uint8_t *)av_malloc(num_bytes * sizeof(uint8_t)); + av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1); + + pFrameYUV->format = AV_PIX_FMT_YUV420P; + pFrameYUV->width = pCodecCtx->width; + pFrameYUV->height = pCodecCtx->height; + + struct SwsContext *img_convert_ctx; + img_convert_ctx = sws_getContext(pCodecCtx->width, + pCodecCtx->height, + pCodecCtx->pix_fmt, + pCodecCtx->width, + pCodecCtx->height, + AV_PIX_FMT_YUV420P, + SWS_BICUBIC, + NULL, NULL, NULL); + + AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket)); + int got_picture = 0; + + AVPacket enc_pkt ; + + int64_t framecnt = 0; + + while(av_read_frame(pFormatCtx, packet) >= 0){ + if (packet->stream_index == video_index){ + ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet); + if (ret < 0){ + LOGE("Decode Error."); + return -1; + } + if (got_picture){ + sws_scale(img_convert_ctx, (const unsigned char* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize); + + enc_pkt.data = NULL; + enc_pkt.size = 0; + av_init_packet(&enc_pkt); + int enc_got_frame = 0; + ret = avcodec_encode_video2(oCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame); + if (enc_got_frame == 1){ + + framecnt++; + enc_pkt.stream_index = videoStream->index; + + // write PTS + AVRational time_base = ofmt_ctx->streams[0]->time_base; + AVRational r_framerate1 = {60, 2}; + AVRational time_base_q = {1, AV_TIME_BASE}; + + int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1)); //内部时间戳 + enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base); + enc_pkt.dts = enc_pkt.pts; + enc_pkt.duration = 
av_rescale_q(calc_duration, time_base_q, time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
+                    enc_pkt.pos = -1;
+
+                    int64_t pts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q);
+
+                    ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
+                    //av_frame_free(&pFrameYUV);
+                    //av_packet_unref(packet);
+
+                    av_free_packet(&enc_pkt);
+                    //av_packet_unref(&enc_pkt);
+                }
+            }
+        }
+        av_packet_unref(packet);
+    }
+
+    sws_freeContext(img_convert_ctx);
+    av_free(pFrameYUV);
+    av_free(pFrame);
+    avcodec_close(pCodecCtx);
+    avformat_close_input(&pFormatCtx);
+
+*/
+
+    // This function is declared to return jint, so return a status code explicitly
+    // instead of falling off the end of a non-void JNI function.
+    return 0;
+}
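
Note on the encoding path added in this commit: the per-frame block built around avcodec_encode_video2(), the hand-rolled calc_duration/av_rescale_q timestamp arithmetic, and av_free_packet() all rely on APIs that FFmpeg has since deprecated. What follows is only a minimal sketch, not part of the commit, assuming an FFmpeg build new enough (3.1+) to provide the send/receive codec API; the helper name encode_and_write_frame is invented here for illustration, while oCodecCtx, ofmt_ctx, videoStream, pFrameYUV and framecnt refer to the variables already used in the diff above.

// Sketch only (not in this commit): encode one YUV frame and mux it, assuming FFmpeg >= 3.1.
// Pass frame = NULL once at end-of-stream to flush the encoder.
static int encode_and_write_frame(AVCodecContext *oCodecCtx, AVFormatContext *ofmt_ctx,
                                  AVStream *videoStream, AVFrame *frame, int64_t *framecnt)
{
    if (frame) {
        // The encoder expects the PTS on the input frame, in oCodecCtx->time_base (1/30 above);
        // the diff instead stamps the packet after encoding.
        frame->pts = (*framecnt)++;
    }

    int ret = avcodec_send_frame(oCodecCtx, frame);
    if (ret < 0)
        return ret;

    AVPacket *pkt = av_packet_alloc();
    if (!pkt)
        return AVERROR(ENOMEM);

    while ((ret = avcodec_receive_packet(oCodecCtx, pkt)) >= 0) {
        pkt->stream_index = videoStream->index;
        // Replaces the manual calc_duration / av_rescale_q arithmetic above:
        // rescale pts/dts/duration from the encoder time base to the stream time base.
        av_packet_rescale_ts(pkt, oCodecCtx->time_base, videoStream->time_base);
        pkt->pos = -1;
        ret = av_interleaved_write_frame(ofmt_ctx, pkt);
        av_packet_unref(pkt);  // harmless: av_interleaved_write_frame() already consumed the data
        if (ret < 0)
            break;
    }
    av_packet_free(&pkt);

    // EAGAIN (encoder needs more input) and EOF (fully flushed) are normal here.
    if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
        ret = 0;
    return ret;
}

With this approach, flushing at end-of-stream would be a final call with frame set to NULL before av_write_trailer(ofmt_ctx); the push() path in the diff never calls av_write_trailer() or closes ofmt_ctx->pb, which is worth noting if the same code is ever pointed at an FLV file instead of an RTMP URL.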