2 // Created by Peng Li on 30/4/2018.
4 #include "ai_suanzi_rtmpclient_Ffmpeg.h"
5 #include <android/native_window.h>
6 #include <android/native_window_jni.h>
10 #include "libavformat/avformat.h"
11 #include "libavcodec/avcodec.h"
12 #include "libswscale/swscale.h"
13 #include "libavutil/imgutils.h"
14 #include "libavutil/time.h"
AVFormatContext *ofmt_ctx;    // output muxer context (FLV over RTMP); allocated in init(), freed in close()
AVCodecContext* pCodecCtx;    // H.264 encoder context; configured and opened in init()
32 JNIEXPORT jstring JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_getVersion (JNIEnv *env, jobject obj) {
33 jint v = avformat_version();
34 LOGE("######### Ffmpeg JNI version i= %d", v);
35 return env->NewStringUTF("====== Ffmpeg call =======");
38 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_init (JNIEnv *env, jobject obj, jint width, jint height) {
40 //const char* out_path = "/storage/emulated/0/Movies/output.flv";
42 const char* out_path = "rtmp://192.168.0.101:1935/myapp/suanzi";
45 LOGE("Ffmpeg init, width=%d, heigh=%d", width, height);
50 y_length=width*height;
51 uv_length=width*height/4;
57 avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_path);
58 //output encoder initialize
59 pCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
61 LOGE("Can not find encoder!\n");
64 pCodecCtx = avcodec_alloc_context3(pCodec);
65 pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
66 pCodecCtx->width = width;
67 pCodecCtx->height = height;
68 pCodecCtx->time_base.num = 1;
69 pCodecCtx->time_base.den = 30;
70 pCodecCtx->bit_rate = 800000;
71 pCodecCtx->gop_size = 300;
72 /* Some formats want stream headers to be separate. */
73 if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
74 pCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
77 //pCodecCtx->me_range = 16;
78 //pCodecCtx->max_qdiff = 4;
79 //pCodecCtx->qcompress = 0.6;
83 pCodecCtx->max_b_frames = 3;
84 // Set H264 preset and tune
85 AVDictionary *param = 0;
86 av_dict_set(¶m, "preset", "ultrafast", 0);
87 av_dict_set(¶m, "tune", "zerolatency", 0);
89 if (avcodec_open2(pCodecCtx, pCodec, ¶m) < 0){
90 LOGE("Failed to open encoder!\n");
94 //Add a new stream to output,should be called by the user before avformat_write_header() for muxing
95 video_st = avformat_new_stream(ofmt_ctx, pCodec);
96 if (video_st == NULL){
99 video_st->time_base.num = 1;
100 video_st->time_base.den = 30;
101 video_st->codec = pCodecCtx;
103 //Open output URL,set before avformat_write_header() for muxing
105 if (( ret = avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_READ_WRITE)) < 0){
106 LOGE("Failed to open output file! return :%d\n", ret);
111 avformat_write_header(ofmt_ctx, NULL);
113 start_time = av_gettime();
// Drain any frames still buffered inside the encoder, mux them, then write
// the stream trailer to finalize the output.
// NOTE(review): several interior lines of this function (the drain loop
// header, got-frame checks, and closing braces) are missing from this chunk
// of the file; code below is left untouched.
JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_flush (JNIEnv *env, jobject obj){
    // Encoders without the DELAY capability buffer nothing — no flush needed.
    if (!(ofmt_ctx->streams[0]->codec->codec->capabilities & CODEC_CAP_DELAY))
    av_init_packet(&enc_pkt);
    // Passing a NULL frame (elided here) signals end-of-stream to the encoder.
    ret = avcodec_encode_video2(ofmt_ctx->streams[0]->codec, &enc_pkt,
    LOGE("Flush Encoder: Succeed to encode 1 frame!\tsize:%5d\n", enc_pkt.size);
    // Timestamping mirrors process(): synthesize pts/dts from the frame count
    // at a fixed 30 fps (60/2) and rescale into the stream time_base.
    AVRational time_base = ofmt_ctx->streams[0]->time_base;//{ 1, 1000 };
    AVRational r_framerate1 = { 60, 2 };
    AVRational time_base_q = { 1, AV_TIME_BASE };
    //Duration between 2 frames (us)
    int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1)); // internal (AV_TIME_BASE) timestamp
    enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
    enc_pkt.dts = enc_pkt.pts;
    enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base);
    //Convert PTS/DTS
    ofmt_ctx->duration = enc_pkt.duration * framecnt;
    /* mux encoded frame */
    ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
    // Finalize the container once the encoder is fully drained.
    av_write_trailer(ofmt_ctx);
163 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_close (JNIEnv *env, jobject obj){
165 avcodec_close(video_st->codec);
166 avio_close(ofmt_ctx->pb);
167 avformat_free_context(ofmt_ctx);
171 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_process (JNIEnv *env, jobject obj, jbyteArray yuv){
176 //LOGE(" process data - ffmpeg");
177 pFrameYUV = av_frame_alloc();
178 uint8_t *out_buffer = (uint8_t *)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
179 avpicture_fill((AVPicture *)pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
181 //安卓摄像头数据为NV21格式,此处将其转换为YUV420P格式
182 jbyte* in= (jbyte*)env->GetByteArrayElements(yuv,0);
183 memcpy(pFrameYUV->data[0],in,y_length);
184 for(i=0;i<uv_length;i++)
186 *(pFrameYUV->data[2]+i)=*(in+y_length+i*2);
187 *(pFrameYUV->data[1]+i)=*(in+y_length+i*2+1);
190 pFrameYUV->format = AV_PIX_FMT_YUV420P;
191 pFrameYUV->width = yuv_width;
192 pFrameYUV->height = yuv_height;
196 av_init_packet(&enc_pkt);
197 ret = avcodec_encode_video2(pCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame);
198 av_frame_free(&pFrameYUV);
200 if (enc_got_frame == 1){
201 //LOGE("Succeed to encode frame: %5d\tsize:%5d\n", framecnt, enc_pkt.size);
203 enc_pkt.stream_index = video_st->index;
206 AVRational time_base = ofmt_ctx->streams[0]->time_base;//{ 1, 1000 };
207 AVRational r_framerate1 = {60, 2 };//{ 50, 2 };
208 AVRational time_base_q = { 1, AV_TIME_BASE };
209 //Duration between 2 frames (us)
210 int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1)); //内部时间戳
212 //enc_pkt.pts = (double)(framecnt*calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
213 enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
214 enc_pkt.dts = enc_pkt.pts;
215 enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
219 int64_t pts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q);
220 int64_t now_time = av_gettime() - start_time;
221 if (pts_time > now_time)
222 av_usleep(pts_time - now_time);
224 ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
225 av_free_packet(&enc_pkt);
230 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_play (JNIEnv *env, jobject obj, jobject obj2, jstring fname){