Preview usb camera
[rtmpclient.git] / app / src / main / jni / ai_suanzi_rtmpclient_Ffmpeg.cpp
1 //
2 // Created by Peng Li on 30/4/2018.
3 //
4 #include "ai_suanzi_rtmpclient_Ffmpeg.h"
5 #include <android/native_window.h>
6 #include <android/native_window_jni.h>
7 #include "log.h"
8
9 extern "C" {
10     #include "libavformat/avformat.h"
11     #include "libavcodec/avcodec.h"
12     #include "libswscale/swscale.h"
13     #include "libavutil/imgutils.h"
14     #include "libavutil/time.h"
15     #include "libavdevice/avdevice.h"
16 }
17
int64_t start_time;          // av_gettime() captured in init(); wall-clock origin for pacing in process()
AVFormatContext *ofmt_ctx;   // FLV output muxer context, created in init()
AVStream* video_st;          // the single video stream added to ofmt_ctx
AVCodecContext* pCodecCtx;   // H.264 encoder context
AVCodec* pCodec;             // H.264 encoder
AVPacket enc_pkt;            // packet reused for encoded output in process()
AVFrame *pFrameYUV;          // YUV420P frame handed to the encoder
25
26
27 void custom_log(void *ptr, int level, const char* fmt, va_list vl){
28
29     //To TXT file
30
31     /*FILE *fp=fopen("/storage/emulated/0/av_log.txt","a+");
32     if(fp){
33         vfprintf(fp,fmt,vl);
34         fflush(fp);
35         fclose(fp);
36     }  */
37
38
39     //To Logcat
40     LOGE(fmt, vl);
41 }
42
43
int framecnt = 0;   // frames encoded so far; drives PTS generation in process()/flush()
int yuv_width;      // frame width in pixels, set by init()
int yuv_height;     // frame height in pixels, set by init()
int y_length;       // size of the Y plane in bytes: width * height
int uv_length;      // size of each chroma plane in bytes: width * height / 4
49
50
51
52 JNIEXPORT jstring JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_getVersion (JNIEnv *env, jobject obj) {
53     jint v = avformat_version();
54     LOGE("######### Ffmpeg JNI version i= %d", v);
55
56
57     /*AVFormatContext *pFormatCtx = avformat_alloc_context();
58             avdevice_register_all();
59               av_log_set_callback(custom_log);
60         AVInputFormat *ifmt=av_find_input_format("video4linux2");
61         LOGE("===%s===", ifmt->name);
62         if(avformat_open_input(&pFormatCtx,"/dev/video0",ifmt,NULL)!=0){
63             LOGE("Couldn't open input stream.\n");
64                 return env->NewStringUTF("===== error =======");
65
66             //return -1;
67         }*/
68
69     return env->NewStringUTF("====== Ffmpeg call =======");
70 }
71
72 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_init (JNIEnv *env, jobject obj, jint width, jint height) {
73
74         //const char* out_path = "/storage/emulated/0/Movies/output.flv";
75
76     //const char* out_path = "rtmp://192.168.1.35:1935/myapp/suanzi";
77      const char* out_path = "/storage/sdcard0/output.flv";
78
79
80
81     LOGE("Ffmpeg init, width=%d, heigh=%d", width, height);
82
83         yuv_width=width;
84         yuv_height=height;
85         y_length=width*height;
86         uv_length=width*height/4;
87
88
89         av_register_all();
90
91         //output initialize
92         avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_path);
93         //output encoder initialize
94         pCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
95         if (!pCodec){
96                 LOGE("Can not find encoder!\n");
97                 return -1;
98         }
99         pCodecCtx = avcodec_alloc_context3(pCodec);
100         pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
101         pCodecCtx->width = width;
102         pCodecCtx->height = height;
103         pCodecCtx->time_base.num = 1;
104         pCodecCtx->time_base.den = 30;
105         pCodecCtx->bit_rate = 800000;
106         pCodecCtx->gop_size = 300;
107         /* Some formats want stream headers to be separate. */
108         if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
109                 pCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
110
111         //H264 codec param
112         //pCodecCtx->me_range = 16;
113         //pCodecCtx->max_qdiff = 4;
114         //pCodecCtx->qcompress = 0.6;
115         pCodecCtx->qmin = 10;
116         pCodecCtx->qmax = 51;
117         //Optional Param
118         pCodecCtx->max_b_frames = 3;
119         // Set H264 preset and tune
120         AVDictionary *param = 0;
121         av_dict_set(&param, "preset", "ultrafast", 0);
122         av_dict_set(&param, "tune", "zerolatency", 0);
123
124         if (avcodec_open2(pCodecCtx, pCodec, &param) < 0){
125                 LOGE("Failed to open encoder!\n");
126                 return -1;
127         }
128
129         //Add a new stream to output,should be called by the user before avformat_write_header() for muxing
130         video_st = avformat_new_stream(ofmt_ctx, pCodec);
131         if (video_st == NULL){
132                 return -1;
133         }
134         video_st->time_base.num = 1;
135         video_st->time_base.den = 30;
136         video_st->codec = pCodecCtx;
137
138         //Open output URL,set before avformat_write_header() for muxing
139         jint ret = 0;
140         if (( ret = avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_READ_WRITE)) < 0){
141                 LOGE("Failed to open output file! return :%d\n", ret);
142                 return -1;
143         }
144
145         //Write File Header
146         avformat_write_header(ofmt_ctx, NULL);
147
148         start_time = av_gettime();
149     return 0;
150 }
151
// Drain any frames still buffered inside the encoder (B-frame lookahead,
// zerolatency notwithstanding), mux them, and write the file trailer.
// Call once when capture stops, before close().
JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_flush (JNIEnv *env, jobject obj){
        int ret;
        int got_frame;
        AVPacket enc_pkt;
        // Encoders without CODEC_CAP_DELAY never buffer frames — nothing to drain.
        if (!(ofmt_ctx->streams[0]->codec->codec->capabilities & CODEC_CAP_DELAY))
                return 0;
        // Feed NULL frames until the encoder reports no more output.
        while (1) {
                enc_pkt.data = NULL;
                enc_pkt.size = 0;
                av_init_packet(&enc_pkt);
                ret = avcodec_encode_video2(ofmt_ctx->streams[0]->codec, &enc_pkt,
                        NULL, &got_frame);
                if (ret < 0)
                        break;
                if (!got_frame){
                        ret = 0;
                        break;
                }
                LOGE("Flush Encoder: Succeed to encode 1 frame!\tsize:%5d\n", enc_pkt.size);

                //Write PTS
                AVRational time_base = ofmt_ctx->streams[0]->time_base;//{ 1, 1000 };
                AVRational r_framerate1 = { 60, 2 };           // 30 fps expressed as 60/2
                AVRational time_base_q = { 1, AV_TIME_BASE };
                // Duration between 2 frames in AV_TIME_BASE units (microseconds).
                int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1));
                // Rescale frame-index * frame-duration into the stream's time base.
                enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
                enc_pkt.dts = enc_pkt.pts;
                enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base);

                // Convert PTS/DTS
                enc_pkt.pos = -1;
                framecnt++;
                ofmt_ctx->duration = enc_pkt.duration * framecnt;

                /* mux encoded frame; av_interleaved_write_frame takes ownership of enc_pkt */
                ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
                if (ret < 0)
                        break;
        }
        //Write file trailer
        av_write_trailer(ofmt_ctx);
    return 0;
}
197
198 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_close (JNIEnv *env, jobject obj){
199         if (video_st)
200                 avcodec_close(video_st->codec);
201         avio_close(ofmt_ctx->pb);
202         avformat_free_context(ofmt_ctx);
203     return 0;
204 }
205
206 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_process (JNIEnv *env, jobject obj, jbyteArray yuv){
207         int ret;
208         int enc_got_frame=0;
209         int i=0;
210
211     //LOGE(" process data - ffmpeg");
212         pFrameYUV = av_frame_alloc();
213         uint8_t *out_buffer = (uint8_t *)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
214         avpicture_fill((AVPicture *)pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
215
216         //安卓摄像头数据为NV21格式,此处将其转换为YUV420P格式
217         jbyte* in= (jbyte*)env->GetByteArrayElements(yuv,0);
218         memcpy(pFrameYUV->data[0],in,y_length);
219         for(i=0;i<uv_length;i++)
220         {
221                 *(pFrameYUV->data[2]+i)=*(in+y_length+i*2);
222                 *(pFrameYUV->data[1]+i)=*(in+y_length+i*2+1);
223         }
224
225         pFrameYUV->format = AV_PIX_FMT_YUV420P;
226         pFrameYUV->width = yuv_width;
227         pFrameYUV->height = yuv_height;
228
229         enc_pkt.data = NULL;
230         enc_pkt.size = 0;
231         av_init_packet(&enc_pkt);
232         ret = avcodec_encode_video2(pCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame);
233         av_frame_free(&pFrameYUV);
234
235         if (enc_got_frame == 1){
236                 //LOGE("Succeed to encode frame: %5d\tsize:%5d\n", framecnt, enc_pkt.size);
237                 framecnt++;
238                 enc_pkt.stream_index = video_st->index;
239
240                 //Write PTS
241                 AVRational time_base = ofmt_ctx->streams[0]->time_base;//{ 1, 1000 };
242                 AVRational r_framerate1 = {60, 2 };//{ 50, 2 };
243                 AVRational time_base_q = { 1, AV_TIME_BASE };
244                 //Duration between 2 frames (us)
245                 int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1));      //内部时间戳
246                 //Parameters
247                 //enc_pkt.pts = (double)(framecnt*calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
248                 enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
249                 enc_pkt.dts = enc_pkt.pts;
250                 enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
251                 enc_pkt.pos = -1;
252
253                 //Delay
254                 int64_t pts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q);
255                 int64_t now_time = av_gettime() - start_time;
256                 if (pts_time > now_time)
257                         av_usleep(pts_time - now_time);
258
259                 ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
260                 av_free_packet(&enc_pkt);
261         }
262     return 0;
263 }
264
265 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_play (JNIEnv *env, jobject obj, jobject surface, jstring fname){
266
267
268
269
270
271     LOGE("###### video play #####");
272     // char * file_name = "/storage/emulated/0/Movies/big_buck_bunny_720p_10mb.mp4";
273     const char * file_name = env->GetStringUTFChars(fname, 0);
274
275     av_register_all();
276       avdevice_register_all();
277
278
279     AVFormatContext * pFormatCtx = avformat_alloc_context();
280
281
282 //////////
283               av_log_set_callback(custom_log);
284
285      AVInputFormat *ifmt=av_find_input_format("video4linux2");
286      LOGE("===%s===", ifmt->name);
287      if(avformat_open_input(&pFormatCtx,"/dev/video0",ifmt,NULL)!=0){
288              LOGE("Couldn't open file:\n");
289              return -1; // Couldn't open file
290      }
291
292
293 ///////////
294
295 /*
296     // Open video file
297     if(avformat_open_input(&pFormatCtx, file_name, NULL, NULL)!=0) {
298
299         LOGE("Couldn't open file:%s\n", file_name);
300         return -1; // Couldn't open file
301     }
302 */
303     // Retrieve stream information
304     if(avformat_find_stream_info(pFormatCtx, NULL)<0) {
305         LOGE("Couldn't find stream information.");
306         return -1;
307     }
308
309     // Find the first video stream
310     int videoStream = -1, i;
311     for (i = 0; i < pFormatCtx->nb_streams; i++) {
312         if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO
313            && videoStream < 0) {
314             videoStream = i;
315         }
316     }
317     if(videoStream==-1) {
318         LOGE("Didn't find a video stream.");
319         return -1; // Didn't find a video stream
320     }
321
322     // Get a pointer to the codec context for the video stream
323     AVCodecContext  * pCodecCtx = pFormatCtx->streams[videoStream]->codec;
324 LOGE("============= %d ========",__LINE__);
325     // Find the decoder for the video stream
326     AVCodec * pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
327     if(pCodec==NULL) {
328         LOGE("Codec not found.");
329         return -1; // Codec not found
330     }
331
332     if(avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
333         LOGE("Could not open codec.");
334         return -1; // Could not open codec
335     }
336
337     // 获取native window
338     ANativeWindow* nativeWindow = ANativeWindow_fromSurface(env, surface);
339
340     // 获取视频宽高
341     int videoWidth = pCodecCtx->width;
342     int videoHeight = pCodecCtx->height;
343
344     // 设置native window的buffer大小,可自动拉伸
345     ANativeWindow_setBuffersGeometry(nativeWindow,  videoWidth, videoHeight, WINDOW_FORMAT_RGBA_8888);
346     ANativeWindow_Buffer windowBuffer;
347
348     if(avcodec_open2(pCodecCtx, pCodec, NULL)<0) {
349         LOGE("Could not open codec.");
350         return -1; // Could not open codec
351     }
352
353     LOGE("stream format:%s", pFormatCtx->iformat->name);
354     LOGE("duration :%lld", (pFormatCtx->duration) / 1000000);
355     LOGE("Width, Height:%d x %d", pCodecCtx->width, pCodecCtx->height);
356     LOGE("Decoder name:%s", pCodec->name);
357
358     // Allocate video frame
359     AVFrame * pFrame = av_frame_alloc();
360
361     // 用于渲染
362     AVFrame * pFrameRGBA = av_frame_alloc();
363     if(pFrameRGBA == NULL || pFrame == NULL) {
364         LOGE("Could not allocate video frame.");
365         return -1;
366     }
367
368     // Determine required buffer size and allocate buffer
369     int numBytes=av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height, 1);
370     uint8_t * buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
371     av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA,
372                          pCodecCtx->width, pCodecCtx->height, 1);
373
374     // 由于解码出来的帧格式不是RGBA的,在渲染之前需要进行格式转换
375     struct SwsContext *sws_ctx = sws_getContext(pCodecCtx->width,
376                              pCodecCtx->height,
377                              pCodecCtx->pix_fmt,
378                              pCodecCtx->width,
379                              pCodecCtx->height,
380                              AV_PIX_FMT_RGBA,
381                              SWS_BILINEAR,
382                              NULL,
383                              NULL,
384                              NULL);
385
386     int frameFinished;
387     AVPacket packet;
388     while(av_read_frame(pFormatCtx, &packet)>=0) {
389         // Is this a packet from the video stream?
390         if(packet.stream_index==videoStream) {
391
392             // Decode video frame
393             avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
394
395             // 并不是decode一次就可解码出一帧
396             if (frameFinished) {
397
398                 // lock native window buffer
399                 ANativeWindow_lock(nativeWindow, &windowBuffer, 0);
400
401                 // 格式转换
402                 sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
403                           pFrame->linesize, 0, pCodecCtx->height,
404                           pFrameRGBA->data, pFrameRGBA->linesize);
405
406                 // 获取stride
407                 uint8_t * dst = (uint8_t*) windowBuffer.bits;
408                 int dstStride = windowBuffer.stride * 4;
409                 uint8_t * src = (uint8_t*) (pFrameRGBA->data[0]);
410                 int srcStride = pFrameRGBA->linesize[0];
411
412                 // 由于window的stride和帧的stride不同,因此需要逐行复制
413                 int h;
414                 for (h = 0; h < videoHeight; h++) {
415                     memcpy(dst + h * dstStride, src + h * srcStride, srcStride);
416                 }
417
418                 ANativeWindow_unlockAndPost(nativeWindow);
419             }
420
421         }
422         av_packet_unref(&packet);
423     }
424
425     av_free(buffer);
426     av_free(pFrameRGBA);
427
428     // Free the YUV frame
429     av_free(pFrame);
430
431     // Close the codecs
432     avcodec_close(pCodecCtx);
433
434     // Close the video file
435     avformat_close_input(&pFormatCtx);
436
437      env->ReleaseStringUTFChars(fname, file_name);
438     return 0;
439 }