Fix crash issue by reducing the picture size
author: Peng Li <seudut@gmail.com>
Tue, 1 May 2018 11:52:32 +0000 (19:52 +0800)
committer: Peng Li <seudut@gmail.com>
Tue, 1 May 2018 11:52:32 +0000 (19:52 +0800)
.gitignore
.idea/caches/build_file_checksums.ser [deleted file]
app/build.gradle
app/src/main/AndroidManifest.xml
app/src/main/java/ai/suanzi/rtmpclient/MainActivity.java
app/src/main/jni/ai_suanzi_rtmpclient_Ffmpeg.cpp
app/src/main/jni/log.h
app/src/main/res/layout/activity_main.xml
app/src/main/res/values/strings.xml

index 1560cb4..a2e6bd0 100644 (file)
@@ -5,6 +5,7 @@
 /.idea/modules.xml
 /.idea/workspace.xml
 /.idea/vcs.xml
+/.idea/caches/*
 .DS_Store
 /build
 /captures
diff --git a/.idea/caches/build_file_checksums.ser b/.idea/caches/build_file_checksums.ser
deleted file mode 100644 (file)
index addf57e..0000000
Binary files a/.idea/caches/build_file_checksums.ser and /dev/null differ
index a8d1068..b4546ae 100644 (file)
@@ -5,7 +5,7 @@ android {
     defaultConfig {
         applicationId "ai.suanzi.rtmpclient"
         minSdkVersion 21
-        targetSdkVersion 27
+        targetSdkVersion 21
         versionCode 1
         versionName "1.0"
         testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
index 2c9c60c..e954746 100644 (file)
             </intent-filter>
         </activity>
     </application>
+    <uses-feature android:name="android.hardware.Camera" />
+    <uses-permission android:name="android.permission.CAMERA" />
+    <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
+    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
+    <uses-permission android:name="android.permission.INTERNET" />
 
 </manifest>
\ No newline at end of file
index 31fcbf3..6b29b83 100644 (file)
 package ai.suanzi.rtmpclient;
 
+import android.graphics.ImageFormat;
 import android.support.v7.app.AppCompatActivity;
 import android.os.Bundle;
 import android.util.Log;
+import android.view.SurfaceHolder;
+import android.hardware.Camera;
+import android.view.SurfaceView;
+import android.view.View;
+import android.widget.Button;
+import android.widget.Toast;
+import android.content.Context;
+import android.content.pm.PackageManager;
+import android.os.AsyncTask;
+import java.util.List;
+import java.io.IOException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
 
-public class MainActivity extends AppCompatActivity {
+
+
+public class MainActivity extends AppCompatActivity implements SurfaceHolder.Callback, Camera.PreviewCallback{
 
     private static final String TAG = "PENG";
     private Ffmpeg ffmpeg = new Ffmpeg();
+    private Camera mCamera ;//= getCameraInstance();
+    private StreamTask mStreamTask;
+    private SurfaceHolder mHolder;
+    ExecutorService mExecutor = Executors.newSingleThreadExecutor();
 
     @Override
     protected void onCreate(Bundle savedInstanceState) {
         super.onCreate(savedInstanceState);
         setContentView(R.layout.activity_main);
         Log.e(TAG, "Ffmpeg Version: " + ffmpeg.getVersion());
+
+        final Button btn = findViewById(R.id.button);
+        btn.setText("Start");
+        btn.setOnClickListener(new View.OnClickListener() {
+            @Override
+            public void onClick(View v) {
+                Log.e(TAG, "Button " + btn.getText() + " onClick");
+                if (mCamera == null) return;
+                if(btn.getText().equals("Start")){
+                    mCamera.startPreview();
+                    Camera.Parameters params = mCamera.getParameters();
+                    //params.setPreviewFpsRange(30000, 30000);
+                    //params.setPictureSize(320, 240);
+                    //params.setPictureFormat(ImageFormat.NV21);
+                    //mCamera.setParameters(params);
+
+
+
+
+                    ffmpeg.init(params.getPictureSize().width, params.getPictureSize().height);
+                } else {
+                    mCamera.setPreviewCallback(null);
+                    Toast.makeText(MainActivity.this, "encode done", Toast.LENGTH_SHORT).show();
+                    ffmpeg.flush();
+                    ffmpeg.close();
+                }
+                btn.setText(btn.getText().equals("Start") ? "Stop" : "Start");
+            }
+        });
+
+        this.mCamera = getCameraInstance();
+        if(checkCameraHardware(this)) Log.e(TAG, "has cameras: " + Camera.getNumberOfCameras());
+
+        final SurfaceView surfaceView = findViewById(R.id.surfaceView);
+        mHolder = surfaceView.getHolder();
+        mHolder.addCallback(this);
+
+        //
+        mCamera.setPreviewCallback(this);
+    }
+
+    @Override
+    protected void onPause(){
+        super.onPause();
+        Log.e(TAG, "OnPause");
+        //ffmpeg.flush();
+        //ffmpeg.close();
+    }
+
+    @Override
+    protected void onResume() {
+        super.onResume();
+        Log.e(TAG, "OnResume");
+    }
+
+    // class StreamTask AsyncTask
+    private class StreamTask extends AsyncTask<Void, Void, Void>{
+        private byte[] data;
+
+        StreamTask(byte[] data){
+            this.data = data;
+        }
+
+        @Override
+        protected Void doInBackground(Void... params) {
+
+            if (this.data != null){
+                Log.e(TAG, "fps: " + mCamera.getParameters().getPreviewFrameRate());
+                ffmpeg.process(this.data);
+            }
+            return null;
+        }
+    }
+
+    // SurfaceHolder.Callback implementation
+    @Override
+    public void surfaceCreated(final SurfaceHolder holder){
+        Log.e(TAG,"SurfacedCreated");
+        try {
+            mCamera.setPreviewDisplay(holder);
+            //mCamera.startPreview();
+        } catch (IOException e) {
+            Log.d(TAG, "Error setting camera preview: " + e.getMessage());
+        }
+    }
+
+    @Override
+    public void surfaceChanged(SurfaceHolder holder, int format, int widht, int height){
+        Log.e(TAG, "surfaceChanged");
+        //if(mCamera==null) return;
+        //Camera.Parameters parameters=mCamera.getParameters();
+        //parameters.setPreviewSize(640,480);
+        //parameters.setPictureSize(640,480);
+        //mCamera.setParameters(parameters);
+    }
+
+    @Override
+    public void surfaceDestroyed(SurfaceHolder holder){
+        Log.e(TAG, "surfaceDestroyed");
+    }
+
+    private long encodeCount = 0;
+    private long count = 0;
+    private long previewTime;
+    // Camera.PreviewCallback
+    @Override
+    public void  onPreviewFrame(final byte[] data, Camera camera){
+        /*if (null != mStreamTask){
+            switch (mStreamTask.getStatus()){
+                case RUNNING:
+                    Log.e(TAG, "onPreviewFrame Running");
+                    return;
+                case PENDING:
+                    Log.e(TAG,"OnPreviewFrame Pending");
+                    mStreamTask.cancel(false);
+                    break;
+            }
+        }
+        mStreamTask = new StreamTask(data);
+        mStreamTask.execute((Void)null);*/
+
+
+        long endTime = System.currentTimeMillis();
+        mExecutor.execute(new Runnable() {
+            @Override
+            public void run() {
+                long encodeTime = System.currentTimeMillis();
+                ffmpeg.process(data);
+                Log.e(TAG, "编码第:" + (encodeCount++) + "帧,耗时:" + (System.currentTimeMillis() - encodeTime));
+            }
+        });
+        Log.e(TAG, "采集第:" + (++count) + "帧,距上一帧间隔时间:"
+                + (endTime - previewTime) + "  " + Thread.currentThread().getName());
+        previewTime = endTime;
+
     }
+
+    // private
+    private boolean checkCameraHardware(Context context) {
+        return context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA);
+    }
+
+    private static Camera getCameraInstance(){
+        Camera c = null;
+        try {
+            Log.e(TAG, "Open Camera");
+            c = Camera.open(0);
+            Camera.Parameters params = c.getParameters();
+            Log.e(TAG, "Camera parameters:  " + params.getPreviewSize().width + "x" + params.getPreviewSize().height);
+            // NV21 : 17, JPEG 256
+            Log.e(TAG, "Preview format (17 is NV21): " + params.getPreviewFormat() + ". Picture format(256 is JPEG): " + params.getPictureFormat());
+            List<int[]> fps = params.getSupportedPreviewFpsRange();
+            for(int[] i : fps){
+                Log.e(TAG, "###  fps range : [" + i[0] + "," + i[1] + "]\n");
+            }
+
+
+
+            Camera.Parameters parameters = c.getParameters();
+            //List<Camera.Size> sizes = parameters.getSupportedPreviewSizes();
+            List<Camera.Size> sizes = parameters.getSupportedPictureSizes();
+            for (Camera.Size cc : sizes){
+                Log.e(TAG, "=== width: " + cc.width + ". height:" + cc.height);
+            }
+            Camera.Size cs = sizes.get(3);
+            params.setPreviewSize(cs.width, cs.height);
+            params.setPictureSize(cs.width, cs.height);
+            params.setPictureFormat(ImageFormat.NV21);
+            c.setParameters(params);
+        }
+        catch (Exception e){
+            e.printStackTrace();
+        }
+        return c;
+    }
+
 }
index 59292bd..485afea 100644 (file)
@@ -14,6 +14,20 @@ extern "C" {
     #include "libavutil/time.h"
 }
 
+int64_t start_time;
+AVFormatContext *ofmt_ctx;
+AVStream* video_st;
+AVCodecContext* pCodecCtx;
+AVCodec* pCodec;
+AVPacket enc_pkt;
+AVFrame *pFrameYUV;
+
+
+int framecnt = 0;
+int yuv_width;
+int yuv_height;
+int y_length;
+int uv_length;
 
 JNIEXPORT jstring JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_getVersion (JNIEnv *env, jobject obj) {
     jint v = avformat_version();
@@ -21,22 +35,198 @@ JNIEXPORT jstring JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_getVersion (JNIEnv *e
     return env->NewStringUTF("====== Ffmpeg call =======");
 }
 
-JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_init (JNIEnv *, jobject, jint, jint) {
+JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_init (JNIEnv *env, jobject obj, jint width, jint height) {
+
+       //const char* out_path = "/storage/emulated/0/Movies/output.flv";
+
+    const char* out_path = "rtmp://192.168.0.101:1935/myapp/suanzi";
+
+
+    LOGE("Ffmpeg init, width=%d, heigh=%d", width, height);
+
+
+       yuv_width=width;
+       yuv_height=height;
+       y_length=width*height;
+       uv_length=width*height/4;
+
+
+       av_register_all();
+
+       //output initialize
+       avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_path);
+       //output encoder initialize
+       pCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
+       if (!pCodec){
+               LOGE("Can not find encoder!\n");
+               return -1;
+       }
+       pCodecCtx = avcodec_alloc_context3(pCodec);
+       pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
+       pCodecCtx->width = width;
+       pCodecCtx->height = height;
+       pCodecCtx->time_base.num = 1;
+       pCodecCtx->time_base.den = 30;
+       pCodecCtx->bit_rate = 800000;
+       pCodecCtx->gop_size = 300;
+       /* Some formats want stream headers to be separate. */
+       if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
+               pCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
+
+       //H264 codec param
+       //pCodecCtx->me_range = 16;
+       //pCodecCtx->max_qdiff = 4;
+       //pCodecCtx->qcompress = 0.6;
+       pCodecCtx->qmin = 10;
+       pCodecCtx->qmax = 51;
+       //Optional Param
+       pCodecCtx->max_b_frames = 3;
+       // Set H264 preset and tune
+       AVDictionary *param = 0;
+       av_dict_set(&param, "preset", "ultrafast", 0);
+       av_dict_set(&param, "tune", "zerolatency", 0);
+
+       if (avcodec_open2(pCodecCtx, pCodec, &param) < 0){
+               LOGE("Failed to open encoder!\n");
+               return -1;
+       }
+
+       //Add a new stream to output,should be called by the user before avformat_write_header() for muxing
+       video_st = avformat_new_stream(ofmt_ctx, pCodec);
+       if (video_st == NULL){
+               return -1;
+       }
+       video_st->time_base.num = 1;
+       video_st->time_base.den = 30;
+       video_st->codec = pCodecCtx;
+
+       //Open output URL,set before avformat_write_header() for muxing
+       jint ret = 0;
+       if (( ret = avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_READ_WRITE)) < 0){
+               LOGE("Failed to open output file! return :%d\n", ret);
+               return -1;
+       }
+
+       //Write File Header
+       avformat_write_header(ofmt_ctx, NULL);
+
+       start_time = av_gettime();
     return 0;
 }
 
-JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_flush (JNIEnv *, jobject){
+JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_flush (JNIEnv *env, jobject obj){
+       int ret;
+       int got_frame;
+       AVPacket enc_pkt;
+       if (!(ofmt_ctx->streams[0]->codec->codec->capabilities & CODEC_CAP_DELAY))
+               return 0;
+       while (1) {
+               enc_pkt.data = NULL;
+               enc_pkt.size = 0;
+               av_init_packet(&enc_pkt);
+               ret = avcodec_encode_video2(ofmt_ctx->streams[0]->codec, &enc_pkt,
+                       NULL, &got_frame);
+               if (ret < 0)
+                       break;
+               if (!got_frame){
+                       ret = 0;
+                       break;
+               }
+               LOGE("Flush Encoder: Succeed to encode 1 frame!\tsize:%5d\n", enc_pkt.size);
+
+               //Write PTS
+               AVRational time_base = ofmt_ctx->streams[0]->time_base;//{ 1, 1000 };
+               AVRational r_framerate1 = { 60, 2 };
+               AVRational time_base_q = { 1, AV_TIME_BASE };
+               //Duration between 2 frames (us)
+               int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1));      //内部时间戳
+               //Parameters
+               enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
+               enc_pkt.dts = enc_pkt.pts;
+               enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base);
+
+               //转换PTS/DTS(Convert PTS/DTS)
+               enc_pkt.pos = -1;
+               framecnt++;
+               ofmt_ctx->duration = enc_pkt.duration * framecnt;
+
+               /* mux encoded frame */
+               ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
+               if (ret < 0)
+                       break;
+       }
+       //Write file trailer
+       av_write_trailer(ofmt_ctx);
     return 0;
 }
 
-JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_close (JNIEnv *, jobject){
+JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_close (JNIEnv *env, jobject obj){
+       if (video_st)
+               avcodec_close(video_st->codec);
+       avio_close(ofmt_ctx->pb);
+       avformat_free_context(ofmt_ctx);
     return 0;
 }
 
-JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_process (JNIEnv *, jobject, jbyteArray){
+JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_process (JNIEnv *env, jobject obj, jbyteArray yuv){
+       int ret;
+       int enc_got_frame=0;
+       int i=0;
+
+    //LOGE(" process data - ffmpeg");
+       pFrameYUV = av_frame_alloc();
+       uint8_t *out_buffer = (uint8_t *)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
+       avpicture_fill((AVPicture *)pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
+
+       //安卓摄像头数据为NV21格式,此处将其转换为YUV420P格式
+       jbyte* in= (jbyte*)env->GetByteArrayElements(yuv,0);
+       memcpy(pFrameYUV->data[0],in,y_length);
+       for(i=0;i<uv_length;i++)
+       {
+               *(pFrameYUV->data[2]+i)=*(in+y_length+i*2);
+               *(pFrameYUV->data[1]+i)=*(in+y_length+i*2+1);
+       }
+
+       pFrameYUV->format = AV_PIX_FMT_YUV420P;
+       pFrameYUV->width = yuv_width;
+       pFrameYUV->height = yuv_height;
+
+       enc_pkt.data = NULL;
+       enc_pkt.size = 0;
+       av_init_packet(&enc_pkt);
+       ret = avcodec_encode_video2(pCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame);
+       av_frame_free(&pFrameYUV);
+
+       if (enc_got_frame == 1){
+               //LOGE("Succeed to encode frame: %5d\tsize:%5d\n", framecnt, enc_pkt.size);
+               framecnt++;
+               enc_pkt.stream_index = video_st->index;
+
+               //Write PTS
+               AVRational time_base = ofmt_ctx->streams[0]->time_base;//{ 1, 1000 };
+               AVRational r_framerate1 = {60, 2 };//{ 50, 2 };
+               AVRational time_base_q = { 1, AV_TIME_BASE };
+               //Duration between 2 frames (us)
+               int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1));      //内部时间戳
+               //Parameters
+               //enc_pkt.pts = (double)(framecnt*calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
+               enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
+               enc_pkt.dts = enc_pkt.pts;
+               enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
+               enc_pkt.pos = -1;
+
+               //Delay
+               int64_t pts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q);
+               int64_t now_time = av_gettime() - start_time;
+               if (pts_time > now_time)
+                       av_usleep(pts_time - now_time);
+
+               ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
+               av_free_packet(&enc_pkt);
+       }
     return 0;
 }
 
-JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_play (JNIEnv *, jobject, jobject, jstring){
+JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_play (JNIEnv *env, jobject obj, jobject obj2, jstring fname){
     return 0;
 }
\ No newline at end of file
index ba5a36e..326a1de 100644 (file)
@@ -7,6 +7,7 @@
 
 #include <android/log.h>
 
+
 #define LOG_TAG __FILE__
 
 #define LOGE(...)  __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
index c1a2330..08749c5 100644 (file)
         app:layout_constraintRight_toRightOf="parent"
         app:layout_constraintTop_toTopOf="parent" />
 
+    <Button
+        android:id="@+id/button"
+        android:layout_width="wrap_content"
+        android:layout_height="wrap_content"
+        android:layout_marginBottom="18dp"
+        android:layout_marginEnd="149dp"
+        android:layout_marginStart="147dp"
+        android:text="Button"
+        app:layout_constraintBottom_toBottomOf="parent"
+        app:layout_constraintEnd_toEndOf="parent"
+        app:layout_constraintStart_toStartOf="parent" />
+
+    <SurfaceView
+        android:id="@+id/surfaceView"
+        android:layout_width="292dp"
+        android:layout_height="152dp"
+        android:layout_marginEnd="5dp"
+        android:layout_marginStart="5dp"
+        android:layout_marginTop="10dp"
+        app:layout_constraintEnd_toEndOf="parent"
+        app:layout_constraintStart_toStartOf="parent"
+        app:layout_constraintTop_toTopOf="parent" />
+
 </android.support.constraint.ConstraintLayout>
\ No newline at end of file
index 144cdae..29bd495 100644 (file)
@@ -1,3 +1,4 @@
 <resources>
     <string name="app_name">RtmpClient</string>
+    <string name="btn">Button</string>
 </resources>