add user input
author Peng Li <seudut@gmail.com>
Mon, 14 May 2018 20:22:40 +0000 (04:22 +0800)
committer Peng Li <seudut@gmail.com>
Mon, 14 May 2018 20:22:40 +0000 (04:22 +0800)
21 files changed:
app/build.gradle
app/src/main/java/ai/suanzi/rtmpclient/Ffmpeg.java
app/src/main/java/ai/suanzi/rtmpclient/MainActivity.java
app/src/main/java/ai/suanzi/rtmpclient/MyService.java
app/src/main/java/ai/suanzi/rtmpclient/UVCCamera.java [new file with mode: 0644]
app/src/main/java/ai/suanzi/rtmpclient/UserInfo.java [new file with mode: 0644]
app/src/main/jni/Android.mk
app/src/main/jni/Android2.mk [new file with mode: 0644]
app/src/main/jni/Application.mk
app/src/main/jni/UVCCamera.cpp [new file with mode: 0644]
app/src/main/jni/UVCCamera.h [new file with mode: 0644]
app/src/main/jni/UVCCamera/UVCCamera.cpp [new file with mode: 0644]
app/src/main/jni/UVCCamera/UVCCamera.h [new file with mode: 0644]
app/src/main/jni/ai_suanzi_rtmpclient_Ffmpeg.cpp [new file with mode: 0644]
app/src/main/jni/ai_suanzi_rtmpclient_Ffmpeg.h
app/src/main/jni/ai_suanzi_rtmpclient_UVCCamera.cpp [new file with mode: 0644]
app/src/main/jni/ai_suanzi_rtmpclient_UVCCamera.h [new file with mode: 0644]
app/src/main/jni/libuvc-0.0.6/android/jni/Android.mk
app/src/main/jni/log.h
app/src/main/res/layout/activity_main.xml
app/src/main/res/values/strings.xml

index f67efaa..6e4aebf 100644 (file)
@@ -33,4 +33,5 @@ dependencies {
     androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.1'
     implementation files('libs/log4j-1.2.17.jar')
     implementation files('libs/android-logging-log4j-1.0.3.jar')
+    implementation 'com.android.support:design:27.1.1'
 }
index eb3203f..fae8409 100644 (file)
@@ -40,7 +40,7 @@ public class Ffmpeg {
     public native int close();
     public native int process(byte[] data);
     public native int play(Object surface, String fname);
-    public native int push(Object surface);
+    public native int push(Object surface, String url);
     public native int preview(Object surface);
     public native String getPerfectDevice();
     public native int test(int fd);
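
Note on the signature change above: the RTMP target is no longer hard-coded in the native layer; the caller passes it in. A minimal Java sketch of the new call (the URL value here is hypothetical):

    Ffmpeg ffmpeg = Ffmpeg.getInstance();
    ffmpeg.init();
    // push() now receives the full RTMP URL instead of relying on a built-in default
    int rc = ffmpeg.push(null, "rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_020000000000_cam0");
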
index 4080ee2..7e54153 100644 (file)
@@ -9,6 +9,7 @@ import android.graphics.SurfaceTexture;
 import android.hardware.usb.UsbDevice;
 import android.hardware.usb.UsbManager;
 import android.os.Environment;
+import android.support.design.widget.TextInputEditText;
 import android.support.v7.app.AppCompatActivity;
 import android.os.Bundle;
 import android.util.Log;
@@ -38,6 +39,8 @@ import android.hardware.usb.UsbDeviceConnection;
 import de.mindpipe.android.logging.log4j.LogConfigurator;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
+import android.net.wifi.WifiManager;
+import android.net.wifi.WifiInfo;
 
 public class MainActivity extends AppCompatActivity implements SurfaceHolder.Callback, Camera.PreviewCallback{
 
@@ -46,6 +49,7 @@ public class MainActivity extends AppCompatActivity implements SurfaceHolder.Cal
     private Camera mCamera ;//= getCameraInstance();
     private StreamTask mStreamTask;
     private SurfaceHolder mHolder;
+    private UVCCamera uvcCamera;
     ExecutorService mExecutor = Executors.newSingleThreadExecutor();
     //Intent it = new Intent(getApplicationContext(), MyService.class);
     Intent intent = new Intent();
@@ -54,6 +58,13 @@ public class MainActivity extends AppCompatActivity implements SurfaceHolder.Cal
     private UsbDevice usbCamera;
 
     private Logger gLogger;
+
+    private Button mBtnStart;
+    private TextInputEditText mTextServer;
+    private TextInputEditText mTextUser;
+    private TextInputEditText mTextCamera;
+    private String mMacAddr = "";
+
     private void configLog(){
         try {
             final LogConfigurator logConfigurator = new LogConfigurator();
@@ -73,52 +84,29 @@ public class MainActivity extends AppCompatActivity implements SurfaceHolder.Cal
 
 
 
-
-
     @Override
     protected void onCreate(Bundle savedInstanceState) {
         super.onCreate(savedInstanceState);
         setContentView(R.layout.activity_main);
         configLog();
-        //Log.e(TAG, "Ffmpeg Version: " + ffmpeg.getVersion());
-
+        UserInfo.setConfigPath(getExternalFilesDir(null) + File.separator + "config");
+        this.mMacAddr = getMacAddr();
         gLogger.debug("#######################################");
+
         ffmpeg = Ffmpeg.getInstance();
 
         gLogger.debug(" this ia argument " + 3 + "fjeiofjei");
 
-        final Button btn = findViewById(R.id.button);
-        btn.setText("Start");
-        /*btn.setOnClickListener(new View.OnClickListener() {
-            @Override
-            public void onClick(View v) {
-                Log.e(TAG, "Button " + btn.getText() + " onClick");
-                if (mCamera == null) return;
-                if(btn.getText().equals("Start")){
-                    mCamera.startPreview();
-                    Camera.Parameters params = mCamera.getParameters();
-                    //params.setPreviewFpsRange(30000, 30000);
-                    //params.setPictureSize(320, 240);
-                    //params.setPictureFormat(ImageFormat.NV21);
-                    //mCamera.setParameters(params);
-
+        uvcCamera = new UVCCamera();
 
+        mBtnStart = findViewById(R.id.button);
+        mTextServer = findViewById(R.id.textServer);
+        mTextUser = findViewById(R.id.textUser);
+        mTextCamera = findViewById(R.id.textCamera);
 
+        loadConfig();
 
-                    ffmpeg.init(params.getPictureSize().width, params.getPictureSize().height);
-                } else {
-                    mCamera.setPreviewCallback(null);
-                    Toast.makeText(MainActivity.this, "encode done", Toast.LENGTH_SHORT).show();
-                    ffmpeg.flush();
-                    ffmpeg.close();
-                }
-                btn.setText(btn.getText().equals("Start") ? "Stop" : "Start");
-            }
-        });*/
 
-        /*this.mCamera = getCameraInstance();
-        if(checkCameraHardware(this)) Log.e(TAG, "has cameras: " + Camera.getNumberOfCameras());
-*/
         final SurfaceView surfaceView = findViewById(R.id.surfaceView);
         mHolder = surfaceView.getHolder();
         mHolder.addCallback(this);
@@ -127,21 +115,19 @@ public class MainActivity extends AppCompatActivity implements SurfaceHolder.Cal
         intent.setAction("ai.suanzi.rtmpclient.service");
 
 
-        final Button btn2 = findViewById(R.id.button);
-
 
-        btn2.setOnClickListener(new View.OnClickListener(){
+        final String macAddr = getMacAddr();
+        final Context context = getApplicationContext();
+        mBtnStart.setOnClickListener(new View.OnClickListener(){
             @Override
             public void onClick(View view){
                 Log.e(TAG, "onclick2");
-                //ffmpeg.play(mHolder.getSurface(),"/storage/sdcard0/output.flv");
-                //ffmpeg.push(mHolder.getSurface());
-                //ffmpeg.preview(mHolder.getSurface());
-                // intent.putExtra("cmd",0);//0,开启前台服务,1,关闭前台服务
-               // startService(intent);
-                ffmpeg.getVersion();
+                String url = mTextServer.getText().toString() + "/" + mTextUser.getText().toString() + "_" + macAddr + "_" + mTextCamera.getText().toString();
+                gLogger.error("Url " + url);
+                intent.putExtra("url", url);
+                startService(intent);
+                saveConfig();
             }
-
         });
 
         /*btn2.post(new Runnable(){
@@ -153,7 +139,7 @@ public class MainActivity extends AppCompatActivity implements SurfaceHolder.Cal
         });
 
         switchToBackground();*/
-/*
+
 
         usbManager = (UsbManager) getSystemService(Context.USB_SERVICE);
 
@@ -184,7 +170,7 @@ public class MainActivity extends AppCompatActivity implements SurfaceHolder.Cal
             //device.getInterface()
         }
 
-*/
+
 
 
 
@@ -214,7 +200,10 @@ public class MainActivity extends AppCompatActivity implements SurfaceHolder.Cal
                                     Log.e("device","MyThread3");//bhw
                                     UsbDeviceConnection connection = usbManager.openDevice(usbCamera);
                                     int fd = connection.getFileDescriptor();// get the USB device file descriptor
-                                    ffmpeg.test(fd);
+                                    //ffmpeg.test(fd);
+                                    int ret = uvcCamera.open();
+                                    Log.e(TAG, "uvcCamera open return code:" + ret);
+
                                     Log.e("device","MyThread3  "+fd);
                                 }
                             }).start();
@@ -426,24 +415,6 @@ public class MainActivity extends AppCompatActivity implements SurfaceHolder.Cal
         return c;
     }
 
-//    private void changeCameraPermission(){
-//        Log.e(TAG, "change /dev/video0 permission");
-//        try{
-//            /*Process sh = Runtime.getRuntime().exec("su", null,null);
-//            OutputStream  os = sh.getOutputStream();
-//            os.write(("chmod 666 /dev/video0").getBytes("ASCII"));
-//            os.flush();
-//            os.close();
-//            sh.waitFor();*/
-//
-//            Process sh = Runtime.getRuntime().exec(new String[]{"su", "-c", "system/bin/sh"});
-//            sh = Runtime.getRuntime().exec(new String[]{"chmod", "666", "/dev/video0"});
-//            sh.waitFor();
-//        } catch (Exception e){
-//            e.printStackTrace();
-//        }
-//    }
-
     private void switchToBackground(){
         Intent i = new Intent();
         i.setAction(Intent.ACTION_MAIN);
@@ -451,4 +422,27 @@ public class MainActivity extends AppCompatActivity implements SurfaceHolder.Cal
         this.startActivity(i);
     }
 
+    private String getMacAddr() {
+        WifiManager manager = (WifiManager) getApplicationContext().getSystemService(Context.WIFI_SERVICE);
+        WifiInfo info = manager.getConnectionInfo();
+        return info.getMacAddress().replace(":", ""); //02:00:00:00:00:00 - 020000000000
+    }
+
+    private void loadConfig() {
+        UserInfo info = UserInfo.getConfig();
+        mTextServer.setText(info.server);
+        mTextUser.setText(info.user);
+        mTextCamera.setText(info.cameraId);
+        mMacAddr = info.macAddr;
+        gLogger.error("loadConfig " + info.toString());
+    }
+
+    private void saveConfig() {
+        UserInfo info = UserInfo.getConfig();
+        info.update(mTextServer.getText().toString(), mTextUser.getText().toString(), mMacAddr, mTextCamera.getText().toString());
+        if(info.saveConfig()) {
+            gLogger.error("saveConfig: " + info.toString());
+            Toast.makeText(getApplicationContext(), "Writing config succeeded", Toast.LENGTH_LONG).show();
+        }
+    }
 }
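
The click handler above composes the stream URL as server + "/" + user + "_" + macAddr + "_" + cameraId before handing it to MyService. Note that since Android 6.0, WifiInfo.getMacAddress() returns the constant 02:00:00:00:00:00, so the stripped value may be 020000000000 rather than the real hardware address. A sketch of the resulting URL with hypothetical field values:

    // Hypothetical inputs typed into the three TextInputEditText fields:
    //   server   = rtmp://gpussh.suanzi.ai:1935/myapp
    //   user     = suanzi
    //   cameraId = camera0
    // macAddr read from WifiInfo with colons stripped: 020000000000
    String url = "rtmp://gpussh.suanzi.ai:1935/myapp" + "/" + "suanzi" + "_" + "020000000000" + "_" + "camera0";
    // -> rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_020000000000_camera0
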
index 768d95d..c2886ff 100644 (file)
@@ -17,24 +17,26 @@ public class MyService extends Service {
     private static final String TAG = "MyService";
     private Ffmpeg ffmpeg = Ffmpeg.getInstance();
     private  Boolean isRunning = false;
-
-    private Runnable runnable = new Runnable() {
+    //private String url = "rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_ac83f34ead90_cameraid";
+    private FfmpegRunnable  runnable;
+    private class FfmpegRunnable implements Runnable {
+        private String url;
+        public FfmpegRunnable(String _url){
+            this.url = _url;
+        }
         @Override
-        public void run() {
-            Log.e(TAG, "Run ffmpeg");
+        public void run(){
+            Log.e(TAG, "Run ffmpeg, url: " + url);
             isRunning = true;
-            ffmpeg.push(null);
+            ffmpeg.push(null, this.url);
         }
-    };
-
+    }
 
     /**
      * The id must not be 0, otherwise the service cannot be promoted to a foreground service
      */
     private static final int NOTIFICATION_DOWNLOAD_PROGRESS_ID = 0x0001;
 
-    //private boolean isRemove=false;//是否需要移除
-
     /**
      * Notification
      */
@@ -92,26 +94,16 @@ public class MyService extends Service {
     @Override
     public int onStartCommand(Intent intent, int flags, int startId) {
         Log.e(TAG, "onStartCommand");
+
+        String url = intent.getExtras().getString("url");
+        Log.e(TAG, "Url is: " + url);
+        runnable = new FfmpegRunnable(url);
+        //this.url = url;
         if (!isRunning) {
             createNotification();
-            Toast.makeText(this, "Ffmpeg started", Toast.LENGTH_LONG).show();
+            Toast.makeText(this, "Video stream pushed to " + url, Toast.LENGTH_LONG).show();
             new Thread(runnable).start();
         }
-
-//        int i=intent.getExtras().getInt("cmd");
-//        if(i==0){
-//            if(!isRemove) {
-//
-//                createNotification();
-//            }
-//            isRemove=true;
-//        }else {
-//            //移除前台服务
-//            if (isRemove) {
-//                stopForeground(true);
-//            }
-//            isRemove=false;
-//        }
         //super.onStartCommand(intent, flags, startId);
         return START_STICKY;
     }
@@ -121,5 +113,4 @@ public class MyService extends Service {
         super.onLowMemory();
         Log.e(TAG, "onLowMemory");
     }
-
 }
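
onStartCommand above reads the "url" extra directly from the starting Intent. Because the service returns START_STICKY, the system may later restart it with a null Intent; a defensive sketch (an assumption, not part of this commit) of how the extra could be read safely inside onStartCommand:

    // Sketch only: tolerate a null Intent/extras on a START_STICKY restart
    String url = null;
    if (intent != null && intent.getExtras() != null) {
        url = intent.getExtras().getString("url");
    }
    if (url == null) {
        return START_STICKY; // nothing to stream yet; wait for the next explicit start
    }
    runnable = new FfmpegRunnable(url);
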
diff --git a/app/src/main/java/ai/suanzi/rtmpclient/UVCCamera.java b/app/src/main/java/ai/suanzi/rtmpclient/UVCCamera.java
new file mode 100644 (file)
index 0000000..131c938
--- /dev/null
@@ -0,0 +1,21 @@
+package ai.suanzi.rtmpclient;
+
+import android.util.Log;
+
+public class UVCCamera {
+
+    static {
+        System.loadLibrary("usb1.0");
+        System.loadLibrary("uvc");
+        //System.loadLibrary("UVCCamera");
+    }
+
+    public UVCCamera(){
+        Log.e("UVC", " uvc camera");
+        init();
+    }
+
+    public native int open();
+    public native void init();
+
+}
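
The new wrapper loads libusb1.0 and libuvc and exposes the native init()/open() calls used from MainActivity above; a minimal usage sketch:

    UVCCamera camera = new UVCCamera(); // the constructor already calls the native init()
    int rc = camera.open();             // a negative value is a libuvc error code, 1 means the device was opened
    Log.e("UVC", "uvcCamera open returned " + rc);
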
diff --git a/app/src/main/java/ai/suanzi/rtmpclient/UserInfo.java b/app/src/main/java/ai/suanzi/rtmpclient/UserInfo.java
new file mode 100644 (file)
index 0000000..00e5e23
--- /dev/null
@@ -0,0 +1,109 @@
+package ai.suanzi.rtmpclient;
+
+import android.content.Context;
+import android.util.Log;
+import android.widget.Toast;
+
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileReader;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.InputStream;
+
+import org.json.JSONException;
+import org.json.JSONObject;
+
+public class UserInfo {
+
+    public String server = "";
+    public String user = "suanzi";
+    public String macAddr = "ac83f34ead90";
+    public String cameraId = "cameraId";
+    private static String configPath;
+
+    private static UserInfo instance = null;
+    private UserInfo () {}
+
+    public static void setConfigPath(String fname){
+        configPath = fname;
+    }
+
+    public static UserInfo getConfig() {
+        if (instance != null) return instance;
+        File file = new File(configPath);
+        StringBuilder text = new StringBuilder();
+        try {
+            BufferedReader br = new BufferedReader(new FileReader(file));
+            String line;
+            while((line = br.readLine()) != null){
+                text.append(line);
+            }
+            br.close();
+        }catch (IOException e){
+            e.printStackTrace();
+        }
+
+        String json = text.toString();
+
+        UserInfo info = new UserInfo();
+
+        try {
+            JSONObject jobj = new JSONObject(json);
+            info.server = jobj.getString("server");
+            info.user = jobj.getString("user");
+            info.macAddr = jobj.getString("macAddr");
+            info.cameraId = jobj.getString("cameraId");
+        } catch (JSONException e){
+            e.printStackTrace();
+        }
+        instance = info;
+        return info;
+    }
+
+    public boolean saveConfig() {
+
+        String jstring = toString();
+
+        Log.e("Config", "xxxxxxxxx "  + jstring);
+
+        File file = new File(configPath);
+        try{
+            BufferedWriter bw = new BufferedWriter(new FileWriter(file));
+            bw.write(jstring);
+            bw.close();
+        } catch (IOException e){
+            e.printStackTrace();
+            return false;
+        }
+        return true;
+    }
+
+    public void update(String server, String user, String macAddr, String cameraId) {
+        this.server = server;
+        this.user = user;
+        this.macAddr = macAddr;
+        this.cameraId = cameraId;
+    }
+
+    public String toString () {
+        JSONObject obj = toJsonObj();
+        if (obj.equals(null)) return "";
+        return obj.toString();
+    }
+
+    private JSONObject toJsonObj () {
+        try {
+            JSONObject obj = new JSONObject();
+            obj.put("server", this.server);
+            obj.put("user", this.user);
+            obj.put("macAddr", this.macAddr);
+            obj.put("cameraId", this.cameraId);
+            return obj;
+        } catch (JSONException e) {
+            e.printStackTrace();
+            return null;
+        }
+    }
+}
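
UserInfo persists the four fields as a single flat JSON object in the file set via setConfigPath() (MainActivity points it at getExternalFilesDir(null) + "/config"). With the class defaults above, the stored content looks like this (key order may vary):

    {"server":"","user":"suanzi","macAddr":"ac83f34ead90","cameraId":"cameraId"}

On the very first run the file does not exist yet; getConfig() then prints the IOException/JSONException stack traces and simply returns the defaults.
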
index 9837ea0..0e63b81 100644 (file)
@@ -1,6 +1,110 @@
-MY_LOCAL_PATH := $(call my-dir)
-include $(MY_LOCAL_PATH)/libusb-1.0.22/android/jni/Android.mk
-include $(MY_LOCAL_PATH)/libuvc-0.0.6/android/jni/Android.mk
-include $(MY_LOCAL_PATH)/UVCCamera/Android.mk
+JNI_PATH := $(call my-dir)
+include $(JNI_PATH)/libusb-1.0.22/android/jni/Android.mk
+include $(JNI_PATH)/libuvc-0.0.6/android/jni/Android.mk
 
 
+#include $(JNI_PATH)/UVCCamera/Android.mk
+#include $(JNI_PATH)/Android2.mk
+
+
+
+##############
+#LOCAL_PATH := $(call my-dir)
+LOCAL_PATH := $(JNI_PATH)
+
+#$(warning $(LOCAL_PATH))
+FFMPEG_DIR := $(abspath $(LOCAL_PATH)/ffmpeg-3.0.11)
+UVC_DIR := $(abspath $(LOCAL_PATH)/libuvc-0.0.6)
+
+
+
+# avdevice
+include $(CLEAR_VARS)
+LOCAL_MODULE:= libavdevice
+LOCAL_SRC_FILES:= $(FFMPEG_DIR)/lib/$(TARGET_ARCH)/libavdevice.so
+LOCAL_EXPORT_C_INCLUDES:= $(FFMPEG_DIR)/include
+include $(PREBUILT_SHARED_LIBRARY)
+
+# avcodec
+include $(CLEAR_VARS)
+LOCAL_MODULE:= libavcodec
+LOCAL_SRC_FILES:= $(FFMPEG_DIR)/lib/$(TARGET_ARCH)/libavcodec.so
+LOCAL_EXPORT_C_INCLUDES:= $(FFMPEG_DIR)/include
+include $(PREBUILT_SHARED_LIBRARY)
+
+# avformat
+include $(CLEAR_VARS)
+LOCAL_MODULE:= libavformat
+LOCAL_SRC_FILES:= $(FFMPEG_DIR)/lib/$(TARGET_ARCH)/libavformat.so
+LOCAL_EXPORT_C_INCLUDES:= $(FFMPEG_DIR)/include
+include $(PREBUILT_SHARED_LIBRARY)
+
+# avfilter
+include $(CLEAR_VARS)
+LOCAL_MODULE:= libavfilter
+LOCAL_SRC_FILES:= $(FFMPEG_DIR)/lib/$(TARGET_ARCH)/libavfilter.so
+#LOCAL_EXPORT_C_INCLUDES:= $(FFMPEG_DIR)/include
+include $(PREBUILT_SHARED_LIBRARY)
+
+# swresample
+include $(CLEAR_VARS)
+LOCAL_MODULE:= libswresample
+LOCAL_SRC_FILES:= $(FFMPEG_DIR)/lib/$(TARGET_ARCH)/libswresample.so
+#LOCAL_EXPORT_C_INCLUDES:= $(FFMPEG_DIR)/include
+include $(PREBUILT_SHARED_LIBRARY)
+
+# swscale
+include $(CLEAR_VARS)
+LOCAL_MODULE:= libswscale
+LOCAL_SRC_FILES:= $(FFMPEG_DIR)/lib/$(TARGET_ARCH)/libswscale.so
+#LOCAL_EXPORT_C_INCLUDES:= $(FFMPEG_DIR)/include
+include $(PREBUILT_SHARED_LIBRARY)
+
+# avutil
+include $(CLEAR_VARS)
+LOCAL_MODULE:= libavutil
+LOCAL_SRC_FILES:= $(FFMPEG_DIR)/lib/$(TARGET_ARCH)/libavutil.so
+#LOCAL_EXPORT_C_INCLUDES:= $(FFMPEG_DIR)/include
+include $(PREBUILT_SHARED_LIBRARY)
+
+ifeq ($(TARGET_ARCH),x86)
+# x264
+include $(CLEAR_VARS)
+LOCAL_MODULE:= libx264
+LOCAL_SRC_FILES:= $(FFMPEG_DIR)/lib/$(TARGET_ARCH)/libx264.so
+#LOCAL_EXPORT_C_INCLUDES:= $(FFMPEG_DIR)/include
+include $(PREBUILT_SHARED_LIBRARY)
+endif
+
+# postproc
+include $(CLEAR_VARS)
+LOCAL_MODULE:= libpostproc
+LOCAL_SRC_FILES:= $(FFMPEG_DIR)/lib/$(TARGET_ARCH)/libpostproc.so
+#LOCAL_EXPORT_C_INCLUDES:= $(FFMPEG_DIR)/include
+include $(PREBUILT_SHARED_LIBRARY)
+
+
+
+# ffmpegjni
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := ffmpeg-jni
+LOCAL_SRC_FILES := ai_suanzi_rtmpclient_Ffmpeg.cpp \
+                   ai_suanzi_rtmpclient_UVCCamera.cpp \
+                   UVCCamera.cpp
+
+LOCAL_C_INCLUDES := $(UVC_DIR)/include
+ifeq ($(TARGET_ARCH),x86)
+LOCAL_SHARED_LIBRARIES := avdevice avcodec avformat avfilter swresample swscale avutil postproc x264 uvc
+else
+LOCAL_SHARED_LIBRARIES := avdevice avcodec avformat avfilter swresample swscale avutil postproc uvc
+endif
+LOCAL_CFLAGS := -D__ANDROID_API__=21
+#LOCAL_CFLAGS += -I$(LOCAL_PATH)/../ffmpeg-3.0.11/include
+#LOCAL_CFLAGS += -I$(FFMPEG_DIR)/include
+#LOCAL_CFLAGS += -Ijni/ffmpeg-3.0.11/include
+
+
+LOCAL_LDLIBS :=-llog -landroid
+include $(BUILD_SHARED_LIBRARY)
+
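
The ffmpeg-jni module above links against the prebuilt FFmpeg shared objects and libuvc; on older Android releases the linker does not resolve those dependencies automatically, so the Java side typically loads them explicitly before ffmpeg-jni. A minimal sketch of a load order consistent with the modules declared here; this static block is an assumption and is not part of the commit:

    static {
        // prebuilt FFmpeg libraries declared in Android.mk, leaf dependencies first
        System.loadLibrary("avutil");
        System.loadLibrary("swresample");
        System.loadLibrary("swscale");
        System.loadLibrary("avcodec");
        System.loadLibrary("avformat");
        System.loadLibrary("avfilter");
        System.loadLibrary("avdevice");
        System.loadLibrary("postproc");
        // JNI glue built by this Android.mk
        System.loadLibrary("ffmpeg-jni");
    }
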
diff --git a/app/src/main/jni/Android2.mk b/app/src/main/jni/Android2.mk
new file mode 100644 (file)
index 0000000..cf5e691
--- /dev/null
@@ -0,0 +1,91 @@
+LOCAL_PATH := $(call my-dir)
+$(warning $(LOCAL_PATH))
+FFMPEG_DIR := $(abspath $(LOCAL_PATH)/../ffmpeg-3.0.11)
+#$(warning $(FFMPEG_DIR))
+
+
+# avdevice
+include $(CLEAR_VARS)
+LOCAL_MODULE:= libavdevice
+LOCAL_SRC_FILES:= $(FFMPEG_DIR)/lib/$(TARGET_ARCH)/libavdevice.so
+LOCAL_EXPORT_C_INCLUDES:= $(FFMPEG_DIR)/include
+include $(PREBUILT_SHARED_LIBRARY)
+
+# avcodec
+include $(CLEAR_VARS)
+LOCAL_MODULE:= libavcodec
+LOCAL_SRC_FILES:= $(FFMPEG_DIR)/lib/$(TARGET_ARCH)/libavcodec.so
+LOCAL_EXPORT_C_INCLUDES:= $(FFMPEG_DIR)/include
+include $(PREBUILT_SHARED_LIBRARY)
+
+# avformat
+include $(CLEAR_VARS)
+LOCAL_MODULE:= libavformat
+LOCAL_SRC_FILES:= $(FFMPEG_DIR)/lib/$(TARGET_ARCH)/libavformat.so
+LOCAL_EXPORT_C_INCLUDES:= $(FFMPEG_DIR)/include
+include $(PREBUILT_SHARED_LIBRARY)
+
+# avfilter
+include $(CLEAR_VARS)
+LOCAL_MODULE:= libavfilter
+LOCAL_SRC_FILES:= $(FFMPEG_DIR)/lib/$(TARGET_ARCH)/libavfilter.so
+#LOCAL_EXPORT_C_INCLUDES:= $(FFMPEG_DIR)/include
+include $(PREBUILT_SHARED_LIBRARY)
+
+# swresample
+include $(CLEAR_VARS)
+LOCAL_MODULE:= libswresample
+LOCAL_SRC_FILES:= $(FFMPEG_DIR)/lib/$(TARGET_ARCH)/libswresample.so
+#LOCAL_EXPORT_C_INCLUDES:= $(FFMPEG_DIR)/include
+include $(PREBUILT_SHARED_LIBRARY)
+
+# swscale
+include $(CLEAR_VARS)
+LOCAL_MODULE:= libswscale
+LOCAL_SRC_FILES:= $(FFMPEG_DIR)/lib/$(TARGET_ARCH)/libswscale.so
+#LOCAL_EXPORT_C_INCLUDES:= $(FFMPEG_DIR)/include
+include $(PREBUILT_SHARED_LIBRARY)
+
+# avutil
+include $(CLEAR_VARS)
+LOCAL_MODULE:= libavutil
+LOCAL_SRC_FILES:= $(FFMPEG_DIR)/lib/$(TARGET_ARCH)/libavutil.so
+#LOCAL_EXPORT_C_INCLUDES:= $(FFMPEG_DIR)/include
+include $(PREBUILT_SHARED_LIBRARY)
+
+ifeq ($(TARGET_ARCH),x86)
+# x264
+include $(CLEAR_VARS)
+LOCAL_MODULE:= libx264
+LOCAL_SRC_FILES:= $(FFMPEG_DIR)/lib/$(TARGET_ARCH)/libx264.so
+#LOCAL_EXPORT_C_INCLUDES:= $(FFMPEG_DIR)/include
+include $(PREBUILT_SHARED_LIBRARY)
+endif
+
+# postproc
+include $(CLEAR_VARS)
+LOCAL_MODULE:= libpostproc
+LOCAL_SRC_FILES:= $(FFMPEG_DIR)/lib/$(TARGET_ARCH)/libpostproc.so
+#LOCAL_EXPORT_C_INCLUDES:= $(FFMPEG_DIR)/include
+include $(PREBUILT_SHARED_LIBRARY)
+
+
+
+# ffmpegjni
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := ffmpeg-jni
+LOCAL_SRC_FILES := ai_suanzi_rtmpclient_Ffmpeg.cpp
+ifeq ($(TARGET_ARCH),x86)
+LOCAL_SHARED_LIBRARIES := avdevice avcodec avformat avfilter swresample swscale avutil postproc x264
+else
+LOCAL_SHARED_LIBRARIES := avdevice avcodec avformat avfilter swresample swscale avutil postproc
+endif
+LOCAL_CFLAGS := -D__ANDROID_API__=21
+#LOCAL_CFLAGS += -I$(LOCAL_PATH)/../ffmpeg-3.0.11/include
+#LOCAL_CFLAGS += -I$(FFMPEG_DIR)/include
+#LOCAL_CFLAGS += -Ijni/ffmpeg-3.0.11/include
+
+
+LOCAL_LDLIBS :=-llog -landroid
+include $(BUILD_SHARED_LIBRARY)
\ No newline at end of file
index af9052a..5965130 100644 (file)
@@ -1 +1 @@
-APP_ABI := armeabi-v7a
\ No newline at end of file
+APP_ABI := armeabi-v7a x86
\ No newline at end of file
diff --git a/app/src/main/jni/UVCCamera.cpp b/app/src/main/jni/UVCCamera.cpp
new file mode 100644 (file)
index 0000000..c7c5146
--- /dev/null
@@ -0,0 +1,75 @@
+//
+// Created by Peng Li on 14/5/2018.
+//
+
+#include "UVCCamera.h"
+//#include <jni.h>
+#include "log.h"
+#include "libuvc/libuvc.h"
+
+
+
+UVCCamera::~UVCCamera()
+{
+}
+
+//jint UVCCamera::nativeOnLoad(JavaVM *jvm, void* reserved)
+//{    
+    // N.B. within the context of a JVM thread here
+    // get a JNIEnv by attempting to get it directly
+//    JNIEnv *env;
+//    if (jvm) jvm->GetEnv (reinterpret_cast<void**>(&env), JNI_VERSION_1_2);
+
+    // get a handle to the Java class in the JVM 
+    // and cache it because FindClass doesn't work in native worker threads
+    // http://developer.android.com/training/articles/perf-jni.html#faq_FindClass
+//    jclass local_ref = 0;
+//    if (env) local_ref = env->FindClass ("com/cisco/ecc/testapp/EccTestApp");
+//    jclass global_ref = reinterpret_cast<jclass> (env->NewGlobalRef (local_ref));
+
+    // Create the singleton
+//    singleton.reset (new EccTestApp (jvm, global_ref));
+
+    // return the JNI version needed. I think we can basically return anything other than 1.1 here
+    // unless it turns out we need some specific JNI methods from a later version
+//    return JNI_VERSION_1_2;
+ //   return 0;
+//}
+
+void UVCCamera::init()
+{
+    LOGE("UVCCamera::init");
+}
+
+int UVCCamera::open()
+{
+    LOGE("UVCCamera::open");
+
+    uvc_context_t *ctx;
+    uvc_device_t *dev;
+    uvc_device_handle_t *devh;
+    uvc_error_t res;
+
+    if((res = uvc_init(&ctx, NULL)) < 0){
+        uvc_perror(res, "unc_init");
+        return res;
+    }
+    LOGE("UVC Initialized");
+
+    if ((res = uvc_find_device(ctx, &dev, 0, 0, NULL)) < 0){
+        uvc_perror(res, "uvc_find_device");
+        return res;
+    }
+    LOGE("Device Found");
+    
+    if ((res = uvc_open(dev, &devh)) < 0){
+        uvc_perror(res, "uvc_open");
+        return res;
+    }
+
+    LOGE("Device Opened");
+    uvc_print_diag(devh, stderr);
+
+    uvc_exit(ctx);
+    return 1;
+}
diff --git a/app/src/main/jni/UVCCamera.h b/app/src/main/jni/UVCCamera.h
new file mode 100644 (file)
index 0000000..fbffa9d
--- /dev/null
@@ -0,0 +1,19 @@
+//
+// Created by Peng Li on 14/5/2018.
+//
+
+#ifndef RTMPCLIENT_UVCCAMERA_H
+#define RTMPCLIENT_UVCCAMERA_H
+
+class UVCCamera {
+public: 
+    virtual ~UVCCamera();
+
+public:
+    //static jint nativeOnLoad(JavaVM *jvm, void* reserved);
+    static void init();
+    static int open();
+
+};
+
+#endif //RTMPCLIENT_UVCCAMERA_H
diff --git a/app/src/main/jni/UVCCamera/UVCCamera.cpp b/app/src/main/jni/UVCCamera/UVCCamera.cpp
new file mode 100644 (file)
index 0000000..8586e1b
--- /dev/null
@@ -0,0 +1,5 @@
+//
+// Created by Peng Li on 14/5/2018.
+//
+
+#include "UVCCamera.h"
diff --git a/app/src/main/jni/UVCCamera/UVCCamera.h b/app/src/main/jni/UVCCamera/UVCCamera.h
new file mode 100644 (file)
index 0000000..4b0bb26
--- /dev/null
@@ -0,0 +1,8 @@
+//
+// Created by Peng Li on 14/5/2018.
+//
+
+#ifndef RTMPCLIENT_UVCCAMERA_H
+#define RTMPCLIENT_UVCCAMERA_H
+
+#endif //RTMPCLIENT_UVCCAMERA_H
diff --git a/app/src/main/jni/ai_suanzi_rtmpclient_Ffmpeg.cpp b/app/src/main/jni/ai_suanzi_rtmpclient_Ffmpeg.cpp
new file mode 100644 (file)
index 0000000..cba5938
--- /dev/null
@@ -0,0 +1,1062 @@
+//
+// Created by Peng Li on 30/4/2018.
+//
+#include "ai_suanzi_rtmpclient_Ffmpeg.h"
+#include <android/native_window.h>
+#include <android/native_window_jni.h>
+#include "log.h"
+#include <stdlib.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <limits.h>
+#include <unistd.h>
+
+extern "C" {
+    #include "libavformat/avformat.h"
+    #include "libavcodec/avcodec.h"
+    #include "libswscale/swscale.h"
+    #include "libavutil/imgutils.h"
+    #include "libavutil/time.h"
+    #include "libavdevice/avdevice.h"
+}
+
+int64_t start_time;
+AVFormatContext *ofmt_ctx;
+AVStream* video_st;
+AVCodecContext* pCodecCtx;
+AVCodec* pCodec;
+AVPacket enc_pkt;
+AVFrame *pFrameYUV;
+
+
+void custom_log(void *ptr, int level, const char* fmt, va_list vl){
+    //To TXT file
+    /*FILE *fp=fopen("/storage/emulated/0/av_log.txt","a+");
+    if(fp){
+    vfprintf(fp,fmt,vl);
+    fflush(fp);
+    fclose(fp);
+    }  */
+    //To Logcat
+    // LOGE(fmt, vl);
+    static int print_prefix = 1;
+    //static char prev[1024];
+    char line[1024];
+
+    av_log_format_line(ptr, level, fmt, vl, line, sizeof(line), &print_prefix);
+
+    //strcpy(prev, line);
+    //sanitize((uint8_t *)line);
+
+    if (level <= AV_LOG_WARNING){
+        LOGE("%s", line);
+    } else {
+        LOGE("%s", line);
+    }
+}
+
+
+int framecnt = 0;
+int yuv_width;
+int yuv_height;
+int y_length;
+int uv_length;
+
+JNIEXPORT void JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_init__ (JNIEnv *env, jobject obj ){
+    LOGE("########## Ffmpeg Init ##########");
+    unsigned int v = avutil_version();
+    LOGE("libavutil - %d.%d.%d", AV_VERSION_MAJOR(v), AV_VERSION_MINOR(v), AV_VERSION_MICRO(v));
+    v = avcodec_version();
+    LOGE("libavcodec - %d.%d.%d", AV_VERSION_MAJOR(v), AV_VERSION_MINOR(v), AV_VERSION_MICRO(v));
+    v = avformat_version();
+    LOGE("libavformat - %d.%d.%d", AV_VERSION_MAJOR(v), AV_VERSION_MINOR(v), AV_VERSION_MICRO(v));
+    v = avdevice_version();
+    LOGE("libavdevice - %d.%d.%d", AV_VERSION_MAJOR(v), AV_VERSION_MINOR(v), AV_VERSION_MICRO(v));
+
+    av_log_set_level(AV_LOG_TRACE);
+    av_register_all();
+    avdevice_register_all();
+    avformat_network_init();
+    av_log_set_callback(custom_log);
+}
+
+
+JNIEXPORT jstring JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_getVersion (JNIEnv *env, jobject obj) {
+    jint v = avformat_version();
+        LOGE("######### Ffmpeg JNI version i= %d", v);
+
+        system("su -c chmod 666 /dev/video0");
+
+    LOGE("######### Ffmpeg JNI version i= %d", v);
+
+
+    /*AVFormatContext *pFormatCtx = avformat_alloc_context();
+            avdevice_register_all();
+              av_log_set_callback(custom_log);
+        AVInputFormat *ifmt=av_find_input_format("video4linux2");
+        LOGE("===%s===", ifmt->name);
+        if(avformat_open_input(&pFormatCtx,"/dev/video0",ifmt,NULL)!=0){
+            LOGE("Couldn't open input stream.\n");
+                return env->NewStringUTF("===== error =======");
+
+            //return -1;
+        }*/
+
+    return env->NewStringUTF("====== Ffmpeg call =======");
+}
+
+JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_init (JNIEnv *env, jobject obj, jint width, jint height) {
+
+       //const char* out_path = "/storage/emulated/0/Movies/output.flv";
+
+    //const char* out_path = "rtmp://192.168.1.35:1935/myapp/suanzi";
+    const char* out_path = "rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_ac83f34ead90_cameraid";
+
+    // const char* out_path = "/storage/sdcard0/output.flv";
+
+
+
+    LOGE("Ffmpeg init, width=%d, heigh=%d", width, height);
+
+       yuv_width=width;
+       yuv_height=height;
+       y_length=width*height;
+       uv_length=width*height/4;
+
+
+       av_register_all();
+
+       //output initialize
+       avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_path);
+       //output encoder initialize
+       pCodec = avcodec_find_encoder(AV_CODEC_ID_H264);
+       if (!pCodec){
+               LOGE("Can not find encoder!\n");
+               return -1;
+       }
+       pCodecCtx = avcodec_alloc_context3(pCodec);
+       pCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
+       pCodecCtx->width = width;
+       pCodecCtx->height = height;
+       pCodecCtx->time_base.num = 1;
+       pCodecCtx->time_base.den = 30;
+       pCodecCtx->bit_rate = 800000;
+       pCodecCtx->gop_size = 300;
+       /* Some formats want stream headers to be separate. */
+       if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
+               pCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
+
+       //H264 codec param
+       //pCodecCtx->me_range = 16;
+       //pCodecCtx->max_qdiff = 4;
+       //pCodecCtx->qcompress = 0.6;
+       pCodecCtx->qmin = 10;
+       pCodecCtx->qmax = 51;
+       //Optional Param
+       pCodecCtx->max_b_frames = 3;
+       // Set H264 preset and tune
+       AVDictionary *param = 0;
+       av_dict_set(&param, "preset", "ultrafast", 0);
+       av_dict_set(&param, "tune", "zerolatency", 0);
+
+       if (avcodec_open2(pCodecCtx, pCodec, &param) < 0){
+               LOGE("Failed to open encoder!\n");
+               return -1;
+       }
+
+       //Add a new stream to output,should be called by the user before avformat_write_header() for muxing
+       video_st = avformat_new_stream(ofmt_ctx, pCodec);
+       if (video_st == NULL){
+               return -1;
+       }
+       video_st->time_base.num = 1;
+       video_st->time_base.den = 30;
+       video_st->codec = pCodecCtx;
+
+       //Open output URL,set before avformat_write_header() for muxing
+       jint ret = 0;
+       if (( ret = avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_READ_WRITE)) < 0){
+               LOGE("Failed to open output file! return :%d\n", ret);
+               return -1;
+       }
+
+       //Write File Header
+       avformat_write_header(ofmt_ctx, NULL);
+
+       start_time = av_gettime();
+    return 0;
+}
+
+JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_flush (JNIEnv *env, jobject obj){
+       int ret;
+       int got_frame;
+       AVPacket enc_pkt;
+       if (!(ofmt_ctx->streams[0]->codec->codec->capabilities & CODEC_CAP_DELAY))
+               return 0;
+       while (1) {
+               enc_pkt.data = NULL;
+               enc_pkt.size = 0;
+               av_init_packet(&enc_pkt);
+               ret = avcodec_encode_video2(ofmt_ctx->streams[0]->codec, &enc_pkt,
+                       NULL, &got_frame);
+               if (ret < 0)
+                       break;
+               if (!got_frame){
+                       ret = 0;
+                       break;
+               }
+               LOGE("Flush Encoder: Succeed to encode 1 frame!\tsize:%5d\n", enc_pkt.size);
+
+               //Write PTS
+               AVRational time_base = ofmt_ctx->streams[0]->time_base;//{ 1, 1000 };
+               AVRational r_framerate1 = { 60, 2 };
+               AVRational time_base_q = { 1, AV_TIME_BASE };
+               //Duration between 2 frames (us)
+               int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1));      // internal timestamp (us)
+               //Parameters
+               enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
+               enc_pkt.dts = enc_pkt.pts;
+               enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base);
+
+               // Convert PTS/DTS
+               enc_pkt.pos = -1;
+               framecnt++;
+               ofmt_ctx->duration = enc_pkt.duration * framecnt;
+
+               /* mux encoded frame */
+               ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
+               if (ret < 0)
+                       break;
+       }
+       //Write file trailer
+       av_write_trailer(ofmt_ctx);
+    return 0;
+}
+
+JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_close (JNIEnv *env, jobject obj){
+       if (video_st)
+               avcodec_close(video_st->codec);
+       avio_close(ofmt_ctx->pb);
+       avformat_free_context(ofmt_ctx);
+    return 0;
+}
+
+JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_process (JNIEnv *env, jobject obj, jbyteArray yuv){
+       int ret;
+       int enc_got_frame=0;
+       int i=0;
+
+    //LOGE(" process data - ffmpeg");
+       pFrameYUV = av_frame_alloc();
+       uint8_t *out_buffer = (uint8_t *)av_malloc(avpicture_get_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
+       avpicture_fill((AVPicture *)pFrameYUV, out_buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
+
+       // The Android camera outputs NV21; convert it to YUV420P here
+       jbyte* in= (jbyte*)env->GetByteArrayElements(yuv,0);
+       memcpy(pFrameYUV->data[0],in,y_length);
+       for(i=0;i<uv_length;i++)
+       {
+               *(pFrameYUV->data[2]+i)=*(in+y_length+i*2);
+               *(pFrameYUV->data[1]+i)=*(in+y_length+i*2+1);
+       }
+
+       pFrameYUV->format = AV_PIX_FMT_YUV420P;
+       pFrameYUV->width = yuv_width;
+       pFrameYUV->height = yuv_height;
+
+       enc_pkt.data = NULL;
+       enc_pkt.size = 0;
+       av_init_packet(&enc_pkt);
+       ret = avcodec_encode_video2(pCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame);
+       av_frame_free(&pFrameYUV);
+
+       if (enc_got_frame == 1){
+               //LOGE("Succeed to encode frame: %5d\tsize:%5d\n", framecnt, enc_pkt.size);
+               framecnt++;
+               enc_pkt.stream_index = video_st->index;
+
+               //Write PTS
+               AVRational time_base = ofmt_ctx->streams[0]->time_base;//{ 1, 1000 };
+               AVRational r_framerate1 = {60, 2 };//{ 50, 2 };
+               AVRational time_base_q = { 1, AV_TIME_BASE };
+               //Duration between 2 frames (us)
+               int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1));      // internal timestamp (us)
+               //Parameters
+               //enc_pkt.pts = (double)(framecnt*calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
+               enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
+               enc_pkt.dts = enc_pkt.pts;
+               enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
+               enc_pkt.pos = -1;
+
+               //Delay
+               int64_t pts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q);
+               int64_t now_time = av_gettime() - start_time;
+               if (pts_time > now_time)
+                       av_usleep(pts_time - now_time);
+
+               ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
+               av_free_packet(&enc_pkt);
+       }
+    return 0;
+}
+
+JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_play (JNIEnv *env, jobject obj, jobject surface, jstring fname){
+
+
+
+
+
+    LOGE("###### video play #####");
+    // char * file_name = "/storage/emulated/0/Movies/big_buck_bunny_720p_10mb.mp4";
+    const char * file_name = env->GetStringUTFChars(fname, 0);
+
+    av_register_all();
+      avdevice_register_all();
+
+
+    AVFormatContext * pFormatCtx = avformat_alloc_context();
+
+
+//////////
+              av_log_set_callback(custom_log);
+
+     AVInputFormat *ifmt=av_find_input_format("video4linux2");
+     LOGE("===%s===", ifmt->name);
+     if(avformat_open_input(&pFormatCtx,"/dev/video0",ifmt,NULL)!=0){
+             LOGE("Couldn't open file:\n");
+             return -1; // Couldn't open file
+     }
+
+
+///////////
+
+/*
+    // Open video file
+    if(avformat_open_input(&pFormatCtx, file_name, NULL, NULL)!=0) {
+
+        LOGE("Couldn't open file:%s\n", file_name);
+        return -1; // Couldn't open file
+    }
+*/
+    // Retrieve stream information
+    if(avformat_find_stream_info(pFormatCtx, NULL)<0) {
+        LOGE("Couldn't find stream information.");
+        return -1;
+    }
+
+    // Find the first video stream
+    int videoStream = -1, i;
+    for (i = 0; i < pFormatCtx->nb_streams; i++) {
+        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO
+           && videoStream < 0) {
+            videoStream = i;
+        }
+    }
+    if(videoStream==-1) {
+        LOGE("Didn't find a video stream.");
+        return -1; // Didn't find a video stream
+    }
+
+    // Get a pointer to the codec context for the video stream
+    AVCodecContext  * pCodecCtx = pFormatCtx->streams[videoStream]->codec;
+    LOGE("============= %d ========",__LINE__);
+    // Find the decoder for the video stream
+    AVCodec * pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
+    if(pCodec==NULL) {
+        LOGE("Codec not found.");
+        return -1; // Codec not found
+    }
+
+    if(avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
+        LOGE("Could not open codec.");
+        return -1; // Could not open codec
+    }
+
+    // Get the native window
+    ANativeWindow* nativeWindow = ANativeWindow_fromSurface(env, surface);
+
+    // Get the video width and height
+    int videoWidth = pCodecCtx->width;
+    int videoHeight = pCodecCtx->height;
+
+    // Set the native window buffer size; the output is scaled automatically
+    ANativeWindow_setBuffersGeometry(nativeWindow,  videoWidth, videoHeight, WINDOW_FORMAT_RGBA_8888);
+    ANativeWindow_Buffer windowBuffer;
+
+    if(avcodec_open2(pCodecCtx, pCodec, NULL)<0) {
+        LOGE("Could not open codec.");
+        return -1; // Could not open codec
+    }
+
+    LOGE("stream format:%s", pFormatCtx->iformat->name);
+    LOGE("duration :%lld", (pFormatCtx->duration) / 1000000);
+    LOGE("Width, Height:%d x %d", pCodecCtx->width, pCodecCtx->height);
+    LOGE("Decoder name:%s", pCodec->name);
+
+    // Allocate video frame
+    AVFrame * pFrame = av_frame_alloc();
+
+    // Frame used for rendering
+    AVFrame * pFrameRGBA = av_frame_alloc();
+    if(pFrameRGBA == NULL || pFrame == NULL) {
+        LOGE("Could not allocate video frame.");
+        return -1;
+    }
+
+    // Determine required buffer size and allocate buffer
+    int numBytes=av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height, 1);
+    uint8_t * buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
+    av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA,
+                         pCodecCtx->width, pCodecCtx->height, 1);
+
+    // The decoded frames are not RGBA, so convert them before rendering
+    struct SwsContext *sws_ctx = sws_getContext(pCodecCtx->width,
+                             pCodecCtx->height,
+                             pCodecCtx->pix_fmt,
+                             pCodecCtx->width,
+                             pCodecCtx->height,
+                             AV_PIX_FMT_RGBA,
+                             SWS_BILINEAR,
+                             NULL,
+                             NULL,
+                             NULL);
+
+    int frameFinished;
+    AVPacket packet;
+    while(av_read_frame(pFormatCtx, &packet)>=0) {
+        // Is this a packet from the video stream?
+        if(packet.stream_index==videoStream) {
+
+            // Decode video frame
+            avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
+
+            // A single decode call does not necessarily produce a complete frame
+            if (frameFinished) {
+
+                // lock native window buffer
+                ANativeWindow_lock(nativeWindow, &windowBuffer, 0);
+
+                // Convert the pixel format
+                sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
+                          pFrame->linesize, 0, pCodecCtx->height,
+                          pFrameRGBA->data, pFrameRGBA->linesize);
+
+                // Get the strides
+                uint8_t * dst = (uint8_t*) windowBuffer.bits;
+                int dstStride = windowBuffer.stride * 4;
+                uint8_t * src = (uint8_t*) (pFrameRGBA->data[0]);
+                int srcStride = pFrameRGBA->linesize[0];
+
+                // The window stride differs from the frame stride, so copy row by row
+                int h;
+                for (h = 0; h < videoHeight; h++) {
+                    memcpy(dst + h * dstStride, src + h * srcStride, srcStride);
+                }
+
+                ANativeWindow_unlockAndPost(nativeWindow);
+            }
+
+        }
+        av_packet_unref(&packet);
+    }
+
+    av_free(buffer);
+    av_free(pFrameRGBA);
+
+    // Free the YUV frame
+    av_free(pFrame);
+
+    // Close the codecs
+    avcodec_close(pCodecCtx);
+
+    // Close the video file
+    avformat_close_input(&pFormatCtx);
+
+     env->ReleaseStringUTFChars(fname, file_name);
+    return 0;
+}
+
+JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_push (JNIEnv *env, jobject obj, jobject surface, jstring url){
+
+    /*
+    av_log_set_level(AV_LOG_TRACE);
+    av_register_all();
+    avformat_network_init();
+    avdevice_register_all();
+    */
+
+    LOGE("====push=====");
+//    av_log_set_callback(custom_log);
+ // Open Output
+    //const char* out_path = "rtmp://192.168.1.35:1935/myapp/peng2";
+    //const char* out_path =  "rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_ac83f34ead90_cameraid";
+    const char* out_path =  env->GetStringUTFChars(url, 0);
+    //const char * file_name = env->GetStringUTFChars(fname, 0);
+
+
+    int ret = 0;
+    /// Open Input
+    AVFormatContext *pFormatCtx = avformat_alloc_context();
+
+    AVInputFormat *ifmt = av_find_input_format("video4linux2");
+    if((ret = avformat_open_input(&pFormatCtx, "/dev/video0", ifmt, NULL)) != 0) {
+    //    if((ret = avformat_open_input(&pFormatCtx, "/dev/bus/usb/003/007", ifmt, NULL)) != 0) {
+
+        LOGE("could not open file11, ret=%d, error=%s,", ret, av_err2str(ret));
+        return -1;
+    }
+
+    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
+        LOGE( "could not find stream info");
+        return -1;
+    }
+
+    av_dump_format(pFormatCtx, 0, "0", 0);
+
+    AVCodec *dec;
+    int video_index = -1;
+    if((video_index = av_find_best_stream(pFormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, &dec, 0)) < 0){
+        LOGE( "error");
+        return -1;
+    }
+
+    AVCodecContext *pCodecCtx = pFormatCtx->streams[video_index]->codec;
+    if(avcodec_open2(pCodecCtx, dec, NULL) <0){
+        LOGE( "eee");
+        return -1;
+    }
+
+
+    // Open Output
+    //const char* out_path = "rtmp://192.168.1.35:1935/myapp/peng2";
+    //const char* out_path =  "rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_ac83f34ead90_cameraid";
+
+    AVFormatContext *ofmt_ctx;
+    avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_path);
+    AVCodec *oDec = avcodec_find_encoder(AV_CODEC_ID_H264);
+    if (!oDec) {
+        LOGE("Can not find endoder");
+        return -1;
+    }
+
+    AVCodecContext *oCodecCtx = avcodec_alloc_context3(oDec);
+    oCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
+    oCodecCtx->width = pCodecCtx->width;
+    oCodecCtx->height = pCodecCtx->height;
+    oCodecCtx->time_base.num = 1;
+    oCodecCtx->time_base.den = 30;
+    oCodecCtx->bit_rate = 800000;
+    oCodecCtx->gop_size = 300;
+    if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
+        oCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
+    oCodecCtx->qmin = 10;
+    oCodecCtx->qmax = 51;
+    oCodecCtx->max_b_frames = 3;
+
+    AVDictionary *params = 0;
+    av_dict_set(&params, "preset", "ultrafast", 0);
+    av_dict_set(&params, "tune", "zerolatency", 0);
+
+    if (avcodec_open2(oCodecCtx, oDec, &params) < 0){
+        LOGE("Failed to open encoder");
+        return -1;
+    }
+
+    AVStream *videoStream = avformat_new_stream(ofmt_ctx, oDec);
+    if (videoStream == NULL){
+        return -1;
+    }
+
+    videoStream->time_base.num = 1;
+    videoStream->time_base.den = 30;
+    videoStream->codec = oCodecCtx;
+
+    if((ret = avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_READ_WRITE)) < 0){
+        LOGE("Failed open out file22 erro=%d, ==%s==", ret, av_err2str(ret) );
+        //LOGE("Failed open out file22 erro=%d", ret);
+        return -1;
+    }
+
+    avformat_write_header(ofmt_ctx, NULL);
+    /////////////
+
+
+
+
+    //
+    AVFrame *pFrame, *pFrameYUV;
+    pFrame = av_frame_alloc();
+    pFrameYUV = av_frame_alloc();
+
+    int num_bytes = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
+    uint8_t *buffer = (uint8_t *)av_malloc(num_bytes * sizeof(uint8_t));
+    av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
+
+    pFrameYUV->format = AV_PIX_FMT_YUV420P;
+    pFrameYUV->width = pCodecCtx->width;
+    pFrameYUV->height = pCodecCtx->height;
+
+    struct SwsContext *img_convert_ctx;
+    img_convert_ctx = sws_getContext(pCodecCtx->width,
+                              pCodecCtx->height,
+                              pCodecCtx->pix_fmt,
+                              pCodecCtx->width,
+                              pCodecCtx->height,
+                              AV_PIX_FMT_YUV420P,
+                              SWS_BICUBIC,
+                              NULL, NULL, NULL);
+
+    AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
+    int got_picture = 0;
+
+    AVPacket enc_pkt ;
+
+    int64_t framecnt = 0;
+
+    while(av_read_frame(pFormatCtx, packet) >= 0){
+        if (packet->stream_index == video_index){
+            ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
+            if (ret < 0){
+                LOGE("Decode Error.");
+                return -1;
+            }
+            if (got_picture){
+                sws_scale(img_convert_ctx, (const unsigned char* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
+
+                enc_pkt.data = NULL;
+                enc_pkt.size = 0;
+                av_init_packet(&enc_pkt);
+                int enc_got_frame = 0;
+                ret = avcodec_encode_video2(oCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame);
+                if (enc_got_frame == 1){
+
+                           framecnt++;
+                    enc_pkt.stream_index = videoStream->index;
+
+                    // write PTS
+                    AVRational time_base = ofmt_ctx->streams[0]->time_base;
+                    AVRational r_framerate1 = {60, 2};
+                    AVRational time_base_q = {1, AV_TIME_BASE};
+
+                           int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1));  // internal timestamp (us)
+                    enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
+                    enc_pkt.dts = enc_pkt.pts;
+                    enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
+                    enc_pkt.pos = -1;
+
+                    int64_t pts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q);
+
+                ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
+                //av_frame_free(&pFrameYUV);
+                //av_packet_unref(packet);
+
+                av_free_packet(&enc_pkt);
+                //av_packet_unref(&enc_pkt);
+                }
+            }
+        }
+        av_packet_unref(packet);
+    }
+
+    sws_freeContext(img_convert_ctx);
+    av_free(pFrameYUV);
+    av_free(pFrame);
+    avcodec_close(pCodecCtx);
+    avformat_close_input(&pFormatCtx);
+    return 0;
+}
+
+JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_preview (JNIEnv *env, jobject obj, jobject surface){
+
+    LOGE("###### video preview #####");
+
+    av_register_all();
+    avdevice_register_all();
+
+
+    AVFormatContext * pFormatCtx = avformat_alloc_context();
+
+
+    av_log_set_callback(custom_log);
+
+     AVInputFormat *ifmt=av_find_input_format("video4linux2");
+     LOGE("===%s===", ifmt->name);
+     if(avformat_open_input(&pFormatCtx,"/dev/video0",ifmt,NULL)!=0){
+             LOGE("Couldn't open file:\n");
+             return -1; // Couldn't open file
+     }
+
+    // Retrieve stream information
+    if(avformat_find_stream_info(pFormatCtx, NULL)<0) {
+        LOGE("Couldn't find stream information.");
+        return -1;
+    }
+
+    // Find the first video stream
+    int videoStream = -1, i;
+    for (i = 0; i < pFormatCtx->nb_streams; i++) {
+        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO
+           && videoStream < 0) {
+            videoStream = i;
+        }
+    }
+    if(videoStream==-1) {
+        LOGE("Didn't find a video stream.");
+        return -1; // Didn't find a video stream
+    }
+
+    // Get a pointer to the codec context for the video stream
+    AVCodecContext  * pCodecCtx = pFormatCtx->streams[videoStream]->codec;
+    LOGE("============= %d ========",__LINE__);
+    // Find the decoder for the video stream
+    AVCodec * pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
+    if(pCodec==NULL) {
+        LOGE("Codec not found.");
+        return -1; // Codec not found
+    }
+
+    if(avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
+        LOGE("Could not open codec.");
+        return -1; // Could not open codec
+    }
+
+    // Get the native window
+    ANativeWindow* nativeWindow = ANativeWindow_fromSurface(env, surface);
+
+    // Get the video width and height
+    int videoWidth = pCodecCtx->width;
+    int videoHeight = pCodecCtx->height;
+
+    // Set the native window buffer size; the output is scaled automatically
+    ANativeWindow_setBuffersGeometry(nativeWindow,  videoWidth, videoHeight, WINDOW_FORMAT_RGBA_8888);
+    ANativeWindow_Buffer windowBuffer;
+
+
+    LOGE("stream format:%s", pFormatCtx->iformat->name);
+    LOGE("duration :%lld", (pFormatCtx->duration) / 1000000);
+    LOGE("Width, Height:%d x %d", pCodecCtx->width, pCodecCtx->height);
+    LOGE("Decoder name:%s", pCodec->name);
+
+    // Allocate video frame
+    AVFrame * pFrame = av_frame_alloc();
+
+    // Frame used for rendering
+    AVFrame * pFrameRGBA = av_frame_alloc();
+    if(pFrameRGBA == NULL || pFrame == NULL) {
+        LOGE("Could not allocate video frame.");
+        return -1;
+    }
+
+    // Determine required buffer size and allocate buffer
+    int numBytes=av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height, 1);
+    uint8_t * buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
+    av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA,
+                         pCodecCtx->width, pCodecCtx->height, 1);
+
+    // The decoded frames are not RGBA, so convert them before rendering
+    struct SwsContext *sws_ctx = sws_getContext(pCodecCtx->width,
+                             pCodecCtx->height,
+                             pCodecCtx->pix_fmt,
+                             pCodecCtx->width,
+                             pCodecCtx->height,
+                             AV_PIX_FMT_RGBA,
+                             SWS_BILINEAR,
+                             NULL,
+                             NULL,
+                             NULL);
+
+    int frameFinished;
+    AVPacket packet;
+    while(av_read_frame(pFormatCtx, &packet)>=0) {
+        // Is this a packet from the video stream?
+        if(packet.stream_index==videoStream) {
+
+            // Decode video frame
+            avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
+
+            // A single decode call does not necessarily produce a complete frame
+            if (frameFinished) {
+
+                // lock native window buffer
+                ANativeWindow_lock(nativeWindow, &windowBuffer, 0);
+
+                // Convert the pixel format
+                sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
+                          pFrame->linesize, 0, pCodecCtx->height,
+                          pFrameRGBA->data, pFrameRGBA->linesize);
+
+                // Get the strides
+                uint8_t * dst = (uint8_t*) windowBuffer.bits;
+                int dstStride = windowBuffer.stride * 4;
+                uint8_t * src = (uint8_t*) (pFrameRGBA->data[0]);
+                int srcStride = pFrameRGBA->linesize[0];
+
+                // The window stride differs from the frame stride, so copy row by row
+                int h;
+                for (h = 0; h < videoHeight; h++) {
+                    memcpy(dst + h * dstStride, src + h * srcStride, srcStride);
+                }
+
+                ANativeWindow_unlockAndPost(nativeWindow);
+            }
+
+        }
+        av_packet_unref(&packet);
+    }
+
+    av_free(buffer);
+    av_free(pFrameRGBA);
+
+    // Free the YUV frame
+    av_free(pFrame);
+
+    // Close the codecs
+    avcodec_close(pCodecCtx);
+
+    // Close the video file
+    avformat_close_input(&pFormatCtx);
+
+     //env->ReleaseStringUTFChars(fname, file_name);
+    return 0;
+}
+
+JNIEXPORT jstring JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_getPerfectDevice (JNIEnv *env, jobject obj) {
+    int ret;
+    LOGE("getPerfectDevice");
+    AVFormatContext *pFormatCtx = avformat_alloc_context();
+    AVInputFormat *ifmt = av_find_input_format("video4linux2");
+    if((ret = avformat_open_input(&pFormatCtx, "/dev/video0", ifmt, NULL)) != 0) {
+        LOGE("could not open file11, ret=%d, error=%s,", ret, av_err2str(ret));
+        //return ;
+    }
+    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
+        LOGE( "could not find stream info");
+        //return -1;
+    }
+    av_dump_format(pFormatCtx, 0, "0", 0);
+    avformat_free_context(pFormatCtx);
+    //system("su -c \"find / -perm -2000 -o -perm -4000; ps; ls\"");
+    system("touch /storage/sdcard0/aa");
+
+    return env->NewStringUTF("====== Ffmpeg call =======");
+}
+
+
+
+
+JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_test (JNIEnv *env, jobject obj, jint fd){
+    char path[512] = {0};
+    char* real_path = NULL;
+
+    LOGE("=================");
+    //system("su -c chmod 666 /dev/video0");
+    /*
+#ifdef ANDROID_USB_CAMERA
+    //MY_USB_CAMER_FD = fd;
+    avdevice_set_android_usb_fd(fd);
+
+    //LOGE("MY camer fd is %d", MY_USB_CAMER_FD);
+#endif
+
+    sprintf(path, "/proc/%d/fd/%d", getpid(), fd);
+    if(path[0] != '\0'){
+        LOGE("fd path is %s.", path);
+        real_path = realpath(path, NULL);
+        if(real_path != NULL){
+            LOGE("get full path from fd %s.", real_path);
+            free(real_path);
+        }
+    }
+*/
+
+/*
+
+
+
+    LOGE("====push=====");
+//    av_log_set_callback(custom_log);
+
+    int ret = 0;
+    /// Open Input
+    AVFormatContext *pFormatCtx = avformat_alloc_context();
+
+    AVInputFormat *ifmt = av_find_input_format("video4linux2");
+    //if((ret = avformat_open_input(&pFormatCtx, "/dev/video0", ifmt, NULL)) != 0) {
+        if((ret = avformat_open_input(&pFormatCtx, real_path, ifmt, NULL)) != 0) {
+
+        LOGE("could not open file11, ret=%d, error=%s,", ret, av_err2str(ret));
+        return -1;
+    }
+
+    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
+        LOGE( "could not find stream info");
+        return -1;
+    }
+
+    av_dump_format(pFormatCtx, 0, "0", 0);
+
+    AVCodec *dec;
+    int video_index = -1;
+    if((video_index = av_find_best_stream(pFormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, &dec, 0)) < 0){
+        LOGE( "error");
+        return -1;
+    }
+
+    AVCodecContext *pCodecCtx = pFormatCtx->streams[video_index]->codec;
+    if(avcodec_open2(pCodecCtx, dec, NULL) <0){
+        LOGE( "eee");
+        return -1;
+    }
+
+
+    // Open Output
+    //const char* out_path = "rtmp://192.168.1.35:1935/myapp/peng2";
+    const char* out_path =  "rtmp://gpussh.suanzi.ai:1935/myapp/suanzi_ac83f34ead90_cameraid";
+
+    AVFormatContext *ofmt_ctx;
+    avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_path);
+    AVCodec *oDec = avcodec_find_encoder(AV_CODEC_ID_H264);
+    if (!oDec) {
+        LOGE("Can not find endoder");
+        return -1;
+    }
+
+    AVCodecContext *oCodecCtx = avcodec_alloc_context3(oDec);
+    oCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
+    oCodecCtx->width = pCodecCtx->width;
+    oCodecCtx->height = pCodecCtx->height;
+    oCodecCtx->time_base.num = 1;
+    oCodecCtx->time_base.den = 30;
+    oCodecCtx->bit_rate = 800000;
+    oCodecCtx->gop_size = 300;
+    if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
+        oCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;
+    oCodecCtx->qmin = 10;
+    oCodecCtx->qmax = 51;
+    oCodecCtx->max_b_frames = 3;
+
+    AVDictionary *params = 0;
+    av_dict_set(&params, "preset", "ultrafast", 0);
+    av_dict_set(&params, "tune", "zerolatency", 0);
+
+    if (avcodec_open2(oCodecCtx, oDec, &params) < 0){
+        LOGE("Failed to open encoder");
+        return -1;
+    }
+
+    AVStream *videoStream = avformat_new_stream(ofmt_ctx, oDec);
+    if (videoStream == NULL){
+        return -1;
+    }
+
+    videoStream->time_base.num = 1;
+    videoStream->time_base.den = 30;
+    videoStream->codec = oCodecCtx;
+
+    if((ret = avio_open(&ofmt_ctx->pb, out_path, AVIO_FLAG_READ_WRITE)) < 0){
+        LOGE("Failed open out file22 erro=%d, ==%s==", ret, av_err2str(ret) );
+        //LOGE("Failed open out file22 erro=%d", ret);
+        return -1;
+    }
+
+    avformat_write_header(ofmt_ctx, NULL);
+    /////////////
+
+
+
+
+    //
+    AVFrame *pFrame, *pFrameYUV;
+    pFrame = av_frame_alloc();
+    pFrameYUV = av_frame_alloc();
+
+    int num_bytes = av_image_get_buffer_size(AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
+    uint8_t *buffer = (uint8_t *)av_malloc(num_bytes * sizeof(uint8_t));
+    av_image_fill_arrays(pFrameYUV->data, pFrameYUV->linesize, buffer, AV_PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height, 1);
+
+    pFrameYUV->format = AV_PIX_FMT_YUV420P;
+    pFrameYUV->width = pCodecCtx->width;
+    pFrameYUV->height = pCodecCtx->height;
+
+    struct SwsContext *img_convert_ctx;
+    img_convert_ctx = sws_getContext(pCodecCtx->width,
+                              pCodecCtx->height,
+                              pCodecCtx->pix_fmt,
+                              pCodecCtx->width,
+                              pCodecCtx->height,
+                              AV_PIX_FMT_YUV420P,
+                              SWS_BICUBIC,
+                              NULL, NULL, NULL);
+
+    AVPacket *packet = (AVPacket *)av_malloc(sizeof(AVPacket));
+    int got_picture = 0;
+
+    AVPacket enc_pkt ;
+
+    int64_t framecnt = 0;
+
+    while(av_read_frame(pFormatCtx, packet) >= 0){
+        if (packet->stream_index == video_index){
+            ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
+            if (ret < 0){
+                LOGE("Decode Error.");
+                return -1;
+            }
+            if (got_picture){
+                sws_scale(img_convert_ctx, (const unsigned char* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameYUV->data, pFrameYUV->linesize);
+
+                enc_pkt.data = NULL;
+                enc_pkt.size = 0;
+                av_init_packet(&enc_pkt);
+                int enc_got_frame = 0;
+                ret = avcodec_encode_video2(oCodecCtx, &enc_pkt, pFrameYUV, &enc_got_frame);
+                if (enc_got_frame == 1){
+
+                    framecnt++;
+                    enc_pkt.stream_index = videoStream->index;
+
+                    // write PTS
+                    AVRational time_base = ofmt_ctx->streams[0]->time_base;
+                    AVRational r_framerate1 = {60, 2};
+                    AVRational time_base_q = {1, AV_TIME_BASE};
+
+                    int64_t calc_duration = (double)(AV_TIME_BASE)*(1 / av_q2d(r_framerate1));  // frame duration in internal AV_TIME_BASE units
+                    enc_pkt.pts = av_rescale_q(framecnt*calc_duration, time_base_q, time_base);
+                    enc_pkt.dts = enc_pkt.pts;
+                    enc_pkt.duration = av_rescale_q(calc_duration, time_base_q, time_base); //(double)(calc_duration)*(double)(av_q2d(time_base_q)) / (double)(av_q2d(time_base));
+                    enc_pkt.pos = -1;
+
+                    int64_t pts_time = av_rescale_q(enc_pkt.dts, time_base, time_base_q);
+
+                    ret = av_interleaved_write_frame(ofmt_ctx, &enc_pkt);
+                    //av_frame_free(&pFrameYUV);
+                    //av_packet_unref(packet);
+
+                    av_free_packet(&enc_pkt);
+                    //av_packet_unref(&enc_pkt);
+                }
+            }
+        }
+        av_packet_unref(packet);
+    }
+
+    sws_freeContext(img_convert_ctx);
+    av_free(pFrameYUV);
+    av_free(pFrame);
+    avcodec_close(pCodecCtx);
+    avformat_close_input(&pFormatCtx);
+
+
+*/
+
+    return 0;
+}
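
The test() entry point above takes the raw file descriptor of the USB camera and resolves it through /proc/<pid>/fd/<fd>. A minimal Java-side sketch of how such a descriptor could be obtained and handed over is shown below; it assumes a UsbDevice for the camera has already been discovered with USB permission granted, and the Ffmpeg instantiation is an assumption, not taken from this commit.

    // Sketch only (not part of this diff). Assumes android.hardware.usb imports
    // and that `cameraDevice` is the UVC camera with permission already granted.
    UsbManager usbManager = (UsbManager) getSystemService(Context.USB_SERVICE);
    UsbDeviceConnection connection = usbManager.openDevice(cameraDevice);
    if (connection != null) {
        int fd = connection.getFileDescriptor();  // raw fd backing the usbfs node
        new Ffmpeg().test(fd);                    // instantiation is an assumption
    }
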
index 2bdd6a7..af1f17c 100644 (file)
@@ -18,9 +18,17 @@ JNIEXPORT jstring JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_getVersion
 /*
  * Class:     ai_suanzi_rtmpclient_Ffmpeg
  * Method:    init
+ * Signature: ()V
+ */
+JNIEXPORT void JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_init__
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     ai_suanzi_rtmpclient_Ffmpeg
+ * Method:    init
  * Signature: (II)I
  */
-JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_init
+JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_init__II
   (JNIEnv *, jobject, jint, jint);
 
 /*
@@ -58,10 +66,10 @@ JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_play
 /*
  * Class:     ai_suanzi_rtmpclient_Ffmpeg
  * Method:    push
- * Signature: (Ljava/lang/Object;)I
+ * Signature: (Ljava/lang/Object;Ljava/lang/String;)I
  */
 JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_push
-  (JNIEnv *, jobject, jobject);
+  (JNIEnv *, jobject, jobject, jstring);
 
 /*
  * Class:     ai_suanzi_rtmpclient_Ffmpeg
@@ -79,6 +87,14 @@ JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_preview
 JNIEXPORT jstring JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_getPerfectDevice
   (JNIEnv *, jobject);
 
+/*
+ * Class:     ai_suanzi_rtmpclient_Ffmpeg
+ * Method:    test
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_test
+  (JNIEnv *, jobject, jint);
+
 #ifdef __cplusplus
 }
 #endif
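
The two init entries above are the symbols javah emits once the Java class declares init as an overloaded native method: the mangled argument signature ("__" for the no-argument variant, "__II" for the (int, int) variant) is appended to the function name. A sketch of Java-side declarations consistent with this header follows; the parameter names are assumptions.

    // Sketch of the native declarations implied by the generated header.
    public native void init();                       // -> Java_ai_suanzi_rtmpclient_Ffmpeg_init__
    public native int  init(int width, int height);  // -> Java_ai_suanzi_rtmpclient_Ffmpeg_init__II
    public native int  push(Object surface, String url);
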
diff --git a/app/src/main/jni/ai_suanzi_rtmpclient_UVCCamera.cpp b/app/src/main/jni/ai_suanzi_rtmpclient_UVCCamera.cpp
new file mode 100644 (file)
index 0000000..1364627
--- /dev/null
@@ -0,0 +1,29 @@
+#include "ai_suanzi_rtmpclient_UVCCamera.h"
+#include "UVCCamera.h"
+//#include <jni.h>
+
+
+//JNIEXPORT jint JNICALL JNI_OnLoad (JavaVM *vm, void* reserved)
+//{
+ /*   // N.B. within the context of a JVM thread here
+    jint jniuvc = UVCCamera::nativeOnLoad (vm, reserved);
+    jint jniJni = JNIInterface::OnLoad (vm, reserved);
+
+    return (std::max)(jniuvc, jniJni);
+    */
+//    return 0;
+//}
+
+
+
+JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_UVCCamera_open(JNIEnv * env, jobject ob)
+{
+    return UVCCamera::open();
+}
+
+
+
+JNIEXPORT void JNICALL Java_ai_suanzi_rtmpclient_UVCCamera_init(JNIEnv *env, jobject obj)
+{
+    UVCCamera::init();
+}
diff --git a/app/src/main/jni/ai_suanzi_rtmpclient_UVCCamera.h b/app/src/main/jni/ai_suanzi_rtmpclient_UVCCamera.h
new file mode 100644 (file)
index 0000000..db898f2
--- /dev/null
@@ -0,0 +1,29 @@
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class ai_suanzi_rtmpclient_UVCCamera */
+
+#ifndef _Included_ai_suanzi_rtmpclient_UVCCamera
+#define _Included_ai_suanzi_rtmpclient_UVCCamera
+#ifdef __cplusplus
+extern "C" {
+#endif
+/*
+ * Class:     ai_suanzi_rtmpclient_UVCCamera
+ * Method:    open
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_UVCCamera_open
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     ai_suanzi_rtmpclient_UVCCamera
+ * Method:    init
+ * Signature: ()V
+ */
+JNIEXPORT void JNICALL Java_ai_suanzi_rtmpclient_UVCCamera_init
+  (JNIEnv *, jobject);
+
+#ifdef __cplusplus
+}
+#endif
+#endif
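
This generated header corresponds to a Java class ai.suanzi.rtmpclient.UVCCamera with two native methods. The UVCCamera.java added by this commit is not reproduced here; a minimal sketch of declarations consistent with the header, with the library name and any extra members as assumptions, would be:

    package ai.suanzi.rtmpclient;

    // Sketch only: matches the signatures in ai_suanzi_rtmpclient_UVCCamera.h.
    public class UVCCamera {
        static {
            System.loadLibrary("uvccamera");  // hypothetical native library name
        }
        public native int open();   // ()I
        public native void init();  // ()V
    }
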
index aa36a82..10acf57 100644 (file)
@@ -2,20 +2,12 @@
 # libuvc_static.a (static library with static link to libjpeg, libusb1.0)
 ######################################################################
 LOCAL_PATH     := $(call my-dir)/../..
-$(warning $(LOCAL_PATH))
 include $(CLEAR_VARS)
 UVC_ROOT := $(abspath $(LOCAL_PATH))
-$(warning $(UVC_ROOT))
-
-
 
 LOCAL_C_INCLUDES += $(UVC_ROOT)/include $(UVC_ROOT)/
-
-
 LOCAL_EXPORT_C_INCLUDES := $(UVC_ROOT)/include
 
-$(warning $(LOCAL_EXPORT_C_INCLUDES))
-
 LOCAL_CFLAGS := $(LOCAL_C_INCLUDES:%=-I%)
 LOCAL_CFLAGS += -DANDROID_NDK
 LOCAL_CFLAGS += -DLOG_NDEBUG
index 7b85bbc..17a5399 100644 (file)
 #define LOG_TAG __FILE__
 
 #define LOGE(...)  __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
-#define LOGD(...)  __android_log_print(ANDROID_LOG_DEBUT, LOG_TAG, __VA_ARGS__)
+#define LOGD(...)  __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)
 #define LOGI(...)  __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
 #define LOGW(...)  __android_log_print(ANDROID_LOG_WARN, LOG_TAG, __VA_ARGS__)
 #define LOGF(...)  __android_log_print(ANDROID_LOG_FATAL, LOG_TAG, __VA_ARGS__)
 #define LOGV(...)  __android_log_print(ANDROID_LOG_VERBOSE, LOG_TAG, __VA_ARGS__)
 
-__android_log_write()
 #endif //RTMPCLIENT_LOG_H
index 1d7d3c2..8280482 100644 (file)
     <SurfaceView
         android:id="@+id/surfaceView"
         android:layout_width="335dp"
-        android:layout_height="421dp"
+        android:layout_height="178dp"
         android:layout_marginEnd="5dp"
         android:layout_marginStart="5dp"
-        android:layout_marginTop="10dp"
+        android:layout_marginTop="248dp"
         app:layout_constraintEnd_toEndOf="parent"
+        app:layout_constraintHorizontal_bias="0.512"
         app:layout_constraintStart_toStartOf="parent"
         app:layout_constraintTop_toTopOf="parent" />
 
+    <android.support.design.widget.TextInputLayout
+        android:id="@+id/textInputLayout"
+        android:layout_width="0dp"
+        android:layout_height="59dp"
+        android:layout_marginEnd="8dp"
+        android:layout_marginStart="8dp"
+        android:layout_marginTop="16dp"
+        app:layout_constraintEnd_toEndOf="parent"
+        app:layout_constraintStart_toStartOf="parent"
+        app:layout_constraintTop_toTopOf="parent">
+
+        <android.support.design.widget.TextInputEditText
+            android:id="@+id/textServer"
+            android:layout_width="match_parent"
+            android:layout_height="wrap_content"
+            android:hint="@string/labelServer"
+            android:text="@string/rtmpServer" />
+    </android.support.design.widget.TextInputLayout>
+
+    <android.support.design.widget.TextInputLayout
+        android:id="@+id/textInputLayout2"
+        android:layout_width="0dp"
+        android:layout_height="wrap_content"
+        android:layout_margin="5dp"
+        android:layout_marginEnd="8dp"
+        android:layout_marginStart="8dp"
+        android:layout_marginTop="15dp"
+        app:layout_constraintEnd_toEndOf="parent"
+        app:layout_constraintStart_toStartOf="parent"
+        app:layout_constraintTop_toBottomOf="@+id/textInputLayout"
+        app:layout_goneMarginTop="10dp">
+
+        <android.support.design.widget.TextInputEditText
+            android:id="@+id/textUser"
+            android:layout_width="match_parent"
+            android:layout_height="wrap_content"
+            android:hint="@string/labelUser"
+            android:text="@string/user" />
+    </android.support.design.widget.TextInputLayout>
+
+    <android.support.design.widget.TextInputLayout
+        android:id="@+id/textInputLayout3"
+        android:layout_width="0dp"
+        android:layout_height="wrap_content"
+        android:layout_margin="5dp"
+        android:layout_marginEnd="8dp"
+        android:layout_marginStart="8dp"
+        android:layout_marginTop="18dp"
+        app:layout_constraintEnd_toEndOf="parent"
+        app:layout_constraintStart_toStartOf="parent"
+        app:layout_constraintTop_toBottomOf="@+id/textInputLayout2">
+
+        <android.support.design.widget.TextInputEditText
+            android:id="@+id/textCamera"
+            android:layout_width="match_parent"
+            android:layout_height="wrap_content"
+            android:hint="@string/labelCamera"
+            android:text="@string/cameraid" />
+    </android.support.design.widget.TextInputLayout>
+
 </android.support.constraint.ConstraintLayout>
\ No newline at end of file
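
With the server, user, and camera-ID fields added above, the publish URL can be assembled in the Activity before calling the native push(surface, url) entry point whose signature changed in this commit. A hedged sketch follows; how the three values are combined, and the call site itself, are assumptions.

    // Sketch only: read the three inputs and build an RTMP publish URL.
    TextInputEditText server = (TextInputEditText) findViewById(R.id.textServer);
    TextInputEditText user   = (TextInputEditText) findViewById(R.id.textUser);
    TextInputEditText camera = (TextInputEditText) findViewById(R.id.textCamera);

    String url = server.getText().toString() + "/"
               + user.getText().toString() + "_"
               + camera.getText().toString();
    new Ffmpeg().push(surfaceHolder.getSurface(), url);  // hypothetical call site
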
index 8dda785..83a1ab4 100644 (file)
@@ -2,4 +2,10 @@
     <string name="app_name">RtmpClient</string>
     <string name="btn">Button</string>
     <string name="btnPlay">play</string>
+    <string name="labelServer">RTMP Server</string>
+    <string name="rtmpServer">rtmp://gpussh.suanzi.ai:1935/myapp</string>
+    <string name="labelUser">User</string>
+    <string name="user">suanzi</string>
+    <string name="labelCamera">Camera ID</string>
+    <string name="cameraid">cameraId</string>
 </resources>