compileSdkVersion 27
defaultConfig {
applicationId "ai.suanzi.rtmpclient"
- minSdkVersion 21
- targetSdkVersion 21
+ minSdkVersion 17
+ targetSdkVersion 17
versionCode 1
versionName "1.0"
testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
public class Ffmpeg {
static {
- System.loadLibrary("x264");
- System.loadLibrary("postproc");
+ String arch = System.getProperty("os.arch");
+ if (arch.equals("i686")){
+ System.loadLibrary("x264");
+ }
System.loadLibrary("avutil");
+
+ System.loadLibrary("postproc");
+
System.loadLibrary("swresample");
System.loadLibrary("swscale");
System.loadLibrary("avcodec");
import android.os.AsyncTask;
import java.util.List;
import java.io.IOException;
+import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
}
});
- this.mCamera = getCameraInstance();
+ /*this.mCamera = getCameraInstance();
if(checkCameraHardware(this)) Log.e(TAG, "has cameras: " + Camera.getNumberOfCameras());
-
+*/
final SurfaceView surfaceView = findViewById(R.id.surfaceView);
mHolder = surfaceView.getHolder();
mHolder.addCallback(this);
//
- mCamera.setPreviewCallback(this);
+ //mCamera.setPreviewCallback(this);
/*try{
mCamera.setPreviewTexture(st);
e.printStackTrace();
}*/
+ final Button btn2 = findViewById(R.id.button2);
+ btn2.setOnClickListener(new View.OnClickListener(){
+ @Override
+ public void onClick(View view){
+ Log.e(TAG, "onclick2");
+ ffmpeg.play(mHolder.getSurface(),"/storage/sdcard0/output.flv");
+
+ }
+
+ });
+
+ Log.e(TAG, "onclick2");
+ //ffmpeg.play(mHolder.getSurface(),"/storage/emulated/0/Movies/output.flv");
+ //ffmpeg.play(mHolder.getSurface(),"/data/local/tmp/big_buck_bunny_720p_10mb.mp4");
+
+
+ btn2.post(new Runnable(){
+ @Override
+ public void run() {
+ btn2.performClick();
+ }
+ });
+
+
}
- SurfaceTexture st = new SurfaceTexture(0);
+ //SurfaceTexture st = new SurfaceTexture(0);
@Override
@Override
public void surfaceCreated(final SurfaceHolder holder){
Log.e(TAG,"SurfacedCreated");
- try {
+ /*try {
mCamera.setPreviewDisplay(holder);
- //mCamera.startPreview();
+ mCamera.startPreview();
} catch (IOException e) {
Log.d(TAG, "Error setting camera preview: " + e.getMessage());
- }
+ }*/
}
@Override
//parameters.setPreviewSize(640,480);
//parameters.setPictureSize(640,480);
//mCamera.setParameters(parameters);
+
+ //try{
+ //Thread.sleep(10000);
+ /*Log.e(TAG, "xxxxxxxxxxxxxx");
+ mExecutor.execute(new Runnable() {
+ @Override
+ public void run() {
+ mCamera.startPreview();
+ final Button btn = findViewById(R.id.button);
+ Camera.Parameters params = mCamera.getParameters();
+ ffmpeg.init(params.getPictureSize().width, params.getPictureSize().height);
+ }
+ });*/
+
+ //btn.performClick();
+ //} catch (InterruptedException e){
+ // e.printStackTrace();
+ //}
+
+
+
+ //Camera.Parameters params = mCamera.getParameters();
+ //ffmpeg.init(params.getPictureSize().width, params.getPictureSize().height);
+ /*Thread t = new Thread(new Runnable() {
+ @Override
+ public void run() {
+ ffmpeg.play(mHolder.getSurface(),"/storage/emulated/0/Movies/output.flv");
+
+ }
+ });*/
+
}
@Override
mStreamTask.execute((Void)null);*/
- long endTime = System.currentTimeMillis();
+ /*long endTime = System.currentTimeMillis();
mExecutor.execute(new Runnable() {
@Override
public void run() {
});
Log.e(TAG, "采集第:" + (++count) + "帧,距上一帧间隔时间:"
+ (endTime - previewTime) + " " + Thread.currentThread().getName());
- previewTime = endTime;
+ previewTime = endTime;*/
}
c = Camera.open(0);
Camera.Parameters params = c.getParameters();
Log.e(TAG, "Camera parameters: " + params.getPreviewSize().width + "x" + params.getPreviewSize().height);
- // NV21 : 17, JPEG 256
Log.e(TAG, "Preview format (17 is NV21): " + params.getPreviewFormat() + ". Picture format(256 is JPEG): " + params.getPictureFormat());
List<int[]> fps = params.getSupportedPreviewFpsRange();
for(int[] i : fps){
Camera.Parameters parameters = c.getParameters();
- //List<Camera.Size> sizes = parameters.getSupportedPreviewSizes();
+
List<Camera.Size> sizes = parameters.getSupportedPictureSizes();
for (Camera.Size cc : sizes){
Log.e(TAG, "=== width: " + cc.width + ". height:" + cc.height);
}
Camera.Size cs = sizes.get(3);
- params.setPreviewSize(cs.width, cs.height);
+ //params.setPreviewSize(cs.width, cs.height);
params.setPictureSize(cs.width, cs.height);
- params.setPictureFormat(ImageFormat.NV21);
+ //params.setPictureFormat(ImageFormat.NV21);
c.setParameters(params);
}
catch (Exception e){
# avdevice
include $(CLEAR_VARS)
LOCAL_MODULE:= libavdevice
-LOCAL_SRC_FILES:= lib/libavdevice.so
+LOCAL_SRC_FILES:= lib/$(TARGET_ARCH)/libavdevice.so
LOCAL_EXPORT_C_INCLUDES:= $(LOCAL_PATH)/include
include $(PREBUILT_SHARED_LIBRARY)
# avcodec
include $(CLEAR_VARS)
LOCAL_MODULE:= libavcodec
-LOCAL_SRC_FILES:= lib/libavcodec.so
+LOCAL_SRC_FILES:= lib/$(TARGET_ARCH)/libavcodec.so
LOCAL_EXPORT_C_INCLUDES:= $(LOCAL_PATH)/include
include $(PREBUILT_SHARED_LIBRARY)
# avformat
include $(CLEAR_VARS)
LOCAL_MODULE:= libavformat
-LOCAL_SRC_FILES:= lib/libavformat.so
+LOCAL_SRC_FILES:= lib/$(TARGET_ARCH)/libavformat.so
LOCAL_EXPORT_C_INCLUDES:= $(LOCAL_PATH)/include
include $(PREBUILT_SHARED_LIBRARY)
# avfilter
include $(CLEAR_VARS)
LOCAL_MODULE:= libavfilter
-LOCAL_SRC_FILES:= lib/libavfilter.so
+LOCAL_SRC_FILES:= lib/$(TARGET_ARCH)/libavfilter.so
LOCAL_EXPORT_C_INCLUDES:= $(LOCAL_PATH)/include
include $(PREBUILT_SHARED_LIBRARY)
# swresample
include $(CLEAR_VARS)
LOCAL_MODULE:= libswresample
-LOCAL_SRC_FILES:= lib/libswresample.so
+LOCAL_SRC_FILES:= lib/$(TARGET_ARCH)/libswresample.so
LOCAL_EXPORT_C_INCLUDES:= $(LOCAL_PATH)/include
include $(PREBUILT_SHARED_LIBRARY)
# swscale
include $(CLEAR_VARS)
LOCAL_MODULE:= libswscale
-LOCAL_SRC_FILES:= lib/libswscale.so
+LOCAL_SRC_FILES:= lib/$(TARGET_ARCH)/libswscale.so
LOCAL_EXPORT_C_INCLUDES:= $(LOCAL_PATH)/include
include $(PREBUILT_SHARED_LIBRARY)
# avutil
include $(CLEAR_VARS)
LOCAL_MODULE:= libavutil
-LOCAL_SRC_FILES:= lib/libavutil.so
+LOCAL_SRC_FILES:= lib/$(TARGET_ARCH)/libavutil.so
LOCAL_EXPORT_C_INCLUDES:= $(LOCAL_PATH)/include
include $(PREBUILT_SHARED_LIBRARY)
+ifeq ($(TARGET_ARCH),x86)
# x264
include $(CLEAR_VARS)
LOCAL_MODULE:= libx264
-LOCAL_SRC_FILES:= lib/libx264.so
+LOCAL_SRC_FILES:= lib/$(TARGET_ARCH)/libx264.so
LOCAL_EXPORT_C_INCLUDES:= $(LOCAL_PATH)/include
include $(PREBUILT_SHARED_LIBRARY)
+endif
# postproc
include $(CLEAR_VARS)
LOCAL_MODULE:= libpostproc
-LOCAL_SRC_FILES:= lib/libpostproc.so
+LOCAL_SRC_FILES:= lib/$(TARGET_ARCH)/libpostproc.so
LOCAL_EXPORT_C_INCLUDES:= $(LOCAL_PATH)/include
include $(PREBUILT_SHARED_LIBRARY)
+
# ffmpegjni
include $(CLEAR_VARS)
LOCAL_MODULE := ffmpeg-jni
LOCAL_SRC_FILES := ai_suanzi_rtmpclient_Ffmpeg.cpp
+
+ifeq ($(TARGET_ARCH),x86)
LOCAL_SHARED_LIBRARIES := avdevice avcodec avformat avfilter swresample swscale avutil postproc x264
+else
+LOCAL_SHARED_LIBRARIES := avdevice avcodec avformat avfilter swresample swscale avutil postproc
+endif
+
+LOCAL_CFLAGS := -D__ANDROID_API__=21
LOCAL_LDLIBS :=-llog -landroid
include $(BUILD_SHARED_LIBRARY)
\ No newline at end of file
-APP_ABI := x86
\ No newline at end of file
+APP_ABI := armeabi-v7a x86
\ No newline at end of file
#include "libswscale/swscale.h"
#include "libavutil/imgutils.h"
#include "libavutil/time.h"
+ #include "libavdevice/avdevice.h"
}
int64_t start_time;
AVFrame *pFrameYUV;
+// FFmpeg log callback: forwards FFmpeg's internal log messages to Android
+// logcat. `ptr` and `level` are accepted to match FFmpeg's callback
+// signature but are not used here; `fmt`/`vl` carry the message.
+void custom_log(void *ptr, int level, const char* fmt, va_list vl){
+    // BUG FIX: the previous code did `LOGE(fmt, vl)`, passing a raw va_list
+    // as a variadic argument — that prints garbage (undefined behavior).
+    // Format the message into a buffer with vsnprintf first, then log it.
+    char line[1024];
+    vsnprintf(line, sizeof(line), fmt, vl);
+    LOGE("%s", line);
+}
+
+
+
+
int framecnt = 0;
int yuv_width;
int yuv_height;
int y_length;
int uv_length;
+
+
JNIEXPORT jstring JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_getVersion (JNIEnv *env, jobject obj) {
jint v = avformat_version();
LOGE("######### Ffmpeg JNI version i= %d", v);
+
+
+ /*AVFormatContext *pFormatCtx = avformat_alloc_context();
+ avdevice_register_all();
+ av_log_set_callback(custom_log);
+ AVInputFormat *ifmt=av_find_input_format("video4linux2");
+ LOGE("===%s===", ifmt->name);
+ if(avformat_open_input(&pFormatCtx,"/dev/video0",ifmt,NULL)!=0){
+ LOGE("Couldn't open input stream.\n");
+ return env->NewStringUTF("===== error =======");
+
+ //return -1;
+ }*/
+
return env->NewStringUTF("====== Ffmpeg call =======");
}
//const char* out_path = "/storage/emulated/0/Movies/output.flv";
- const char* out_path = "rtmp://192.168.1.35:1935/myapp/suanzi";
+ //const char* out_path = "rtmp://192.168.1.35:1935/myapp/suanzi";
+ const char* out_path = "/storage/sdcard0/output.flv";
- LOGE("Ffmpeg init, width=%d, heigh=%d", width, height);
+ LOGE("Ffmpeg init, width=%d, heigh=%d", width, height);
yuv_width=width;
yuv_height=height;
return 0;
}
-JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_play (JNIEnv *env, jobject obj, jobject obj2, jstring fname){
+// Decode video from the v4l2 camera device (/dev/video0) and render each
+// frame onto the supplied Android Surface as RGBA. Returns 0 on success,
+// -1 on any setup or decode failure.
+// NOTE(review): `fname` is currently unused as an input source — the v4l2
+// device path is hard-coded; the string is only fetched and released. The
+// file-based open path is kept commented out below for testing.
+JNIEXPORT jint JNICALL Java_ai_suanzi_rtmpclient_Ffmpeg_play (JNIEnv *env, jobject obj, jobject surface, jstring fname){
+    LOGE("###### video play #####");
+    const char * file_name = env->GetStringUTFChars(fname, 0);
+
+    av_register_all();
+    avdevice_register_all();
+
+    AVFormatContext * pFormatCtx = avformat_alloc_context();
+
+    // Route FFmpeg's internal logging through our callback (to logcat).
+    av_log_set_callback(custom_log);
+
+    // Open the camera through the video4linux2 input device rather than a
+    // regular media file.
+    AVInputFormat *ifmt=av_find_input_format("video4linux2");
+    LOGE("===%s===", ifmt->name);
+    if(avformat_open_input(&pFormatCtx,"/dev/video0",ifmt,NULL)!=0){
+        LOGE("Couldn't open file:\n");
+        return -1; // Couldn't open file
+    }
+
+/*
+    // Open video file
+    if(avformat_open_input(&pFormatCtx, file_name, NULL, NULL)!=0) {
+        LOGE("Couldn't open file:%s\n", file_name);
+        return -1; // Couldn't open file
+    }
+*/
+
+    // Retrieve stream information
+    if(avformat_find_stream_info(pFormatCtx, NULL)<0) {
+        LOGE("Couldn't find stream information.");
+        return -1;
+    }
+
+    // Find the first video stream
+    int videoStream = -1, i;
+    for (i = 0; i < pFormatCtx->nb_streams; i++) {
+        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO
+                && videoStream < 0) {
+            videoStream = i;
+        }
+    }
+    if(videoStream==-1) {
+        LOGE("Didn't find a video stream.");
+        return -1; // Didn't find a video stream
+    }
+
+    // Get a pointer to the codec context for the video stream
+    AVCodecContext * pCodecCtx = pFormatCtx->streams[videoStream]->codec;
+
+    // Find and open the decoder for the video stream.
+    // BUG FIX: avcodec_open2() was previously called a second time after
+    // the native window setup; opening an already-open codec context is
+    // invalid. It is now opened exactly once.
+    AVCodec * pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
+    if(pCodec==NULL) {
+        LOGE("Codec not found.");
+        return -1; // Codec not found
+    }
+    if(avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
+        LOGE("Could not open codec.");
+        return -1; // Could not open codec
+    }
+
+    // Get the native window backing the Java Surface.
+    ANativeWindow* nativeWindow = ANativeWindow_fromSurface(env, surface);
+
+    int videoWidth = pCodecCtx->width;
+    int videoHeight = pCodecCtx->height;
+
+    // Size the window buffers to the video; the window scales automatically.
+    ANativeWindow_setBuffersGeometry(nativeWindow, videoWidth, videoHeight, WINDOW_FORMAT_RGBA_8888);
+    ANativeWindow_Buffer windowBuffer;
+
+    LOGE("stream format:%s", pFormatCtx->iformat->name);
+    LOGE("duration :%lld", (pFormatCtx->duration) / 1000000);
+    LOGE("Width, Height:%d x %d", pCodecCtx->width, pCodecCtx->height);
+    LOGE("Decoder name:%s", pCodec->name);
+
+    // Allocate the decoded frame and an RGBA frame used for rendering.
+    AVFrame * pFrame = av_frame_alloc();
+    AVFrame * pFrameRGBA = av_frame_alloc();
+    if(pFrameRGBA == NULL || pFrame == NULL) {
+        LOGE("Could not allocate video frame.");
+        return -1;
+    }
+
+    // Determine required buffer size and allocate buffer
+    int numBytes=av_image_get_buffer_size(AV_PIX_FMT_RGBA, pCodecCtx->width, pCodecCtx->height, 1);
+    uint8_t * buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
+    av_image_fill_arrays(pFrameRGBA->data, pFrameRGBA->linesize, buffer, AV_PIX_FMT_RGBA,
+                         pCodecCtx->width, pCodecCtx->height, 1);
+
+    // Decoded frames are not RGBA, so they must be converted before
+    // rendering to the window.
+    struct SwsContext *sws_ctx = sws_getContext(pCodecCtx->width,
+                                                pCodecCtx->height,
+                                                pCodecCtx->pix_fmt,
+                                                pCodecCtx->width,
+                                                pCodecCtx->height,
+                                                AV_PIX_FMT_RGBA,
+                                                SWS_BILINEAR,
+                                                NULL,
+                                                NULL,
+                                                NULL);
+
+    int frameFinished;
+    AVPacket packet;
+    while(av_read_frame(pFormatCtx, &packet)>=0) {
+        // Is this a packet from the video stream?
+        if(packet.stream_index==videoStream) {
+            // Decode video frame (one packet does not always yield a frame).
+            avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
+            if (frameFinished) {
+                // lock native window buffer
+                ANativeWindow_lock(nativeWindow, &windowBuffer, 0);
+
+                // Convert the decoded frame to RGBA.
+                sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data,
+                          pFrame->linesize, 0, pCodecCtx->height,
+                          pFrameRGBA->data, pFrameRGBA->linesize);
+
+                // The window stride and the frame stride can differ, so the
+                // pixels are copied row by row.
+                uint8_t * dst = (uint8_t*) windowBuffer.bits;
+                int dstStride = windowBuffer.stride * 4;
+                uint8_t * src = (uint8_t*) (pFrameRGBA->data[0]);
+                int srcStride = pFrameRGBA->linesize[0];
+                int h;
+                for (h = 0; h < videoHeight; h++) {
+                    memcpy(dst + h * dstStride, src + h * srcStride, srcStride);
+                }
+
+                ANativeWindow_unlockAndPost(nativeWindow);
+            }
+        }
+        av_packet_unref(&packet);
+    }
+
+    // BUG FIX: release the scaler context and the native window reference;
+    // both were previously leaked on every call.
+    sws_freeContext(sws_ctx);
+    ANativeWindow_release(nativeWindow);
+
+    av_free(buffer);
+    av_free(pFrameRGBA);
+
+    // Free the YUV frame
+    av_free(pFrame);
+
+    // Close the codecs
+    avcodec_close(pCodecCtx);
+
+    // Close the video file
+    avformat_close_input(&pFormatCtx);
+
+    env->ReleaseStringUTFChars(fname, file_name);
return 0;
}
\ No newline at end of file
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent" />
+ <Button
+ android:id="@+id/button2"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_marginBottom="38dp"
+ android:text="@string/btnPlay"
+ app:layout_constraintBottom_toTopOf="@+id/button"
+ tools:layout_editor_absoluteX="147dp" />
+
</android.support.constraint.ConstraintLayout>
\ No newline at end of file
<resources>
<string name="app_name">RtmpClient</string>
<string name="btn">Button</string>
+ <string name="btnPlay">play</string>
</resources>