JavaCV - 以纵向模式录制视频

时间:2016-02-24 08:13:23

标签: android video javacv

我正在尝试使用JavaCV直播到RTMP nginx服务器。我正在使用这个例子: https://github.com/bytedeco/javacv/blob/master/samples/RecordActivity.java

我想使用该示例在全屏纵向(竖屏)模式下直播。我尝试修改了一些设置,但录出来的视频变成了类似“this”链接所示的扭曲/拉伸效果

这是我对布局,记录器和FFmpegFrameFilter的初始化:

// Target recorded-stream size, expressed in portrait orientation (720x1280).
// NOTE(review): the Android camera delivers preview buffers in landscape; the
// code in surfaceChanged swaps these two values when picking a preview size,
// so the Frame buffers filled in onPreviewFrame are actually 1280x720 — confirm
// the Frame/filter dimensions agree with the real preview size.
private int imageWidth = 720;
private int imageHeight = 1280;

/**
 * Creates and starts the FFmpeg frame filter that rotates each camera frame
 * 90 degrees clockwise ("transpose=clock") so the landscape camera buffers
 * come out in portrait orientation for the recorder.
 */
private void initFilter() {
        // BUG FIX: the filter's declared input size must exactly match the Frame
        // objects pushed into it (yuvImage is allocated imageWidth x imageHeight in
        // initRecorder). The original used imageWidth-1 / imageHeight-1, so every
        // pushed frame had a mismatched size and the output was garbled.
        // NOTE(review): if the picture is still distorted, the camera preview buffers
        // are likely landscape (imageHeight x imageWidth after the swap done in
        // surfaceChanged) — in that case swap these two arguments AND allocate the
        // Frame buffers with the same swapped size.
        filter = new FFmpegFrameFilter("transpose=clock", imageWidth, imageHeight);
        filter.setPixelFormat(avutil.AV_PIX_FMT_NV21); // default camera preview format on Android
        try {
            filter.start();
        } catch (FrameFilter.Exception e) {
            e.printStackTrace();
        }
    }

/**
 * Builds the activity's view hierarchy: a full-screen inflated layout with the
 * live camera preview stacked on top of it, both sized to the whole display.
 */
private void initLayout() {
        // Query the full display size; both child views are sized to cover it.
        final WindowManager wm = (WindowManager) getSystemService(Context.WINDOW_SERVICE);
        final Display display = wm.getDefaultDisplay();
        screenWidth = display.getWidth();
        screenHeight = display.getHeight();

        // Inflate the main layout and attach it to the root container.
        LayoutInflater inflater = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
        topLayout = (FrameLayout) findViewById(R.id.record_layout);
        RelativeLayout preViewLayout = (RelativeLayout) inflater.inflate(R.layout.activity_main, null);
        topLayout.addView(preViewLayout,
                new RelativeLayout.LayoutParams(screenWidth, screenHeight));

        // Open the default camera and stack its preview view on top, also full-screen.
        cameraDevice = Camera.open();
        Log.i(LOG_TAG, "cameara open");
        cameraView = new CameraView(this, cameraDevice);
        topLayout.addView(cameraView,
                new RelativeLayout.LayoutParams(screenWidth, screenHeight));
        Log.i(LOG_TAG, "cameara preview start: OK");
    }

/**
 * Prepares the video/audio recording pipeline: either a ring buffer of frames
 * (when RECORD_LENGTH > 0) or a single reusable frame buffer, plus the
 * FFmpegFrameRecorder targeting the RTMP URL and the audio capture thread.
 */
private void initRecorder() {

        Log.w(LOG_TAG, "init recorder");

        if (RECORD_LENGTH > 0) {
            // Ring-buffer mode: pre-allocate RECORD_LENGTH seconds worth of frames.
            imagesIndex = 0;
            final int frameCount = RECORD_LENGTH * frameRate;
            images = new Frame[frameCount];
            timestamps = new long[frameCount];
            for (int idx = 0; idx < frameCount; idx++) {
                images[idx] = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
                timestamps[idx] = -1; // -1 marks a slot that has never been filled
            }
        } else if (yuvImage == null) {
            // Streaming mode: one reusable NV21 frame buffer is enough.
            yuvImage = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
            Log.i(LOG_TAG, "create yuvImage");
        }

        Log.i(LOG_TAG, "ffmpeg_url: " + ffmpeg_link);
        // FLV container over RTMP, one audio channel.
        recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
        recorder.setFormat("flv");
        recorder.setSampleRate(sampleAudioRateInHz);
        // Set in the surface changed method
        recorder.setFrameRate(frameRate);

        Log.i(LOG_TAG, "recorder initialize success");

        // Audio is captured on its own thread; started elsewhere.
        audioRecordRunnable = new AudioRecordRunnable();
        audioThread = new Thread(audioRecordRunnable);
        runAudioThread = true;
    }

这是surfaceView的代码

/**
 * Camera preview surface. Displays the live camera feed (rotated to portrait on
 * screen) and forwards each raw NV21 preview buffer to the recording pipeline
 * via onPreviewFrame.
 */
class CameraView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {

        private SurfaceHolder mHolder;
        private Camera mCamera;

        /** Re-registers this view as the surface callback (used when resuming). */
        public void addCallback()
        {
            mHolder.addCallback(CameraView.this);
        }

        /** Unregisters this view from surface callbacks (used when pausing). */
        public void removeCallback()
        {
            mHolder.removeCallback(CameraView.this);
        }

        public CameraView(Context context, Camera camera) {
            super(context);
            Log.w("camera","camera view");
            mCamera = camera;
            mHolder = getHolder();
            mHolder.addCallback(CameraView.this);
            // Deprecated, but required on pre-Honeycomb devices for camera preview.
            mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
            mCamera.setPreviewCallback(CameraView.this);
        }

        @Override
        public void surfaceCreated(SurfaceHolder holder) {
            Log.v("StreamActivity","Surface Created");
            if (!paused) {
                try {
                    stopPreview();
                    mCamera.setPreviewDisplay(holder);
                    // Rotates only the ON-SCREEN preview to portrait; the buffers
                    // delivered to onPreviewFrame remain landscape, which is why the
                    // recording path runs them through a transpose filter.
                    mCamera.setDisplayOrientation(90);
                } catch (IOException exception) {
                    mCamera.release();
                    mCamera = null;
                }
            }
        }

        public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
            Log.v("StreamActivity", "surfaceChanged");
            stopPreview();

            Camera.Parameters camParams = mCamera.getParameters();
            List<Camera.Size> sizes = camParams.getSupportedPreviewSizes();
            // Sort the list in ascending order
            Collections.sort(sizes, new Comparator<Camera.Size>() {

                public int compare(final Camera.Size a, final Camera.Size b) {
                    return a.width * a.height - b.width * b.height;
                }
            });
            // Swap width/height: supported preview sizes are expressed in landscape,
            // while imageWidth/imageHeight are the desired portrait dimensions.
            int imageWidth2 = imageHeight;
            int imageHeight2 = imageWidth;
            // Pick the first preview size that is equal or bigger, or pick the last (biggest) option if we cannot
            // reach the initial settings of imageWidth/imageHeight.
            for (int i = 0; i < sizes.size(); i++) {
                if ((sizes.get(i).width >= imageWidth2 && sizes.get(i).height >= imageHeight2) || i == sizes.size() - 1) {
                    imageWidth2 = sizes.get(i).width;
                    imageHeight2 = sizes.get(i).height;
                    Log.v(LOG_TAG, "Changed to supported resolution: " + imageWidth2 + "x" + imageHeight2);
                    break;
                }
            }
            // NOTE(review): if the chosen preview size differs from the swapped
            // imageWidth/imageHeight, the Frame buffers and the FFmpegFrameFilter
            // (sized elsewhere) no longer match the data delivered here — the
            // selected size should be propagated back to the filter/recorder setup.

            camParams.setPreviewSize(imageWidth2, imageHeight2);
            camParams.setPreviewFrameRate(frameRate);
            mCamera.setParameters(camParams);

            // Set the holder (which might have changed) again
            try {
                mCamera.setPreviewDisplay(holder);
                mCamera.setPreviewCallback(CameraView.this);
                startPreview();
            } catch (Exception e) {
                Log.e(LOG_TAG, "Could not set preview display in surfaceChanged");
            }
        }

        @Override
        public void surfaceDestroyed(SurfaceHolder holder) {
            Log.v("StreamActivity","Surface Destroyed");
            try {
                // BUG FIX: the original called mHolder.addCallback(null), which does
                // not unregister anything; remove this view's callback instead.
                mHolder.removeCallback(CameraView.this);
                mCamera.setPreviewCallback(null);
            } catch (RuntimeException e) {
                // The camera has probably just been released, ignore.
            }
        }

        /** Starts the camera preview if it is not already running. */
        public void startPreview() {
            if (!isPreviewOn && mCamera != null) {
                isPreviewOn = true;
                mCamera.startPreview();
            }
        }

        /** Stops the camera preview if it is currently running. */
        public void stopPreview() {
            if (isPreviewOn && mCamera != null) {
                isPreviewOn = false;
                mCamera.stopPreview();
            }
        }

        @Override
        public void onPreviewFrame(byte[] data, Camera camera) {
            //Log.v("StreamActivity","onPreviewFrame");
            // Until audio capture is running, keep resetting the clock so A/V
            // timestamps start together.
            if (audioRecord == null || audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
                startTime = System.currentTimeMillis();
                return;
            }
            if (RECORD_LENGTH > 0) {
                // Ring-buffer mode: write into the next pre-allocated slot.
                int i = imagesIndex++ % images.length;
                yuvImage = images[i];
                timestamps[i] = 1000 * (System.currentTimeMillis() - startTime);
            }
            /* get video data */
            if (yuvImage != null && recording) {
                // Copy the raw NV21 bytes into the Frame's backing buffer.
                ((ByteBuffer)yuvImage.image[0].position(0)).put(data);

                if (RECORD_LENGTH <= 0) try {
                    Log.v(LOG_TAG,"Writing Frame");
                    // Recorder timestamps are in microseconds; only move forward.
                    long t = 1000 * (System.currentTimeMillis() - startTime);
                    if (t > recorder.getTimestamp()) {
                        recorder.setTimestamp(t);
                    }

                    // Rotate the landscape buffer to portrait, then record every
                    // frame the filter produces.
                    filter.push(yuvImage);

                    Frame frame;
                    while ((frame = filter.pull()) != null) {
                        recorder.record(frame,avutil.AV_PIX_FMT_NV21);
                    }
                } catch (FFmpegFrameRecorder.Exception e) {
                    Log.v(LOG_TAG, e.getMessage());
                    e.printStackTrace();
                } catch (FrameFilter.Exception e) {
                    e.printStackTrace();
                }
            }
        }
    }

我该怎么做才能以纵向模式和全屏录制视频?提前致谢。

0 个答案:

没有答案