将图像编码为电影文件

时间:2014-04-03 17:18:44

标签: android ffmpeg mediacodec

我正在尝试把一组JPG图片保存为电影文件。我试过jcodec,生成的视频在我的S3上可以正常播放,但在其他设备(包括VLC和Windows Media Player)上都无法播放。

我刚刚花了大半天时间研究MediaCodec。虽然它要求的SDK版本较高,只能帮助使用Jelly Bean及以上版本的用户,但我仍然无法弄清楚如何把帧送入编码器,然后写入文件。

理想情况下,我还希望能向下支持到SDK 9/8。

是否有人可以分享一些代码,帮助我用MediaCodec或其他方案实现这一点?如果你的答案是ffmpeg,我也愿意尝试,但我几乎没有JNI经验,需要一份非常好的教程。

到目前为止MediaCodec的代码

/**
 * Encodes a list of still images (JPEGs on disk) into an H.264/MP4 movie
 * using MediaCodec + MediaMuxer. MediaMuxer requires API 18 (JELLY_BEAN_MR2).
 *
 * Usage: new EncodeAndMux(paths, outPath).execute(); register an
 * {@link EncodeListener} to be notified of success/failure on the UI thread.
 */
public class EncodeAndMux extends AsyncTask<Integer, Void, Boolean> {
    /** Target encoding bit rate, bits per second. */
    private static int bitRate = 2000000;
    /** Floor for KEY_MAX_INPUT_SIZE; the real value is computed from the
     *  frame dimensions in doInBackground() so large frames cannot overflow
     *  the encoder's input buffer (this was a fixed guess before and crashed
     *  on high-resolution devices). */
    private static int MAX_INPUT = 100000;
    private static String mimeType = "video/avc";

    private int frameRate = 15;
    private int colorFormat;     // chosen from the encoder's capabilities
    private int stride = 1;      // row stride in pixels, derived from width
    private int sliceHeight = 2; // luma plane height, derived from height

    private MediaCodec encoder = null;
    private MediaFormat inputFormat;
    private MediaCodecInfo codecInfo = null;
    private MediaMuxer muxer;
    private boolean mMuxerStarted = false;
    private int mTrackIndex = 0;
    private long presentationTime = 0; // microseconds
    private Paint bmpPaint;

    /** dequeue timeout — MICROseconds (10 ms), per MediaCodec's API. */
    private static int WAITTIME = 10000;
    private static String TAG = "ENCODE";

    private ArrayList<String> mFilePaths;
    private String mPath;

    private EncodeListener mListener;
    private int width = 320;
    private int height = 240;
    private double mSpeed = 1; // output frames per second

    /**
     * @param filePaths paths of the images to encode, in frame order
     * @param savePath  destination path of the .mp4 file
     */
    public EncodeAndMux(ArrayList<String> filePaths, String savePath) {
        mFilePaths = filePaths;
        mPath = savePath;

        // Paint used when drawing each source bitmap onto the frame canvas.
        bmpPaint = new Paint();
        bmpPaint.setAntiAlias(true);
        bmpPaint.setFilterBitmap(true);
        bmpPaint.setDither(true);
    }

    // Misspelling kept for source compatibility with existing callers.
    public void setListner(EncodeListener listener) {
        mListener = listener;
    }

    /** @deprecated misspelled; kept for compatibility — use {@link #setSpeed(int)}. */
    public void setSpead(int speed) {
        setSpeed(speed);
    }

    /** Sets the playback speed in frames per second. */
    public void setSpeed(int speed) {
        mSpeed = speed;
    }

    public double getSpeed() {
        return mSpeed;
    }

    /** Presentation timestamp (microseconds) of frame frameIndex at mSpeed fps. */
    private long computePresentationTime(int frameIndex) {
        final long ONE_SECOND = 1000000; // microseconds per second
        return (long) (frameIndex * (ONE_SECOND / mSpeed));
    }

    /** Result callback, invoked on the UI thread from onPostExecute(). */
    public interface EncodeListener {
        public void finished();
        public void errored();
    }

    @TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
    @Override
    protected Boolean doInBackground(Integer... params) {

        try {
            muxer = new MediaMuxer(mPath, OutputFormat.MUXER_OUTPUT_MPEG_4);
        } catch (Exception e) {
            // Without a muxer there is nowhere to write. The original code
            // fell through here and crashed later with an NPE.
            e.printStackTrace();
            return false;
        }

        try {
            // Find an encoder that supports the mime type.
            codecInfo = selectEncoder(mimeType);
            if (codecInfo == null) {
                Log.e(TAG, "No encoder found for " + mimeType);
                return false;
            }
            Log.d(TAG, "Found " + codecInfo.getName() + " supporting " + mimeType);

            colorFormat = selectColorFormat(codecInfo, mimeType);
            Log.d(TAG, "Using color format " + colorFormat);

            // Determine width, height and slice sizes.
            if (codecInfo.getName().equals("OMX.TI.DUCATI1.VIDEO.H264E")) {
                // This codec doesn't support a width not a multiple of 16,
                // so round down.
                width &= ~15;
            }

            stride = width;
            sliceHeight = height;

            if (codecInfo.getName().startsWith("OMX.Nvidia.")) {
                // NVIDIA encoders require 16-aligned stride and slice height.
                stride = (stride + 15) / 16 * 16;
                sliceHeight = (sliceHeight + 15) / 16 * 16;
            }

            inputFormat = MediaFormat.createVideoFormat(mimeType, width, height);
            inputFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
            inputFormat.setInteger(MediaFormat.KEY_FRAME_RATE, frameRate);
            inputFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
            inputFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5);
            // One YUV420 frame is stride * sliceHeight * 3 / 2 bytes; sizing
            // the input buffer from the real frame dimensions (instead of a
            // fixed constant) prevents the crashes seen on high-res devices.
            int frameBytes = stride * sliceHeight * 3 / 2;
            inputFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE,
                    Math.max(MAX_INPUT, frameBytes));

            encoder = MediaCodec.createByCodecName(codecInfo.getName());
            encoder.configure(inputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            encoder.start();

            encodeFrames();
            return true;
        } catch (Exception e) {
            // Surface the failure through onPostExecute() -> errored();
            // previously errored() could never fire.
            e.printStackTrace();
            return false;
        } finally {
            releaseEncoderAndMuxer();
        }
    }

    /** Feeds every image to the encoder and drains its output into the muxer. */
    @TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
    private void encodeFrames() {
        ByteBuffer[] inputBuffers = encoder.getInputBuffers();
        ByteBuffer[] outputBuffers = encoder.getOutputBuffers();
        BufferInfo info = new BufferInfo();

        for (int i = 0; i < mFilePaths.size(); i++) {

            // Decode with inSampleSize so huge JPEGs don't blow the heap.
            Bitmap bitmapIn = Images.decodeSampledBitmapFromPath(mFilePaths.get(i), width, height);

            // Draw the scaled image centred on a blank, frame-sized bitmap.
            Bitmap bitmap = Bitmap.createBitmap(width, height, Config.ARGB_8888);
            Canvas canvas = new Canvas(bitmap);
            canvas.drawBitmap(bitmapIn,
                    (bitmap.getWidth() / 2) - (bitmapIn.getWidth() / 2),
                    (bitmap.getHeight() / 2) - (bitmapIn.getHeight() / 2), bmpPaint);

            Log.d(TAG, "Bitmap width: " + bitmapIn.getWidth() + " height: " + bitmapIn.getHeight()
                    + " WIDTH: " + width + " HEIGHT: " + height);
            byte[] dat = getNV12(width, height, bitmap);
            bitmap.recycle();

            int inputBufferIndex = encoder.dequeueInputBuffer(WAITTIME);
            Log.i("DAT", "Size= " + dat.length);
            if (inputBufferIndex < 0) {
                // No input buffer available within WAITTIME; the frame is
                // dropped (this matches the original behaviour).
                continue;
            }

            ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
            // Reset position/limit BEFORE writing — the original cleared the
            // buffer only after it had been queued back to the codec.
            inputBuffer.clear();
            inputBuffer.put(dat);
            presentationTime = computePresentationTime(i);

            // BUG FIX: the last valid index is size() - 1. The original
            // compared i == size(), which is never true inside the loop, so
            // BUFFER_FLAG_END_OF_STREAM was never sent and the stream was
            // never properly finalised.
            boolean lastFrame = (i == mFilePaths.size() - 1);
            if (lastFrame) {
                encoder.queueInputBuffer(inputBufferIndex, 0, dat.length, presentationTime,
                        MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                Log.i(TAG, "Last Frame");
            } else {
                encoder.queueInputBuffer(inputBufferIndex, 0, dat.length, presentationTime, 0);
            }

            outputBuffers = drainEncoder(outputBuffers, info);
        }
    }

    /**
     * Pulls all currently pending output from the encoder and writes it to
     * the muxer. Returns the (possibly refreshed) output buffer array.
     */
    @TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
    private ByteBuffer[] drainEncoder(ByteBuffer[] outputBuffers, BufferInfo info) {
        while (true) {
            int outputBufferIndex = encoder.dequeueOutputBuffer(info, WAITTIME);
            Log.i("BATA", "outputBufferIndex=" + outputBufferIndex);
            if (outputBufferIndex >= 0) {
                ByteBuffer encodedData = outputBuffers[outputBufferIndex];
                if (encodedData == null) {
                    throw new RuntimeException("encoderOutputBuffer " + outputBufferIndex +
                            " was null");
                }

                if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    // The codec config data was pulled out and fed to the muxer
                    // when we got the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
                    Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
                    info.size = 0;
                }

                if (info.size != 0) {
                    if (!mMuxerStarted) {
                        throw new RuntimeException("muxer hasn't started");
                    }

                    // Adjust the ByteBuffer window to match BufferInfo.
                    encodedData.position(info.offset);
                    encodedData.limit(info.offset + info.size);

                    muxer.writeSampleData(mTrackIndex, encodedData, info);
                    Log.d(TAG, "sent " + info.size + " bytes to muxer");
                }

                encoder.releaseOutputBuffer(outputBufferIndex, false);
                // NOTE: do NOT touch input/output buffers after queue/release —
                // ownership is back with the codec. The original cleared both
                // here, which is unsafe.

                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    break; // stream finished
                }

            } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // The muxer track must be created from the encoder's negotiated
                // output format, which arrives exactly once, before any data.
                MediaFormat opmediaformat = encoder.getOutputFormat();
                if (!mMuxerStarted) {
                    mTrackIndex = muxer.addTrack(opmediaformat);
                    muxer.start();
                    mMuxerStarted = true;
                }
                Log.i(TAG, "op_buf_format_changed: " + opmediaformat);
            } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                outputBuffers = encoder.getOutputBuffers();
                Log.d(TAG, "Output Buffer changed " + outputBuffers);
            } else if (outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                break; // nothing pending right now
            } else {
                // Unexpected state, ignore it.
                Log.d(TAG, "Unexpected State " + outputBufferIndex);
            }
        }
        return outputBuffers;
    }

    /** Stops and releases the codec and muxer; safe to call in any state. */
    @TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
    private void releaseEncoderAndMuxer() {
        if (encoder != null) {
            try {
                // The original called flush() before stop(), which discards
                // any output still queued in the codec; stop() alone is right.
                encoder.stop();
            } catch (Exception e) {
                e.printStackTrace();
            }
            encoder.release();
            encoder = null;
        }

        if (muxer != null) {
            try {
                // stop() throws IllegalStateException if start() never ran
                // (e.g. an empty file list), so only stop a started muxer.
                if (mMuxerStarted) {
                    muxer.stop();
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
            muxer.release();
            muxer = null;
        }
    }

    /**
     * Returns the first encoder advertising the given mime type, or null.
     * NOTE: the original had a second pass meant to prefer an AVC
     * High-profile/Level-4 codec, but it compared Strings with != (reference
     * equality) and therefore never matched; that dead pass was removed.
     */
    private static MediaCodecInfo selectEncoder(String mime) {
        int numCodecs = MediaCodecList.getCodecCount();
        for (int i = 0; i < numCodecs; i++) {
            MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
            if (!info.isEncoder()) {
                continue;
            }
            for (String type : info.getSupportedTypes()) {
                if (type.equals(mime)) {
                    return info;
                }
            }
        }
        return null;
    }

    /** Picks the first YUV420 colour format the encoder supports (0 if none). */
    private static int selectColorFormat(MediaCodecInfo info, String mime) {
        MediaCodecInfo.CodecCapabilities capabilities = info.getCapabilitiesForType(mime);
        for (int format : capabilities.colorFormats) {
            switch (format) {
                case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
                case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
                case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
                case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
                case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
                    return format;
            }
        }
        return 0;
    }

    @Override
    protected void onPostExecute(Boolean result) {
        if (result) {
            if (mListener != null)
                mListener.finished();
        } else {
            if (mListener != null)
                mListener.errored();
        }
        super.onPostExecute(result);
    }

    /**
     * Converts an ARGB bitmap into an NV12 (YUV420 semi-planar) byte array
     * and recycles the bitmap. Output size is inputWidth * inputHeight * 3 / 2.
     */
    byte[] getNV12(int inputWidth, int inputHeight, Bitmap scaled) {
        int[] argb = new int[inputWidth * inputHeight];
        scaled.getPixels(argb, 0, inputWidth, 0, 0, inputWidth, inputHeight);
        byte[] yuv = new byte[inputWidth * inputHeight * 3 / 2];
        encodeYUV420SP(yuv, argb, inputWidth, inputHeight);
        scaled.recycle();
        return yuv;
    }

    /**
     * RGB -> YUV420 semi-planar (NV12: full-resolution Y plane followed by
     * interleaved Cb,Cr at quarter resolution), ITU-R BT.601 integer
     * approximation. The original labelled the chroma variables the wrong way
     * round (its "V" actually held Cb); the bytes written are identical, only
     * the names were fixed. Assumes width is even — TODO confirm for callers
     * other than this class (here width is 320 or rounded to a multiple of 16).
     */
    void encodeYUV420SP(byte[] yuv420sp, int[] argb, int width, int height) {
        final int frameSize = width * height;
        int yIndex = 0;
        int uvIndex = frameSize; // chroma plane starts right after the Y plane
        int index = 0;
        for (int j = 0; j < height; j++) {
            for (int i = 0; i < width; i++) {

                int R = (argb[index] & 0xff0000) >> 16;
                int G = (argb[index] & 0xff00) >> 8;
                int B = argb[index] & 0xff; // alpha is intentionally ignored

                int Y  = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
                int Cb = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
                int Cr = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;

                yuv420sp[yIndex++] = clampToByte(Y);
                // One Cb,Cr pair per 2x2 pixel block (even rows, even columns).
                if (j % 2 == 0 && index % 2 == 0) {
                    yuv420sp[uvIndex++] = clampToByte(Cb);
                    yuv420sp[uvIndex++] = clampToByte(Cr);
                }

                index++;
            }
        }
    }

    /** Clamps an int to [0, 255] and narrows it to a byte. */
    private static byte clampToByte(int v) {
        return (byte) (v < 0 ? 0 : (v > 255 ? 255 : v));
    }
}

现在已经在我的4台设备上进行了测试并且工作正常,是否有办法

1/ 如何计算MAX_INPUT?(设置得过大时在N7 II上会崩溃,我不希望发布之后出现这种情况) 2/ 有没有支持API 16的方案? 3/ 我需要设置stride和slice-height吗?

谢谢

1 个答案:

答案 0 :(得分:1)

如果您可以接受对第三方应用(3rd party app)的依赖,则可以用少量Java代码来控制ffmpeg。我不确定这个项目是否尽可能利用了硬件编码器。

或者,在API 9及以上,您可以使用stagefright(您需要通过JNI与之通信,而且除了AOSP源码之外没有公开的文档)。

您可以构建自己的ffmpeg库,例如http://www.origenboard.org/wiki/index.php/FFmpeg_on_Android