Bitmap to YUV: recorded video contains only green pixels

Date: 2016-01-05 12:44:46

Tags: android video bitmap ffmpeg yuv

I am trying to convert a bitmap to YUV and record that YUV with an FFmpeg frame recorder... I am getting a video output with nothing but green pixels, yet when I check the video's properties it shows the frame rate and resolution that were set...

The YUV encoding part is correct, but I think I have made a mistake somewhere else, mainly in getting the YUV bytes back to the recording part (getByte(byte[] yuv)), because only the yuv.length printed there shows 0 in the console; all the other methods print a large value in the console...

Please help...

 @Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);
    directory.mkdirs();

    addListenerOnButton();

    play=(Button)findViewById(R.id.buttonplay);
    stop=(Button)findViewById(R.id.buttonstop);
    record=(Button)findViewById(R.id.buttonstart);

    stop.setEnabled(false);
    play.setEnabled(false);


    record.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            startRecording();
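            // note: a brand-new empty array is passed in here, so yuv.length is 0 inside getByte()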
            getByte(new byte[]{});
        }
    });

    stop.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            stopRecording();
        }
    });


    play.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(String.valueOf(asmileys)));
            intent.setDataAndType(Uri.parse(String.valueOf(asmileys)), "video/mp4");
            startActivity(intent);
            Toast.makeText(getApplicationContext(), "Playing Video", Toast.LENGTH_LONG).show();
        }
    });

 }

......//......



public void getByte(byte[] yuv) {
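    // the byte[] produced by getNV21 is discarded here; the (empty) yuv parameter
    // is what later gets written into the recorder's frame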
    getNV21(640, 480, bitmap);
    System.out.println(yuv.length + " ");
    if (audioRecord == null || audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
        startTime = System.currentTimeMillis();
        return;
    }
    if (RECORD_LENGTH > 0) {
        int i = imagesIndex++ % images.length;
        yuvimage = images[i];
        timestamps[i] = 1000 * (System.currentTimeMillis() - startTime);
    }
    /* get video data */
    if (yuvimage != null && recording) {
        ((ByteBuffer) yuvimage.image[0].position(0)).put(yuv);

        if (RECORD_LENGTH <= 0) {
            try {
                long t = 1000 * (System.currentTimeMillis() - startTime);
                if (t > recorder.getTimestamp()) {
                    recorder.setTimestamp(t);
                }
                recorder.record(yuvimage);
            } catch (FFmpegFrameRecorder.Exception e) {
                e.printStackTrace();
            }
        }
    }
}

public byte[] getNV21(int inputWidth, int inputHeight, Bitmap bitmap) {

    int[] argb = new int[inputWidth * inputHeight];

    bitmap.getPixels(argb, 0, inputWidth, 0, 0, inputWidth, inputHeight);

    byte[] yuv = new byte[inputWidth * inputHeight * 3 / 2];
    encodeYUV420SP(yuv, argb, inputWidth, inputHeight);

    bitmap.recycle();
    System.out.println(yuv.length + " ");
    return yuv;

}

void encodeYUV420SP(byte[] yuv420sp, int[] argb, int width, int height) {
    final int frameSize = width * height;

    int yIndex = 0;
    int uIndex = frameSize;
    int vIndex = frameSize;
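    // note: uIndex and vIndex start at the same offset, so the chroma writes below
    // land on overlapping positions instead of interleaving V and U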
    System.out.println(yuv420sp.length + " " + frameSize);

    int a, R, G, B, Y, U, V;
    int index = 0;
    for (int j = 0; j < height; j++) {
        for (int i = 0; i < width; i++) {

            a = (argb[index] & 0xff000000) >> 24; // a is not used obviously
            R = (argb[index] & 0xff0000) >> 16;
            G = (argb[index] & 0xff00) >> 8;
            B = (argb[index] & 0xff) >> 0;

            // well known RGB to YUV algorithm

            Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
            U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
            V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;

            // NV21 has a plane of Y and interleaved planes of VU each sampled by a factor of 2
            //    meaning for every 4 Y pixels there are 1 V and 1 U.  Note the sampling is every other
            //    pixel AND every other scanline.
            yuv420sp[yIndex++] = (byte) ((Y < 0) ? 0 : ((Y > 255) ? 255 : Y));
            if (j % 2 == 0 && index % 2 == 0) {
                yuv420sp[uIndex++] = (byte) ((U < 0) ? 0 : ((U > 255) ? 255 : U));
                yuv420sp[vIndex++] = (byte) ((V < 0) ? 0 : ((V > 255) ? 255 : V));
            }

            index++;
        }
    }
}
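For comparison, the widely circulated version of this RGB-to-NV21 routine advances a single uvIndex and writes V before U, so the chroma samples interleave correctly after the Y plane. A minimal sketch of that variant (same math as above, only the chroma indexing differs):

void encodeYUV420SPInterleaved(byte[] yuv420sp, int[] argb, int width, int height) {
    final int frameSize = width * height;
    int yIndex = 0;
    int uvIndex = frameSize; // single index: V and U alternate after the Y plane
    int index = 0;
    for (int j = 0; j < height; j++) {
        for (int i = 0; i < width; i++) {
            int R = (argb[index] & 0xff0000) >> 16;
            int G = (argb[index] & 0xff00) >> 8;
            int B = (argb[index] & 0xff);

            int Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
            int U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
            int V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;

            yuv420sp[yIndex++] = (byte) (Y < 0 ? 0 : (Y > 255 ? 255 : Y));
            if (j % 2 == 0 && index % 2 == 0) {
                yuv420sp[uvIndex++] = (byte) (V < 0 ? 0 : (V > 255 ? 255 : V)); // V first in NV21
                yuv420sp[uvIndex++] = (byte) (U < 0 ? 0 : (U > 255 ? 255 : U));
            }
            index++;
        }
    }
}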

 .....//.....

public void addListenerOnButton() {
    image = (ImageView) findViewById(R.id.imageView);
    image.setDrawingCacheEnabled(true);
    image.buildDrawingCache();
    bitmap = image.getDrawingCache();
    System.out.println(bitmap.getByteCount() + " ");

    button = (Button) findViewById(R.id.btn1);
    button.setOnClickListener(new OnClickListener() {
        @Override
        public void onClick(View view) {
            image.setImageResource(R.drawable.image1);
        }
    });

......//......

Edit 1:

I made some changes to the code above:

    record.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            startRecording();
            getByte();
        }
    });
.....//....

public void getByte() {
    byte[] yuv = getNV21(640, 480, bitmap);

So now, in the console, the yuv length I get in this method is the same as the yuv length from the getNV21 method...

But now I get half of the screen black and half green (black above, green below) in the recorded video...
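As a sanity check on the buffer sizes (my own back-of-envelope numbers, not from the code above): a 640x480 NV21 frame holds 640 * 480 = 307200 luma bytes plus half that again for chroma, 460800 bytes in total. Any part of the recorder's frame buffer that is never written stays zero, and all-zero YUV decodes to green, which would fit a half-filled frame:

// buffer-size sanity check for a 640x480 NV21 frame
int frameSize = 640 * 480;          // 307200 Y bytes
int nv21Length = frameSize * 3 / 2; // 460800 bytes total (Y plane + interleaved VU)
// any bytes left at 0 in the frame decode as green (Y=0, U=0, V=0)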

If I add these lines to the onCreate method:

 image = (ImageView) findViewById(R.id.imageView);
 image.setDrawingCacheEnabled(true);
 image.buildDrawingCache();
 bitmap = image.getDrawingCache();

...then I do get distorted frames in the video (the frame is 1/4 of the displayed image, with the colors mixed up)...
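One thing I suspect (just an assumption on my part, not confirmed): the drawing-cache bitmap takes the ImageView's size, not 640x480, so getPixels() and the NV21 buffer disagree about the dimensions. Scaling the cached bitmap to the recording resolution first would rule that out; Bitmap.createScaledBitmap is the standard Android API for this:

// sketch: force the cached bitmap to the recording size before encoding,
// so getPixels() and the 640x480 NV21 buffer agree on dimensions
Bitmap cached = image.getDrawingCache();
Bitmap scaled = Bitmap.createScaledBitmap(cached, 640, 480, true);
byte[] yuv = getNV21(640, 480, scaled);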

What I am trying to learn is image handling and how byte[] data flows from one method to another; but I am still a noob.
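From what I understand, the usual pattern is for each stage to return its buffer and for the caller to pass it straight to the next stage, rather than printing the length of an unrelated parameter. A minimal sketch wiring the methods above together under that pattern (recordBitmapFrame is just a placeholder name; yuvimage and recorder are the fields from the code above):

// minimal data-flow sketch: each stage returns its buffer and the caller passes it on
void recordBitmapFrame() {
    byte[] yuv = getNV21(640, 480, bitmap);                 // stage 1: encode bitmap to NV21 bytes
    ((ByteBuffer) yuvimage.image[0].position(0)).put(yuv);  // stage 2: copy the bytes into the Frame
    // stage 3: recorder.record(yuvimage), as in getByte() above
}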

Please help...!

0 Answers:

No answers