Adding a watermark to a video

Date: 2018-07-04 17:08:44

Tags: android video opengl-es mediamuxer gles20

I created an app that records 10-second camera videos without sound. Here is the relevant part of the code:

...
// mMediaFormat is configured earlier (omitted above)
MediaCodec mMediaCodec = MediaCodec.createEncoderByType("video/avc");
mMediaCodec.configure(mMediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
Surface mSurface = mMediaCodec.createInputSurface();
EGLDisplay mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
int[] e = new int[2];
EGL14.eglInitialize(mEGLDisplay, e, 0, e, 1);
EGLConfig[] mEGLConfig = new EGLConfig[1];
// 12610 = EGL_RECORDABLE_ANDROID (0x3142), needed for encoder input surfaces
EGL14.eglChooseConfig(mEGLDisplay, new int[]{
        EGL14.EGL_RED_SIZE, 8, EGL14.EGL_GREEN_SIZE, 8,
        EGL14.EGL_BLUE_SIZE, 8, EGL14.EGL_ALPHA_SIZE, 8,
        EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
        12610, 1, EGL14.EGL_NONE}, 0, mEGLConfig, 0, 1, new int[1], 0);
EGLContext mEGLContext = EGL14.eglCreateContext(mEGLDisplay, mEGLConfig[0], EGL14.EGL_NO_CONTEXT,
        new int[]{EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, EGL14.EGL_NONE}, 0);
EGLSurface mEGLSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, mEGLConfig[0], mSurface,
        new int[]{EGL14.EGL_NONE}, 0);
mMediaCodec.start();
MediaMuxer mMediaMuxer = new MediaMuxer(new File(Environment.getExternalStorageDirectory(), "ipcamera.mp4").getPath(), OutputFormat.MUXER_OUTPUT_MPEG_4);
EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext);
// 4 vertices × 5 floats (x, y, z, s, t), interleaved: 80 bytes
FloatBuffer mFloatBuffer = ByteBuffer.allocateDirect(80).order(ByteOrder.nativeOrder()).asFloatBuffer();
mFloatBuffer.put(new float[]{-1, -1, 0, 0, 0, 1, -1, 0, 1, 0, -1, 1, 0, 0, 1, 1, 1, 0, 1, 1}).position(0);
float[] sm1 = new float[16], sm2 = new float[16];
Matrix.setIdentityM(sm1, 0);
int program = GLES20.glCreateProgram();
int f = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
int[] params = new int[1];
GLES20.glShaderSource(f,
        "uniform mat4 uMVPMatrix;\n" +
        "uniform mat4 uSTMatrix;\n" +
        "attribute vec4 aPosition;\n" +
        "attribute vec4 aTextureCoord;\n" +
        "varying vec2 vTextureCoord;\n" +
        "void main() {\n" +
        "    gl_Position = uMVPMatrix * aPosition;\n" +
        "    vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
        "}\n");
GLES20.glCompileShader(f);
GLES20.glGetShaderiv(f, GLES20.GL_COMPILE_STATUS, params, 0);
GLES20.glAttachShader(program, f);
f = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
GLES20.glShaderSource(f,
        "#extension GL_OES_EGL_image_external : require\n" +
        "precision mediump float;\n" +
        "varying vec2 vTextureCoord;\n" +
        "uniform samplerExternalOES sTexture;\n" +
        "void main() {\n" +
        "    gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
        "}\n");
GLES20.glCompileShader(f);
GLES20.glGetShaderiv(f, GLES20.GL_COMPILE_STATUS, params, 0);
GLES20.glAttachShader(program, f);
GLES20.glLinkProgram(program);
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, params, 0);
if (params[0] != GLES20.GL_TRUE) GLES20.glDeleteProgram(program);
int maPositionHandle = GLES20.glGetAttribLocation(program, "aPosition");
int maTextureHandle = GLES20.glGetAttribLocation(program, "aTextureCoord");
int muMVPMatrixHandle = GLES20.glGetUniformLocation(program, "uMVPMatrix");
int muSTMatrixHandle = GLES20.glGetUniformLocation(program, "uSTMatrix");
int[] texName = new int[1];
GLES20.glGenTextures(1, texName, 0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texName[0]);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
(mSurfaceTexture = new SurfaceTexture(texName[0])).setOnFrameAvailableListener(this);
mCamera.setPreviewTexture(mSurfaceTexture);
mCamera.startPreview();
long a = System.currentTimeMillis();
BufferInfo mBufferInfo = new BufferInfo();
boolean b = true;
int c, d = 0;
do {
    synchronized (VideoRecording.this.b) {
        if (!VideoRecording.this.b) continue;
        VideoRecording.this.b = false;
    }
    mSurfaceTexture.updateTexImage();
    mSurfaceTexture.getTransformMatrix(sm1);
    GLES20.glClearColor(0, 0, 0, 1);
    GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
    GLES20.glUseProgram(program);
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texName[0]);
    mFloatBuffer.position(0);
    GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, 20, mFloatBuffer);
    GLES20.glEnableVertexAttribArray(maPositionHandle);
    mFloatBuffer.position(3);
    GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false, 20, mFloatBuffer);
    GLES20.glEnableVertexAttribArray(maTextureHandle);
    Matrix.setIdentityM(sm2, 0);
    GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, sm2, 0);
    GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, sm1, 0);
    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
    EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurface);
    if (!(b = System.currentTimeMillis() - a < 10000)) mMediaCodec.signalEndOfInputStream();
    while ((c = mMediaCodec.dequeueOutputBuffer(mBufferInfo, 10000)) != MediaCodec.INFO_TRY_AGAIN_LATER || !b) {
        if (c == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            d = mMediaMuxer.addTrack(mMediaCodec.getOutputFormat());
            mMediaMuxer.start();
        } else if (c >= 0) {
            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                mBufferInfo.size = 0;
            } else {
                ByteBuffer buffer = mMediaCodec.getOutputBuffers()[c];
                buffer.position(mBufferInfo.offset);
                buffer.limit(mBufferInfo.offset + mBufferInfo.size);
                mMediaMuxer.writeSampleData(d, buffer, mBufferInfo);
            }
            mMediaCodec.releaseOutputBuffer(c, false);
            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) break;
        }
    }
} while (b);
mMediaCodec.stop();
mMediaCodec.release();
EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
EGL14.eglReleaseThread();
EGL14.eglTerminate(mEGLDisplay);
mSurface.release();
...
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
    synchronized (VideoRecording.this.b) {
        VideoRecording.this.b = true;
    }
}

In fact, I took all of this code from the well-known CameraToMpegTest.java and tried to simplify it as much as possible by collapsing most of it into the single block shown above. I have been programming in Java for 3 years, but this is my first time using the Android OpenGL libraries. I have read many tutorials on the subject, but found very little about recording video through MediaMuxer and the built-in OpenGL libraries; only the Grafika project contains anything useful. How can I draw a watermark (for example, R.mipmap.ic_launcher) onto the video at specified coordinates? I could not find much on the internet, but I did see the following code on some forums:

Bitmap bitmap = BitmapFactory.decodeResource(getResources(), R.mipmap.ic_launcher);

//Generate one texture pointer...
gl.glGenTextures(1, textures, 0);

//...and bind it to our array
gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);

//Create Nearest Filtered Texture
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);

//Different possible texture parameters, e.g. GL10.GL_CLAMP_TO_EDGE
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S, GL10.GL_REPEAT);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T, GL10.GL_REPEAT);

//Use the Android GLUtils to specify a two-dimensional texture image from our bitmap
GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);

//Clean up
bitmap.recycle();

But I don't really know where to put this code... I have tried putting it almost everywhere, but either nothing happened or my video came out corrupted. There was also a Stack Overflow question (I lost the link) where programmers confirmed that this requires two GLES20 programs... Please show me the correct code for adding a watermark to the video, and where to place it. Or could this perhaps be done without OpenGL at all, using only MediaMuxer and MediaCodec?
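
As far as I understand it, that two-program approach would boil down to uploading the watermark bitmap into an ordinary GL_TEXTURE_2D (as in the snippet above) and then, each frame, drawing it as a second alpha-blended quad right after the camera quad, i.e. between glDrawArrays(...) and eglSwapBuffers(...). A rough, untested sketch; mWatermarkProgram, mWatermarkTex and the handles are illustrative names for a second program whose fragment shader uses a plain sampler2D instead of samplerExternalOES:

// Overlay pass, executed after the camera-frame draw, before eglSwapBuffers():
GLES20.glEnable(GLES20.GL_BLEND);
GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);

GLES20.glUseProgram(mWatermarkProgram); // fragment shader samples sampler2D
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mWatermarkTex); // uploaded once via GLUtils.texImage2D

// Reuse the full-screen quad, but shrink it and move it into a corner.
// (Bitmaps upload top-down, so the watermark may come out vertically
// flipped; if so, flip the t coordinates or the bitmap once at load time.)
Matrix.setIdentityM(sm2, 0);
Matrix.translateM(sm2, 0, -0.7f, 0.7f, 0f); // toward the top-left corner
Matrix.scaleM(sm2, 0, 0.25f, 0.25f, 1f);    // about a quarter of the frame
GLES20.glUniformMatrix4fv(mWatermarkMVPHandle, 1, false, sm2, 0);

mFloatBuffer.position(0);
GLES20.glVertexAttribPointer(mWatermarkPosHandle, 3, GLES20.GL_FLOAT, false, 20, mFloatBuffer);
GLES20.glEnableVertexAttribArray(mWatermarkPosHandle);
mFloatBuffer.position(3);
GLES20.glVertexAttribPointer(mWatermarkTexHandle, 2, GLES20.GL_FLOAT, false, 20, mFloatBuffer);
GLES20.glEnableVertexAttribArray(mWatermarkTexHandle);

GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
GLES20.glDisable(GLES20.GL_BLEND);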


Please don't point me to external libraries such as FFMPEG; I have to do this with the built-in Android libraries. My app's minimum API level must be 18 (Android 4.3.1).


@NizaSiwale, this is what I currently have:

[screenshot: current output, no watermark]

But I want this:

[screenshot: desired output, with the watermark overlaid]

1 Answer:

Answer 0 (score: 1)

You can simply take the frames you get from the camera as bitmaps, draw the watermark onto each bitmap, and then encode them into a video with MediaCodec and MediaMuxer.

First, add a preview callback to the camera, and draw the watermark onto each frame's bitmap.
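
Assuming an already-opened android.hardware.Camera instance (mCamera) and a class implementing Camera.PreviewCallback, registering the callback would look roughly like this:

// Sketch: register for NV21 preview frames before starting the preview.
// Assumes `this` implements Camera.PreviewCallback and mCamera is open.
mCamera.setPreviewCallback(this);
mCamera.startPreview();

Inside the callback, each frame is decoded to a bitmap, watermarked, and converted back to NV21: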

private byte[] currentFrame;

@Override
public void onPreviewFrame(byte[] data, Camera camera) {
    Camera.Size previewSize = camera.getParameters().getPreviewSize();
    ByteArrayOutputStream baos = new ByteArrayOutputStream();

    // Decode the NV21 preview buffer to JPEG, then to a Bitmap
    YuvImage yuv = new YuvImage(data, ImageFormat.NV21, previewSize.width, previewSize.height, null);
    yuv.compressToJpeg(new Rect(0, 0, previewSize.width, previewSize.height), YOUR_JPEG_COMPRESSION, baos);
    byte[] rawImage = baos.toByteArray();

    Bitmap bitmap = BitmapFactory.decodeByteArray(rawImage, 0, rawImage.length);
    // Draw the watermark, then convert the result back to NV21 for the encoder
    currentFrame = getNV21(bitmap.getWidth(), bitmap.getHeight(), mark(bitmap, yourWatermark, watermarkLocation));
}

public Bitmap mark(Bitmap src, Bitmap watermark, Point location) {
    int w = src.getWidth();
    int h = src.getHeight();
    Bitmap result = Bitmap.createBitmap(w, h, src.getConfig());

    Canvas canvas = new Canvas(result);
    canvas.drawBitmap(src, 0, 0, null);
    canvas.drawBitmap(watermark, location.x, location.y, null);

    return result;
}
byte[] getNV21(int inputWidth, int inputHeight, Bitmap scaled) {
    int[] argb = new int[inputWidth * inputHeight];
    scaled.getPixels(argb, 0, inputWidth, 0, 0, inputWidth, inputHeight);

    byte[] yuv = new byte[inputWidth * inputHeight * 3 / 2];
    encodeYUV420SP(yuv, argb, inputWidth, inputHeight);

    scaled.recycle();
    return yuv;
}

void encodeYUV420SP(byte[] yuv420sp, int[] argb, int width, int height) {
    final int frameSize = width * height;

    int yIndex = 0;
    int uvIndex = frameSize;

    int a, R, G, B, Y, U, V;
    int index = 0;
    for (int j = 0; j < height; j++) {
        for (int i = 0; i < width; i++) {

            a = (argb[index] & 0xff000000) >>> 24; // alpha, not used
            R = (argb[index] & 0xff0000) >> 16;
            G = (argb[index] & 0xff00) >> 8;
            B = (argb[index] & 0xff);

            // well-known RGB to YUV conversion
            Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
            U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
            V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;

            // NV21 has a plane of Y followed by interleaved VU pairs, each
            // subsampled by 2 in both directions: for every 4 Y pixels there
            // are 1 V and 1 U, taken every other pixel on every other scanline.
            // Note: an encoder configured with COLOR_FormatYUV420SemiPlanar
            // expects NV12 (U before V), so the two chroma writes below may
            // need to be swapped when feeding the encoder.
            yuv420sp[yIndex++] = (byte) ((Y < 0) ? 0 : ((Y > 255) ? 255 : Y));
            if (j % 2 == 0 && index % 2 == 0) {
                yuv420sp[uvIndex++] = (byte) ((V < 0) ? 0 : ((V > 255) ? 255 : V));
                yuv420sp[uvIndex++] = (byte) ((U < 0) ? 0 : ((U > 255) ? 255 : U));
            }

            index++;
        }
    }
}

After that, just use MediaMuxer to create the video from the frames (bitmaps):

private void prepareEncoder() {
    try {
        mBufferInfo = new MediaCodec.BufferInfo();

        mediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
        mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, WIDTH, HEIGHT);
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, calcBitRate());
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.LOLLIPOP) {
            mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
        } else {
            mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
        }
        // 2130708361 = COLOR_FormatSurface, 2135033992 = COLOR_FormatYUV420Flexible,
        // 21 = COLOR_FormatYUV420SemiPlanar
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);

        final MediaFormat audioFormat = MediaFormat.createAudioFormat(MIME_TYPE_AUDIO, SAMPLE_RATE, 1);
        audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
        audioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, AudioFormat.CHANNEL_IN_MONO);
        audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
        audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);

        mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mediaCodec.start();

        mediaCodecForAudio = MediaCodec.createEncoderByType(MIME_TYPE_AUDIO);
        mediaCodecForAudio.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mediaCodecForAudio.start();

        try {
            String outputPath = new File(Environment.getExternalStorageDirectory(), "test.mp4").toString();
            mediaMuxer = new MediaMuxer(outputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        } catch (IOException ioe) {
            throw new RuntimeException("MediaMuxer creation failed", ioe);
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
}
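
The snippet above references several constants and a calcBitRate() helper that are not shown; plausible definitions (the values here are illustrative assumptions, not from the original answer) could look like this:

private static final String MIME_TYPE = "video/avc";             // H.264 video
private static final String MIME_TYPE_AUDIO = "audio/mp4a-latm"; // AAC audio
private static final int WIDTH = 1280;
private static final int HEIGHT = 720;
private static final int FRAME_RATE = 30;
private static final int IFRAME_INTERVAL = 5;    // seconds between I-frames
private static final int SAMPLE_RATE = 44100;
private static final int BIT_RATE = 64000;       // audio bit rate
private static final long TIMEOUT_USEC = 10000;  // 10 ms dequeue timeout

// A common heuristic: about 0.25 bits per pixel per frame
private int calcBitRate() {
    return (int) (0.25f * FRAME_RATE * WIDTH * HEIGHT);
}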

private void bufferEncoder() {
    runnable = new Runnable() {
        @Override
        public void run() {
            prepareEncoder();
            try {
                while (mRunning) {
                    encode();
                }
                encode();
            } finally {
                release();
            }
        }
    };
    Thread thread = new Thread(runnable);
    thread.start();
}

public void encode() {
    while (true) {
        if (!mRunning) {
            break;
        }
        int inputBufIndex = mediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
        long ptsUsec = computePresentationTime(generateIndex);
        if (inputBufIndex >= 0 && currentFrame != null) {
            byte[] input = currentFrame;
            // getInputBuffer()/getOutputBuffer() require API 21; on API 18
            // use getInputBuffers()[inputBufIndex] instead.
            final ByteBuffer inputBuffer = mediaCodec.getInputBuffer(inputBufIndex);
            inputBuffer.clear();
            inputBuffer.put(input);
            mediaCodec.queueInputBuffer(inputBufIndex, 0, input.length, ptsUsec, 0);
            generateIndex++;
            currentFrame = null;
        }
        int encoderStatus = mediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
        if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // no output available yet
            Log.d("CODEC", "no output from encoder available");
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // delivered once, before the first encoded buffer; start the muxer here
            MediaFormat newFormat = mediaCodec.getOutputFormat();
            mTrackIndex = mediaMuxer.addTrack(newFormat);
            mediaMuxer.start();
        } else if (encoderStatus < 0) {
            Log.i("CODEC", "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
        } else if (mBufferInfo.size != 0) {
            ByteBuffer encodedData = mediaCodec.getOutputBuffer(encoderStatus);
            if (encodedData == null) {
                Log.i("CODEC", "encoderOutputBuffer " + encoderStatus + " was null");
            } else {
                encodedData.position(mBufferInfo.offset);
                encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
                mediaMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                mediaCodec.releaseOutputBuffer(encoderStatus, false);
            }
        }
    }
}
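
encode() also calls a computePresentationTime() helper that is not shown. For a fixed frame rate it is typically derived from the number of frames queued so far, roughly like this (a sketch assuming the FRAME_RATE constant above):

// Microsecond timestamp of the n-th queued frame at FRAME_RATE fps
private long computePresentationTime(long generateIndex) {
    return generateIndex * 1000000L / FRAME_RATE;
}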

When you are done, simply release the MediaMuxer and it will automatically save your video:

private void release() {
    if (mediaCodec != null) {
        mediaCodec.stop();
        mediaCodec.release();
        mediaCodec = null;
        Log.i("CODEC", "RELEASE CODEC");
    }
    if (mediaMuxer != null) {
        mediaMuxer.stop();
        mediaMuxer.release();
        mediaMuxer = null;
        Log.i("CODEC", "RELEASE MUXER");
    }
}
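
To stop recording, set mRunning to false: the worker thread started in bufferEncoder() then leaves its loop, and the finally block calls release(), which stops the muxer and finalizes the MP4 file.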