【问题标题】:Adding watermark on video(在视频上添加水印)
【发布时间】:2018-07-04 17:08:43
【问题描述】:

我创建了一个应用程序,它可以在 10 秒内从相机录制无声视频。这是程序代码的一部分:

...
MediaCodec mMediaCodec = MediaCodec.createEncoderByType("video/avc");
mMediaCodec.configure(mMediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
Surface mSurface = mMediaCodec.createInputSurface();
EGLDisplay mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
int[] e = new int[2];
EGL14.eglInitialize(mEGLDisplay, e, 0, e, 1);
EGLConfig[] mEGLConfig = new EGLConfig[1];
EGL14.eglChooseConfig(mEGLDisplay, new int[]{EGL14.EGL_RED_SIZE, 8, EGL14.EGL_GREEN_SIZE, 8, EGL14.EGL_BLUE_SIZE, 8, EGL14.EGL_ALPHA_SIZE, 8, EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT, 12610, 1, EGL14.EGL_NONE}, 0, mEGLConfig, 0, 1, new int[1], 0);
EGLContext mEGLContext = EGL14.eglCreateContext(mEGLDisplay, mEGLConfig[0], EGL14.EGL_NO_CONTEXT, new int[]{EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, EGL14.EGL_NONE}, 0);
EGLSurface mEGLSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, mEGLConfig[0], mSurface, new int[]{EGL14.EGL_NONE}, 0);
mMediaCodec.start();
MediaMuxer mMediaMuxer = new MediaMuxer(new File(Environment.getExternalStorageDirectory(), "ipcamera.mp4").getPath(), OutputFormat.MUXER_OUTPUT_MPEG_4);
EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext);
FloatBuffer mFloatBuffer = ByteBuffer.allocateDirect(80).order(ByteOrder.nativeOrder()).asFloatBuffer();
mFloatBuffer.put(new float[]{-1, -1, 0, 0, 0, 1, -1, 0, 1, 0, -1, 1, 0, 0, 1, 1, 1, 0, 1, 1}).position(0);
float[] sm1 = new float[16], sm2 = new float[16];
Matrix.setIdentityM(sm1, 0);    int program = GLES20.glCreateProgram(), f = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER), params[] = new int[1];
GLES20.glShaderSource(f, "uniform mat4 uMVPMatrix;n" +          "uniform mat4 uSTMatrix;n" +            "attribute vec4 aPosition;n" +          "attribute vec4 aTextureCoord;n" +          "varying vec2 vTextureCoord;n" +            "void main() {n" +          "   gl_Position = uMVPMatrix * aPosition;n" +           "   vTextureCoord = (uSTMatrix * aTextureCoord).xy;n" +             "}n");  GLES20.glCompileShader(f);
GLES20.glGetShaderiv(f, GLES20.GL_COMPILE_STATUS, params, 0);   GLES20.glAttachShader(program, f);  GLES20.glShaderSource(f = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER), "#extension GL_OES_EGL_image_external : requiren" +             "precision mediump float;n" +           "varying vec2 vTextureCoord;n" +            "uniform samplerExternalOES sTexture;n" +           "void main() {n" +          "   gl_FragColor = texture2D(sTexture, vTextureCoord);n" +          "}n");
GLES20.glCompileShader(f);
GLES20.glGetShaderiv(f, GLES20.GL_COMPILE_STATUS, params, 0);   GLES20.glAttachShader(program, f);
GLES20.glLinkProgram(program);
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, params, 0);
if (params[0] != GLES20.GL_TRUE) GLES20.glDeleteProgram(program);
int maPositionHandle = GLES20.glGetAttribLocation(program, "aPosition"), maTextureHandle = GLES20.glGetAttribLocation(program, "aTextureCoord"), muMVPMatrixHandle = GLES20.glGetUniformLocation(program, "uMVPMatrix"), muSTMatrixHandle = GLES20.glGetUniformLocation(program, "uSTMatrix"), texName[] = new int[1];
GLES20.glGenTextures(1, texName, 0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texName[0]);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
(mSurfaceTexture = new SurfaceTexture(texName[0])).setOnFrameAvailableListener(this);
mCamera.setPreviewTexture(mSurfaceTexture);
mCamera.startPreview();
long a = System.currentTimeMillis();
BufferInfo mBufferInfo = new BufferInfo();
boolean b = true;
int c, d = 0;
do {
    synchronized (VideoRecording.this.b) {
        if (!VideoRecording.this.b) continue; else VideoRecording.this.b = false;
    }
    mSurfaceTexture.updateTexImage();
    mSurfaceTexture.getTransformMatrix(sm1);
    GLES20.glClearColor(0, 0, 0, 1);
    GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
    GLES20.glUseProgram(program);
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texName[0]);
    mFloatBuffer.position(0);
    GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, 20, mFloatBuffer);
    GLES20.glEnableVertexAttribArray(maPositionHandle);
    mFloatBuffer.position(3);
    GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false, 20, mFloatBuffer);
    GLES20.glEnableVertexAttribArray(maTextureHandle);
    Matrix.setIdentityM(sm2, 0);
    GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, sm2, 0);
    GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, sm1, 0);
    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
    EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurface);
    if (!(b = System.currentTimeMillis() - a < 10000)) mMediaCodec.signalEndOfInputStream();
    while ((c = mMediaCodec.dequeueOutputBuffer(mBufferInfo, 10000)) != MediaCodec.INFO_TRY_AGAIN_LATER || !b) {
        if (c == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            d = mMediaMuxer.addTrack(mMediaCodec.getOutputFormat());
            mMediaMuxer.start();
        } else if (c >= 0) {
            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) mBufferInfo.size = 0; else mMediaMuxer.writeSampleData(d, (ByteBuffer) mMediaCodec.getOutputBuffers()[c].position(mBufferInfo.offset).limit(mBufferInfo.offset + mBufferInfo.size), mBufferInfo);
            mMediaCodec.releaseOutputBuffer(c, false);
            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) break;
        }
    }
} while (b);
mMediaCodec.stop();
mMediaCodec.release();
EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
EGL14.eglReleaseThread();
EGL14.eglTerminate(mEGLDisplay);
mSurface.release();
...
@Override
// Camera-thread callback: raise the "new frame ready" flag consumed by the render loop.
// NOTE(review): locking on a Boolean field that is reassigned inside the guarded region
// synchronizes on different (and possibly interned, JVM-shared) Boolean objects, so this
// provides no reliable mutual exclusion — use a dedicated final Object lock plus a plain
// boolean flag instead.
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
    synchronized (VideoRecording.this.b) {
        VideoRecording.this.b = true;
    }
}

实际上,我从流行的 CameraToMpegTest.java 导入了所有代码,并尝试通过将大量代码替换为一个块(在上面显示)来使其最简单。我在 Java 语言上编程了 3 年,但首先尝试使用 OpenGL Android 库。我已经阅读了很多关于这个主题的教程,但我发现关于通过MediaMuxer 和内置 OpenGL 库录制视频的信息很少。只有 Grafika 项目包含一些有用的东西。如何在具有指定坐标的视频上添加水印(例如 R.mipmap.ic_launcher)?在互联网上我没有找到太多关于它的信息,我在某个论坛上看到了这段代码:

// Load the watermark image and upload it as a regular 2-D GL texture.
// NOTE(review): this forum snippet uses the legacy GL10 interface; in the asker's
// GLES 2.0 pipeline the equivalent GLES20.glBindTexture / glTexParameterf /
// GLUtils.texImage2D calls should be used instead.
Bitmap bitmap = BitmapFactory.decodeResource(getResources(), R.mipmap.ic_launcher);

//Generate one texture pointer...
gl.glGenTextures(1, textures, 0);

//...and bind it to our array
gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);

//Create Nearest Filtered Texture
// FIX: this call had been fused onto the end of the comment line above,
// so the MIN filter was never actually set.
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);

//Different possible texture parameters, e.g. GL10.GL_CLAMP_TO_EDGE
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S, GL10.GL_REPEAT);
gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T, GL10.GL_REPEAT);

//Use the Android GLUtils to specify a two-dimensional texture image from our bitmap
GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);

//Clean up
bitmap.recycle();

但我真的不知道该把这段代码放在哪里……我几乎试着把它放在任何地方,但什么也没发生,或者我的视频被损坏了。在 stackoverflow 的另一个问题中(我丢失了链接),有程序员确认这个东西需要两个 GLES20 程序……请告诉我在视频上添加水印的正确程序代码以及我可以在哪里放置它。也许不使用 OpenGL,只用 MediaMuxer 和 MediaCodec 也可以做到这一点?


不要向我提供不同的库,它们不是内置的,比如 FFMPEG。我必须通过内置的 Android 库来做到这一点。我的应用所需的最低 API 级别必须为 18 (Android 4.3.1)。


@NizaSiwale,这就是我所拥有的:

但我想要这个:

【问题讨论】:

    标签: android video opengl-es mediamuxer gles20


    【解决方案1】:

    您可以先把从相机获取的帧转换为位图并在其上绘制水印,然后用 MediaCodec 编码、再由 MediaMuxer 封装成视频

    首先给你的相机添加一个回调,然后给你的位图添加水印

           private byte[] currentFrame;
                @Override
                public void onPreviewFrame(byte[] data, Camera camera) {
    
                    Size previewSize = camera.getParameters().getPreviewSize();
                    ByteArrayOutputStream baos = new ByteArrayOutputStream();
                    byte[] rawImage = null;
    
                    // Decode image from the retrieved buffer to JPEG
                    YuvImage yuv = new YuvImage(data, ImageFormat.NV21, previewSize.width, previewSize.height, null);
                    yuv.compressToJpeg(new Rect(0, 0, previewSize.width, previewSize.height), YOUR_JPEG_COMPRESSION, baos);
                    rawImage = baos.toByteArray();
    
    
                    Bitmap bitmap = BitmapFactory.decodeByteArray(rawImage, 0, rawImage.length);
                 currentFrame =  getNV21(src.getWidth(), src.getHeight(), mark(bitmap, yourWatermark,watermarkLocation));
                }
    
                public  Bitmap mark(Bitmap src, Bitmap watermark, Point location) {
                int w = src.getWidth();
                int h = src.getHeight();
                Bitmap result = Bitmap.createBitmap(w, h, src.getConfig());
    
                Canvas canvas = new Canvas(result);
                canvas.drawBitmap(src, 0, 0, null);
    
                canvas.drawBitmap(watermark, location.x, location.y, null);
    
                return result;
                }
     byte [] getNV21(int inputWidth, int inputHeight, Bitmap scaled) {
    
            int [] argb = new int[inputWidth * inputHeight];
    
            scaled.getPixels(argb, 0, inputWidth, 0, 0, inputWidth, inputHeight);
    
            byte [] yuv = new byte[inputWidth*inputHeight*3/2];
            encodeYUV420SP(yuv, argb, inputWidth, inputHeight);
    
            scaled.recycle();
    
            return yuv;
        }
    
        void encodeYUV420SP(byte[] yuv420sp, int[] argb, int width, int height) {
            final int frameSize = width * height;
    
            int yIndex = 0;
            int uvIndex = frameSize;
    
            int a, R, G, B, Y, U, V;
            int index = 0;
            for (int j = 0; j < height; j++) {
                for (int i = 0; i < width; i++) {
    
                    a = (argb[index] & 0xff000000) >> 24; // a is not used obviously
                    R = (argb[index] & 0xff0000) >> 16;
                    G = (argb[index] & 0xff00) >> 8;
                    B = (argb[index] & 0xff) >> 0;
    
                    // well known RGB to YUV algorithm
                    Y = ( (  66 * R + 129 * G +  25 * B + 128) >> 8) +  16;
                    U = ( ( -38 * R -  74 * G + 112 * B + 128) >> 8) + 128;
                    V = ( ( 112 * R -  94 * G -  18 * B + 128) >> 8) + 128;
    
                    // NV21 has a plane of Y and interleaved planes of VU each sampled by a factor of 2
                    //    meaning for every 4 Y pixels there are 1 V and 1 U.  Note the sampling is every other
                    //    pixel AND every other scanline.
                    yuv420sp[yIndex++] = (byte) ((Y < 0) ? 0 : ((Y > 255) ? 255 : Y));
                    if (j % 2 == 0 && index % 2 == 0) { 
                        yuv420sp[uvIndex++] = (byte)((V<0) ? 0 : ((V > 255) ? 255 : V));
                        yuv420sp[uvIndex++] = (byte)((U<0) ? 0 : ((U > 255) ? 255 : U));
                    }
    
                    index ++;
                }
            }
        }
    

    之后,只需使用MediaMuxer 从帧(位图)创建视频

            private void prepareEncoder() {
                try {
                    mBufferInfo = new MediaCodec.BufferInfo();
    
                    mediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
                    mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, WIDTH, HEIGHT);
                    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, calcBitRate());
                    mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
                    if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.LOLLIPOP) {
                        mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
                    }else{
                        mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
                    }
                    //2130708361, 2135033992, 21
                    mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
    
                    final MediaFormat audioFormat = MediaFormat.createAudioFormat(MIME_TYPE_AUDIO, SAMPLE_RATE, 1);
                    audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
                    audioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, AudioFormat.CHANNEL_IN_MONO);
                    audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
                    audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
    
                    mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
                    mediaCodec.start();
    
                    mediaCodecForAudio = MediaCodec.createEncoderByType(MIME_TYPE_AUDIO);
                    mediaCodecForAudio.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
                    mediaCodecForAudio.start();
    
                    try {
                        String outputPath = new File(Environment.getExternalStorageDirectory(),
                                "test.mp4").toString();
                        mediaMuxer = new MediaMuxer(outputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
                    } catch (IOException ioe) {
                        throw new RuntimeException("MediaMuxer creation failed", ioe);
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
    
        private void bufferEncoder() {
                runnable = new Runnable() {
                    @Override
                    public void run() {
                        prepareEncoder();
                        try {
                            while (mRunning) {
                                encode();
                            }
                            encode();
                        } finally {
                            release();
                        }
                    }
                };
                Thread thread = new Thread(runnable);
                thread.start();
            }
    
        public void encode() {
                    while (true) {
                        if (!mRunning) {
                            break;
                        }
                        int inputBufIndex = mediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
                        long ptsUsec = computePresentationTime(generateIndex);
                        if (inputBufIndex >= 0 && currentFrame!=null) {
    
                            byte[] input = currentFrame;
                            final ByteBuffer inputBuffer = mediaCodec.getInputBuffer(inputBufIndex);
                            inputBuffer.clear();
                            inputBuffer.put(input);
                            mediaCodec.queueInputBuffer(inputBufIndex, 0, input.length, ptsUsec, 0);
                            generateIndex++;
    currentFrame =null;
                        }
                        int encoderStatus = mediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
                        if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                            // no output available yet
                            Log.d("CODEC", "no output from encoder available");
                        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                            // not expected for an encoder
                            MediaFormat newFormat = mediaCodec.getOutputFormat();
                            mTrackIndex = mediaMuxer.addTrack(newFormat);
                            mediaMuxer.start();
                        } else if (encoderStatus < 0) {
                            Log.i("CODEC", "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
                        } else if (mBufferInfo.size != 0) {
                            ByteBuffer encodedData = mediaCodec.getOutputBuffer(encoderStatus);
                            if (encodedData == null) {
                                Log.i("CODEC", "encoderOutputBuffer " + encoderStatus + " was null");
                            } else {
                                encodedData.position(mBufferInfo.offset);
                                encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
                                mediaMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                                mediaCodec.releaseOutputBuffer(encoderStatus, false);
                            }
                        }
                    }
                }
            }
    

    完成后,只需释放MediaMuxer,它就会自动保存您的视频

    private void release() {
        // Tear down in encode order: stop/free the codec first, then the muxer
        // (stopping the muxer finalizes and saves the .mp4 file).
        final MediaCodec codec = mediaCodec;
        if (codec != null) {
            codec.stop();
            codec.release();
            mediaCodec = null;
            Log.i("CODEC", "RELEASE CODEC");
        }
        final MediaMuxer muxer = mediaMuxer;
        if (muxer != null) {
            muxer.stop();
            muxer.release();
            mediaMuxer = null;
            Log.i("CODEC", "RELEASE MUXER");
        }
    }
    

    【讨论】:

    • 酷,如果它对你有用,别忘了接受它。美好的一天:)
    • 嗯,录音有问题。我的视频有棕褐色效果。我可以将颜色格式CodecCapabilities.COLOR_FormatYUV420SemiPlanar 更改为其他吗?
    • 哈,我认为这就是问题所在。我尝试将其更改为CodecCapabilities.COLOR_FormatYUV420Planar,视频似乎是我通过塑料紫色波浪涂层录制的。如何解决?
    • 别那样做,只需使用参数parameter.setPreviewFormat(ImageFormat.NV21)将相机设置为Yuv格式@
    • 我尝试过:即使没有水印也可以录制视频;更改相机预览的表面纹理的 ID;几乎可以选择编解码器的所有功能;更改相机的预览格式;将相机从后到前;
    猜你喜欢
    • 1970-01-01
    • 1970-01-01
    • 2018-01-28
    • 1970-01-01
    • 1970-01-01
    • 1970-01-01
    • 2018-08-17
    • 1970-01-01
    • 1970-01-01
    相关资源
    最近更新 更多