我使用TextureView
和MediaPlayer
作为我的自定义视频组件。
如果视频的尺寸与TextureView
的尺寸相同，那么一切都很好。
但如果视频的尺寸较小(例如720x576且TextureView
尺寸为1280x1024)，则质量很差，就好像没有应用抗锯齿一样。
有趣的是,如果我在完全相同的情况下使用SurfaceView
,似乎SurfaceView
会应用某种抗锯齿,所以我会得到更好的图片。
我尝试将Paint
对象应用于TextureView
:
Paint paint = new Paint();
paint.setFlags(Paint.ANTI_ALIAS_FLAG);
paint.setAntiAlias(true);
// NOTE(review): setLayerPaint() is presumably ignored while the view's layer
// type is LAYER_TYPE_NONE (the default) — TODO confirm; that would explain
// why this has no visible effect on the TextureView's output.
setLayerPaint(paint);
但它并没有改善这种情况。
我发现使用setScaleX(1.00001f);
有一点帮助，但效果有限，质量仍然很差。
有没有办法将抗锯齿应用于TextureView
?
这是组件代码。
package com.example.app;
import android.app.Activity;
import android.content.Context;
import android.graphics.Paint;
import android.graphics.Point;
import android.graphics.SurfaceTexture;
import android.media.MediaPlayer;
import android.net.Uri;
import android.view.Display;
import android.view.Surface;
import android.view.TextureView;
import android.view.ViewGroup;
import android.widget.LinearLayout;
import java.util.HashMap;
import com.example.app.entities.Channel;
/**
 * A video view backed by a {@link TextureView} and a {@link MediaPlayer}.
 *
 * <p>The view keeps two state variables: {@code currentState} is where
 * playback actually is, {@code targetState} is what the caller last asked
 * for. When the surface or the player becomes ready, playback is advanced
 * toward {@code targetState} (see {@link #onPrepared}).
 */
public class TextureVideoView extends TextureView implements MediaPlayer.OnPreparedListener, TextureView.SurfaceTextureListener {
    // Playback state machine constants (a small subset of MediaPlayer's states).
    private static final int STATE_IDLE = 0;
    private static final int STATE_PLAYING = 1;
    private static final int STATE_PAUSED = 2;
    private static final int STATE_PREPARING = 3;
    private static final int STATE_PREPARED = 4;

    private Context context;
    private MediaPlayer mediaPlayer;
    private SurfaceTexture surfaceTexture;
    private Uri uri;
    private Surface surface;
    private Channel.AspectRatio currentAspectRatio;
    private Channel.AspectRatio targetAspectRatio;
    private int videoWidth = 0;
    private int videoHeight = 0;
    private int screenWidth;
    private int screenHeight;
    // What the caller last requested vs. where playback actually is.
    private int targetState = STATE_IDLE;
    private int currentState = STATE_IDLE;

    public TextureVideoView(Context context) {
        super(context);
        this.context = context;
        Display display = ((Activity) context).getWindowManager().getDefaultDisplay();
        Point size = new Point();
        display.getSize(size);
        screenWidth = size.x;
        screenHeight = size.y;
        // NOTE(review): a scale of exactly 1.0 reportedly yields worse quality;
        // the tiny offset presumably forces a filtered draw path — confirm.
        setScaleX(1.00001f);
        // NOTE(review): setLayerPaint() is likely a no-op while the layer type
        // is LAYER_TYPE_NONE (the default) — TODO confirm.
        Paint paint = new Paint();
        paint.setDither(true);
        paint.setFilterBitmap(true);
        paint.setFlags(Paint.ANTI_ALIAS_FLAG);
        paint.setAntiAlias(true);
        setLayerPaint(paint);
        LinearLayout.LayoutParams params = new LinearLayout.LayoutParams(
                ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT);
        setLayoutParams(params);
        currentAspectRatio = Channel.getInstance().getFullScreenAspectRatio();
        setSurfaceTextureListener(this);
    }

    /**
     * Releases any previous player and starts asynchronous preparation of
     * {@code uri}. If the SurfaceTexture is not yet available the URI is only
     * stored; preparation is retried from {@link #onSurfaceTextureAvailable}.
     */
    public void setVideoURI(Uri uri) {
        release();
        this.uri = uri;
        if (surfaceTexture == null) {
            return; // retried once onSurfaceTextureAvailable() fires
        }
        try {
            mediaPlayer = new MediaPlayer();
            mediaPlayer.setOnPreparedListener(this);
            mediaPlayer.setDataSource(context, uri, new HashMap<String, String>());
            mediaPlayer.setScreenOnWhilePlaying(true);
            // Fix: attach the output surface BEFORE prepareAsync() so the
            // first decoded frames already have somewhere to go.
            surface = new Surface(surfaceTexture);
            mediaPlayer.setSurface(surface);
            mediaPlayer.prepareAsync();
            currentState = STATE_PREPARING;
        } catch (Exception e) {
            // Fix: the exception used to be silently swallowed; at minimum
            // surface the failure and fall back to a consistent state.
            e.printStackTrace();
            currentState = STATE_IDLE;
        }
    }

    /** Starts (or schedules) playback. Safe to call before preparation completes. */
    public void start() {
        if (isInPlaybackState()) {
            mediaPlayer.start();
            // Fix: was never set, so isInPlaybackState() rejected a later
            // resume after pause() had moved currentState to STATE_PAUSED.
            currentState = STATE_PLAYING;
        }
        targetState = STATE_PLAYING;
    }

    /** Pauses (or schedules a pause of) playback. */
    public void pause() {
        if (isInPlaybackState()) {
            mediaPlayer.pause();
            currentState = STATE_PAUSED;
        }
        targetState = STATE_PAUSED;
    }

    /** Stops playback and releases the player entirely. */
    public void stopPlayback() {
        if (mediaPlayer != null) {
            mediaPlayer.stop();
            mediaPlayer.release();
            mediaPlayer = null;
            currentState = STATE_IDLE;
            targetState = STATE_IDLE;
        }
    }

    /** @return current position in milliseconds, or 0 when no player exists. */
    public int getCurrentPosition() {
        // Fix: guard against NPE when called with no active player.
        return mediaPlayer != null ? mediaPlayer.getCurrentPosition() : 0;
    }

    /** @return true when a player exists and is actively playing. */
    public boolean isPlaying() {
        // Fix: guard against NPE when called with no active player.
        return mediaPlayer != null && mediaPlayer.isPlaying();
    }

    /** True when the player exists and start()/pause() may legally be called on it. */
    private boolean isInPlaybackState() {
        // Fix: STATE_PAUSED is a valid state to resume from and was missing.
        return mediaPlayer != null
                && (currentState == STATE_PLAYING
                        || currentState == STATE_PAUSED
                        || currentState == STATE_PREPARED);
    }

    /** Releases player and surface, nulling the references to prevent reuse. */
    private void release() {
        if (mediaPlayer != null) {
            mediaPlayer.reset();
            mediaPlayer.release();
            // Fix: the stale reference allowed use-after-release from
            // getCurrentPosition()/isPlaying()/isInPlaybackState().
            mediaPlayer = null;
        }
        if (surface != null) {
            surface.release();
            surface = null;
        }
        currentState = STATE_IDLE;
    }

    @Override
    public void onPrepared(MediaPlayer mp) {
        currentState = STATE_PREPARED;
        // Size the view first so layout happens before frames appear.
        videoWidth = mp.getVideoWidth();
        videoHeight = mp.getVideoHeight();
        applyAspectRatio();
        // Advance toward whatever the caller requested while we were preparing.
        if (targetState == STATE_PLAYING) {
            start();
        } else if (targetState == STATE_PAUSED) {
            pause();
        }
    }

    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
        surfaceTexture = surface;
        // A URI may have been set before the surface existed; start it now.
        if (currentState == STATE_IDLE && uri != null) {
            setVideoURI(uri);
        }
    }

    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
    }

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
        // Returning false means we keep ownership of the SurfaceTexture.
        // NOTE(review): it is never explicitly released — potential leak; verify.
        return false;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surface) {
    }

    /** Requests a new aspect ratio; applied immediately if video size is known. */
    public void setAspectRatio(Channel.AspectRatio aspectRatio) {
        targetAspectRatio = aspectRatio;
        applyAspectRatio();
    }

    /** Forgets the current ratio and video dimensions (e.g. on channel change). */
    public void clearCurrentAspectRatio() {
        currentAspectRatio = null;
        videoWidth = 0;
        videoHeight = 0;
    }

    /**
     * Resizes this view so the video is letterboxed to the selected ratio.
     * No-op until the video dimensions are known (set in onPrepared()).
     */
    private void applyAspectRatio() {
        if (videoWidth == 0 || videoHeight == 0) {
            return; // dimensions unknown before onPrepared()
        }
        // Fix: targetAspectRatio may still be null (setAspectRatio() never
        // called); blindly assigning it caused an NPE on .label below.
        if (targetAspectRatio != null) {
            currentAspectRatio = targetAspectRatio;
        }
        if (currentAspectRatio == null) {
            return;
        }
        System.out.println(currentAspectRatio.label);
        System.out.println("screen width: " + screenWidth);
        System.out.println("screen height: " + screenHeight);
        System.out.println("original video width: " + videoWidth);
        System.out.println("original video height: " + videoHeight);
        ViewGroup.LayoutParams params = getLayoutParams();
        if (currentAspectRatio.ratio == Channel.RATIO_FULL_WIDTH) {
            // Fill the screen width; derive height from the video's own ratio.
            params.width = screenWidth;
            params.height = videoHeight * screenWidth / videoWidth;
        } else {
            // Fill the screen height; derive width from the selected ratio.
            params.height = screenHeight;
            switch (currentAspectRatio.ratio) {
                case (Channel.RATIO_16_9):
                    params.width = screenHeight * 16 / 9;
                    break;
                case (Channel.RATIO_4_3):
                    params.width = screenHeight * 4 / 3;
                    break;
                case (Channel.RATIO_ORIGINAL):
                    params.width = videoWidth * screenHeight / videoHeight;
                    break;
            }
        }
        System.out.println("video width: " + params.width);
        System.out.println("video height: " + params.height);
        if (params.width == getWidth() && params.height == getHeight()) {
            return; // nothing changed; skip an unnecessary relayout
        }
        setLayoutParams(params);
    }
}
更新
根据fadden的回答,我写了这段代码:
// Experiment: route MediaPlayer output through a stand-alone SurfaceTexture,
// then draw it onto the on-screen TextureView via EGL (grafika's EglCore /
// WindowSurface helpers), so the GL magnification filter can be controlled.
TextureView textureView = new TextureView(this);
LinearLayout.LayoutParams params = new LinearLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT);
textureView.setLayoutParams(params);
((ViewGroup)findViewById(android.R.id.content)).addView(textureView);
textureView.setSurfaceTextureListener(new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {
try {
//stand-alone Surface with TextureView to receive
//data from MediaPlayer
Surface source = new Surface(new SurfaceTexture(111));
EglCore mEglCore = new EglCore(null, EglCore.FLAG_TRY_GLES3);
//WindowSurface backed by a SurfaceTexture that was received
//from a TextureView that is in my layout
WindowSurface windowSurface = new WindowSurface(mEglCore,
new Surface(surfaceTexture), true);
//Make that WindowSurface read data from the source
//(stand-alone Surface), which in turn receives data
//from the MediaPlayer
// NOTE(review): wrapping `source` in a second WindowSurface connects it to
// EGL as a *producer*, and mediaPlayer.setSurface(source) below connects the
// MediaPlayer as another producer to the same Surface. A Surface's buffer
// queue accepts only one producer at a time, which matches the reported
// "connect: already connected (cur=1, req=3)" error. The consumer side of
// `source` should instead be read via its SurfaceTexture as an external GL
// texture — TODO confirm against grafika's ContinuousCapture example.
windowSurface.makeCurrentReadFrom(new WindowSurface(mEglCore,
source, true));
//Change the scaling mode.
//is it ok that I use GLES20.GL_TEXTURE_2D?
// NOTE(review): a SurfaceTexture is sampled through the external-image
// target (GL_TEXTURE_EXTERNAL_OES), not GL_TEXTURE_2D — verify.
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
MediaPlayer mediaPlayer = new MediaPlayer();
//The MediaPlayer directs data to the
//stand-alone Surface, as a result the
//windowSurface must output that data with
//GL_TEXTURE_MAG_FILTER set to GLES20.GL_LINEAR
mediaPlayer.setSurface(source);
mediaPlayer.setDataSource(TestActivity.this,
Uri.parse("http://some.source"));
mediaPlayer.prepare();
mediaPlayer.start();
} catch (Exception e) {
// NOTE(review): exception is swallowed here; at minimum log it so setup
// failures (like the BufferQueue error above) are visible.
}
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
return false;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
});
但是我收到了这个错误:
E/BufferQueue: [unnamed-28441-1] connect: already connected (cur=1, req=3)
有什么问题?
更新
最后，我让@fadden建议的方案运行起来了。但是GL_LINEAR
在我的情况下还不够。SurfaceView似乎使用了更高级的算法(比如双三次插值)。
我尝试在GLSL
中使用Bi-Cubic插值作为片段着色器(基于此处的来源:http://www.codeproject.com/Articles/236394/Bi-Cubic-and-Bi-Linear-Interpolation-with-GLSL)
但在我的情况下它并没有正常工作：图像变暗了，性能也很差(约5 fps)，并且出现了水平和垂直条纹。可能是哪里出了问题？
#extension GL_OES_EGL_image_external : require
precision mediump float;
varying vec2 vTextureCoord;
uniform samplerExternalOES sTexture;
uniform vec2 invScreenSize;
// Piecewise-quadratic "Bell" (quadratic B-spline) kernel used as the
// bi-cubic interpolation weight. `x` is the signed distance in texels
// from the sample point (roughly -2..+2); returns a weight in [0, 0.75].
// Fix: the parameter was declared as `f` while the body redeclared
// `float f = ( x / 2.0 ) * 1.5;` with `x` undefined — the shader could not
// compile. The CodeProject original names the parameter `x`.
float BellFunc( float x )
{
    float f = ( x / 2.0 ) * 1.5; // Converting -2 to +2 to -1.5 to +1.5
    if( f > -1.5 && f < -0.5 )
    {
        return( 0.5 * pow(f + 1.5, 2.0));
    }
    else if( f > -0.5 && f < 0.5 )
    {
        return 3.0 / 4.0 - ( f * f );
    }
    else if( ( f > 0.5 && f < 1.5 ) )
    {
        return( 0.5 * pow(f - 1.5, 2.0));
    }
    return 0.0;
}
// Bi-cubic interpolation: weighted 4x4-texel neighborhood sum around
// TexCoord, with BellFunc() providing the per-axis weights.
// NOTE(review): despite its name, `invScreenSize` is used here as if it held
// the texture's WIDTH/HEIGHT in texels (texelSize = 1.0 / invScreenSize, and
// fract(TexCoord * invScreenSize) extracts the sub-texel offset). If the app
// actually uploads the inverse (1/width, 1/height), every distance below is
// wrong — a plausible cause of the reported darkening and stripes. TODO
// confirm what the host code uploads, or rename the uniform.
vec4 BiCubic( samplerExternalOES textureSampler, vec2 TexCoord )
{
    float texelSizeX = 1.0 / invScreenSize.x; //size of one texel
    float texelSizeY = 1.0 / invScreenSize.y; //size of one texel
    vec4 nSum = vec4( 0.0, 0.0, 0.0, 0.0 );
    vec4 nDenom = vec4( 0.0, 0.0, 0.0, 0.0 );
    float a = fract( TexCoord.x * invScreenSize.x ); // get the decimal part
    float b = fract( TexCoord.y * invScreenSize.y ); // get the decimal part
    // Walk the 4x4 neighborhood: m/n in [-1, 2] around the base texel.
    for( int m = -1; m <=2; m++ )
    {
        for( int n =-1; n<= 2; n++)
        {
            vec4 vecData = texture2D(textureSampler,
                TexCoord + vec2(texelSizeX * float( m ),
                    texelSizeY * float( n )));
            // Separable weights: horizontal (m - a) and vertical (n - b) distances.
            float f = BellFunc( float( m ) - a );
            vec4 vecCooef1 = vec4( f,f,f,f );
            float f1 = BellFunc ( -( float( n ) - b ) );
            vec4 vecCoeef2 = vec4( f1, f1, f1, f1 );
            nSum = nSum + ( vecData * vecCoeef2 * vecCooef1 );
            nDenom = nDenom + (( vecCoeef2 * vecCooef1 ));
        }
    }
    // Normalize by the accumulated weights so the kernel sums to 1.
    return nSum / nDenom;
}
void main() {
    // Sample the external (video) texture with bi-cubic filtering.
    vec4 filtered = BiCubic( sTexture, vTextureCoord );
    gl_FragColor = filtered;
}