
I am using a TextureView with MediaPlayer for my custom video component.

If the video is larger than, or the same size as, the TextureView, everything is fine.

But if the video is smaller (say 720x576 while the TextureView is 1280x1024), the quality is poor, as if no anti-aliasing were applied.

Interestingly, if I use a SurfaceView in exactly the same situation, the SurfaceView seems to apply some kind of anti-aliasing, and I get a better picture.

I tried applying a Paint object to the TextureView:

    Paint paint = new Paint();
    paint.setFlags(Paint.ANTI_ALIAS_FLAG);
    paint.setAntiAlias(true);
    setLayerPaint(paint);

But that did not improve things.

I found that calling setScaleX(1.00001f); helps, but only slightly; the quality is still poor.

Is there a way to apply anti-aliasing to a TextureView?

Here is the component code.

package com.example.app;

import android.app.Activity;
import android.content.Context;
import android.graphics.Paint;
import android.graphics.Point;
import android.graphics.SurfaceTexture;
import android.media.MediaPlayer;
import android.net.Uri;
import android.view.Display;
import android.view.Surface;
import android.view.TextureView;
import android.view.ViewGroup;
import android.widget.LinearLayout;

import java.util.HashMap;

import com.example.app.entities.Channel;

public class TextureVideoView extends TextureView implements MediaPlayer.OnPreparedListener, TextureView.SurfaceTextureListener {

    private Context context;
    private MediaPlayer mediaPlayer;
    private SurfaceTexture surfaceTexture;
    private Uri uri;
    private Surface surface;
    private Channel.AspectRatio currentAspectRatio;
    private Channel.AspectRatio targetAspectRatio;

    private int videoWidth = 0;
    private int videoHeight = 0;

    private int screenWidth;
    private int screenHeight;

    private int targetState = STATE_IDLE;
    private int currentState = STATE_IDLE;

    private static final int STATE_IDLE = 0;
    private static final int STATE_PLAYING = 1;
    private static final int STATE_PAUSED = 2;
    private static final int STATE_PREPARING = 3;
    private static final int STATE_PREPARED = 4;

    public TextureVideoView(Context context) {
        super(context);

        this.context = context;

        Display display = ((Activity)context).getWindowManager().getDefaultDisplay();
        Point size = new Point();
        display.getSize(size);
        screenWidth = size.x;
        screenHeight = size.y;

        setScaleX(1.00001f);
        Paint paint = new Paint();
        paint.setDither(true);
        paint.setFilterBitmap(true);
        paint.setFlags(Paint.ANTI_ALIAS_FLAG);
        paint.setAntiAlias(true);
        setLayerPaint(paint);
        LinearLayout.LayoutParams params = new LinearLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT);
        setLayoutParams(params);
        currentAspectRatio = Channel.getInstance().getFullScreenAspectRatio();
        setSurfaceTextureListener(this);
    }

    public void setVideoURI(Uri uri) {
        release();
        this.uri = uri;
        if (surfaceTexture == null) {
            return;
        }
        try {
            mediaPlayer = new MediaPlayer();
            mediaPlayer.setOnPreparedListener(this);
            mediaPlayer.setDataSource(context, uri, new HashMap<String, String>());
            mediaPlayer.setScreenOnWhilePlaying(true);
            mediaPlayer.prepareAsync();
            surface = new Surface(surfaceTexture);
            mediaPlayer.setSurface(surface);
            currentState = STATE_PREPARING;
        }
        catch (Exception e) {

        }
    }

    public void start() {
        if (isInPlaybackState()) {
            mediaPlayer.start();
        }
        targetState = STATE_PLAYING;
    }

    public void pause() {
        if (isInPlaybackState()) {
            mediaPlayer.pause();
            currentState = STATE_PAUSED;
        }
        targetState = STATE_PAUSED;
    }

    public void stopPlayback() {
        if (mediaPlayer != null) {
            mediaPlayer.stop();
            mediaPlayer.release();
            mediaPlayer = null;
            currentState = STATE_IDLE;
            targetState  = STATE_IDLE;
        }
    }

    public int getCurrentPosition() {
        return mediaPlayer.getCurrentPosition();
    }

    public boolean isPlaying() {
        return mediaPlayer.isPlaying();
    }

    private boolean isInPlaybackState() {
        return mediaPlayer != null && (currentState == STATE_PLAYING || currentState == STATE_PREPARED);
    }

    private void release() {
        if (mediaPlayer != null) {
            mediaPlayer.reset();
            mediaPlayer.release();
            mediaPlayer = null;
        }
        if (surface != null) {
            surface.release();
            surface = null;
        }
    }

    @Override
    public void onPrepared(MediaPlayer mp) {
        currentState = STATE_PREPARED;
        if (targetState == STATE_PLAYING) {
            start();
        }
        else if (targetState == STATE_PAUSED) {
            pause();
        }

        videoWidth = mediaPlayer.getVideoWidth();
        videoHeight = mediaPlayer.getVideoHeight();
        applyAspectRatio();
    }

    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
        surfaceTexture = surface;
        if (currentState == STATE_IDLE && uri != null) {
            setVideoURI(uri);
        }
    }

    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {

    }

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
        return false;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surface) {

    }

    public void setAspectRatio(Channel.AspectRatio aspectRatio) {
        targetAspectRatio = aspectRatio;
        applyAspectRatio();
    }

    public void clearCurrentAspectRatio() {
        currentAspectRatio = null;
        videoWidth = 0;
        videoHeight = 0;
    }

    private void applyAspectRatio() {
        if (videoWidth == 0 || videoHeight == 0) {
            return;
        }
        currentAspectRatio = targetAspectRatio;
        System.out.println(currentAspectRatio.label);

        System.out.println("screen width: " + screenWidth);
        System.out.println("screen height: " + screenHeight);

        System.out.println("original video width: " + videoWidth);
        System.out.println("original video height: " + videoHeight);

        ViewGroup.LayoutParams params = getLayoutParams();

        if (currentAspectRatio.ratio == Channel.RATIO_FULL_WIDTH) {
            params.width = screenWidth;
            params.height = videoHeight * screenWidth / videoWidth;
        }
        else {
            params.height = screenHeight;
            switch (currentAspectRatio.ratio) {
                case (Channel.RATIO_16_9):
                    params.width = screenHeight * 16 / 9;
                    break;
                case (Channel.RATIO_4_3):
                    params.width = screenHeight * 4 / 3;
                    break;
                case (Channel.RATIO_ORIGINAL):
                    params.width = videoWidth * screenHeight / videoHeight;
                    break;

            }
        }

        System.out.println("video width: " + params.width);
        System.out.println("video height: " + params.height);

        if (params.width == getWidth() && params.height == getHeight()) {
            return;
        }

        setLayoutParams(params);
    }
}
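
For context, the component is used roughly like this from an Activity (just a sketch; the stream URI is a placeholder):

    // Hypothetical usage of the component above inside an Activity;
    // the stream URI is only a placeholder.
    TextureVideoView videoView = new TextureVideoView(this);
    ((ViewGroup) findViewById(android.R.id.content)).addView(videoView);
    videoView.setVideoURI(Uri.parse("http://some.source"));
    videoView.start();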

Update

Following fadden's answer, I wrote this code:

    TextureView textureView = new TextureView(this);
    LinearLayout.LayoutParams params = new LinearLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT);
    textureView.setLayoutParams(params);
    ((ViewGroup)findViewById(android.R.id.content)).addView(textureView);
    textureView.setSurfaceTextureListener(new TextureView.SurfaceTextureListener() {
        @Override
        public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {
            try {
                //stand-alone Surface with TextureView to receive 
                //data from MediaPlayer
                Surface source = new Surface(new SurfaceTexture(111));

                EglCore mEglCore = new EglCore(null, EglCore.FLAG_TRY_GLES3);
                //WindowSurface backed by a SurfaceTexture that was received 
                //from a TextureView that is in my layout
                WindowSurface windowSurface = new WindowSurface(mEglCore, 
                        new Surface(surfaceTexture), true);
                //Make that WindowSurface read data from the source 
                //(stand-alone Surface), which in turn receives data 
                //from the MediaPlayer
                windowSurface.makeCurrentReadFrom(new WindowSurface(mEglCore, 
                        source, true));
                //Change the scaling mode. 
                //is it ok that I use GLES20.GL_TEXTURE_2D?
                GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, 
                        GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);

                MediaPlayer mediaPlayer = new MediaPlayer();
                //The MediaPlayer directs data to the 
                //stand-alone Surface, as a result the 
                //windowSurface must output that data with 
                //GL_TEXTURE_MAG_FILTER set to GLES20.GL_LINEAR
                mediaPlayer.setSurface(source);
                mediaPlayer.setDataSource(TestActivity.this, 
                        Uri.parse("http://some.source"));
                mediaPlayer.prepare();
                mediaPlayer.start();
            } catch (Exception e) {

            }
        }

        @Override
        public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {

        }

        @Override
        public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
            return false;
        }

        @Override
        public void onSurfaceTextureUpdated(SurfaceTexture surface) {

        }
    });

But I get this error:

E/BufferQueue: [unnamed-28441-1] connect: already connected (cur=1, req=3)

What is wrong?

Update

I finally got what @fadden suggested working.
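
Roughly, the setup that works looks like the sketch below. It relies on Grafika's EglCore, WindowSurface, FullFrameRect and Texture2dProgram classes and is assumed to run inside onSurfaceTextureAvailable() of the TextureView (as in the snippet above); threading, error handling and releasing are left out, and videoUri is a placeholder for the stream URI.

    // Sketch only: MediaPlayer decodes into my own SurfaceTexture, and every
    // frame is copied onto the TextureView's SurfaceTexture through GLES,
    // where GL_LINEAR magnification does the smoothing.
    EglCore eglCore = new EglCore(null, EglCore.FLAG_TRY_GLES3);

    // The TextureView's SurfaceTexture becomes the EGL window surface I draw into.
    final WindowSurface displaySurface =
            new WindowSurface(eglCore, new Surface(surfaceTexture), true);
    displaySurface.makeCurrent();

    // My own OES texture + SurfaceTexture receive the decoded frames.
    final FullFrameRect blit =
            new FullFrameRect(new Texture2dProgram(Texture2dProgram.ProgramType.TEXTURE_EXT));
    final int videoTextureId = blit.createTextureObject();
    SurfaceTexture videoTexture = new SurfaceTexture(videoTextureId);
    final float[] texMatrix = new float[16];

    videoTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
        @Override
        public void onFrameAvailable(SurfaceTexture st) {
            // Must run on the thread that owns the EGL context.
            displaySurface.makeCurrent();
            st.updateTexImage();                 // latch the new frame into videoTextureId
            st.getTransformMatrix(texMatrix);
            GLES20.glViewport(0, 0, displaySurface.getWidth(), displaySurface.getHeight());
            blit.drawFrame(videoTextureId, texMatrix);
            displaySurface.swapBuffers();
        }
    });

    // MediaPlayer is the only producer connected to this Surface, so the
    // "already connected" BufferQueue error goes away.
    MediaPlayer mediaPlayer = new MediaPlayer();
    mediaPlayer.setSurface(new Surface(videoTexture));
    try {
        mediaPlayer.setDataSource(TestActivity.this, videoUri);
    } catch (Exception e) {
        e.printStackTrace();
    }
    mediaPlayer.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
        @Override
        public void onPrepared(MediaPlayer mp) {
            mp.start();
        }
    });
    mediaPlayer.prepareAsync();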

But GL_LINEAR is not enough in my case; SurfaceView apparently does something more advanced (such as bi-cubic interpolation). So I tried bi-cubic interpolation in GLSL as a fragment shader (taken from here: http://www.codeproject.com/Articles/236394/Bi-Cubic-and-Bi-Linear-Interpolation-with-GLSL), but it does not work properly in my case: the picture becomes darker, performance drops (~5 fps), and I get horizontal and vertical stripes. What is wrong?

#extension GL_OES_EGL_image_external : require
precision mediump float;
varying vec2 vTextureCoord;
uniform samplerExternalOES sTexture;
uniform vec2 invScreenSize;

float BellFunc( float x )
{
    float f = ( x / 2.0 ) * 1.5; // Converting -2 to +2 to -1.5 to +1.5
    if( f > -1.5 && f < -0.5 )
    {
        return( 0.5 * pow(f + 1.5, 2.0));
    }
    else if( f > -0.5 && f < 0.5 )
    {
        return 3.0 / 4.0 - ( f * f );
    }
    else if( ( f > 0.5 && f < 1.5 ) )
    {
        return( 0.5 * pow(f - 1.5, 2.0));
    }
    return 0.0;
}

vec4 BiCubic( samplerExternalOES textureSampler, vec2 TexCoord )
{
    float texelSizeX = 1.0 / invScreenSize.x; //size of one texel 
    float texelSizeY = 1.0 / invScreenSize.y; //size of one texel 
    vec4 nSum = vec4( 0.0, 0.0, 0.0, 0.0 );
    vec4 nDenom = vec4( 0.0, 0.0, 0.0, 0.0 );
    float a = fract( TexCoord.x * invScreenSize.x ); // get the decimal part
    float b = fract( TexCoord.y * invScreenSize.y ); // get the decimal part
    for( int m = -1; m <=2; m++ )
    {
        for( int n =-1; n<= 2; n++)
        {
        vec4 vecData = texture2D(textureSampler, 
               TexCoord + vec2(texelSizeX * float( m ), 
                texelSizeY * float( n )));
        float f  = BellFunc( float( m ) - a );
        vec4 vecCooef1 = vec4( f,f,f,f );
        float f1 = BellFunc ( -( float( n ) - b ) );
        vec4 vecCoeef2 = vec4( f1, f1, f1, f1 );
        nSum = nSum + ( vecData * vecCoeef2 * vecCooef1  );
        nDenom = nDenom + (( vecCoeef2 * vecCooef1 ));
        }
    }
    return nSum / nDenom;
}

void main() {
    gl_FragColor = BiCubic(sTexture, vTextureCoord);
}
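
For completeness, the extra uniform is fed from Java along these lines (a sketch; programHandle, videoWidth and videoHeight are placeholders for the linked GLES program and the decoded video size). Note that the shader divides 1.0 by invScreenSize to get a texel size, so despite its name the uniform has to hold the source size in texels, not its inverse:

    // Sketch: fill the invScreenSize uniform after the program is linked
    // ("programHandle" is a placeholder for the GLES program id).
    int invScreenSizeLoc = GLES20.glGetUniformLocation(programHandle, "invScreenSize");
    GLES20.glUseProgram(programHandle);
    // The shader computes texelSize = 1.0 / invScreenSize and
    // fract(TexCoord * invScreenSize), so this must be the decoded video
    // size in texels (e.g. 720x576), not 1/width and 1/height.
    GLES20.glUniform2f(invScreenSizeLoc, videoWidth, videoHeight);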