How to provide a shader function to perform the alpha masking for video












[Screenshots: the colour frame (left) and the alpha-mask frame (right)]

I am working on a VR app using the ViroCore library on Android. I have to show a video over a sphere. The video I have to implement is not actually a plain video: the two frames provided are the colour frame (left) and the alpha-mask frame (right), as in the screenshots above. I have not worked with OpenGL, but it seems I will need to provide a shader function to perform the alpha masking.



For the shader I have used this: Adding transparency to a video from black and white (and gray) alpha information video images.



But how can I use it with OpenGL in the onDrawFrame method? Or is there any way in ViroCore with which I can do the alpha masking? I have tried the chroma-filtering method in ViroCore, but that makes the whole video transparent.



import android.content.Context;
import android.graphics.PixelFormat;
import android.graphics.SurfaceTexture;
import android.media.MediaPlayer;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import android.util.Log;
import android.view.Surface;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

public class VideoSurfaceView extends GLSurfaceView {

VideoRender mRenderer;
private MediaPlayer mMediaPlayer = null;

public VideoSurfaceView(Context context, MediaPlayer mp) {
super(context);

setEGLContextClientVersion(2);
mMediaPlayer = mp;
mRenderer = new VideoRender(context);

this.getHolder().setFormat(PixelFormat.RGB_565);
this.getHolder().setFormat(PixelFormat.TRANSPARENT);
setEGLConfigChooser(8,8,8,8,16,0);
setEGLContextClientVersion(2);

setRenderer(mRenderer);

}

@Override
public void onResume() {
Log.e("onResume ", "onResume");
queueEvent(new Runnable(){
public void run() {
Log.e("runnable ", "runnable");
mRenderer.setMediaPlayer(mMediaPlayer);
}});

super.onResume();
}

private static class VideoRender
implements Renderer, SurfaceTexture.OnFrameAvailableListener, MediaPlayer.OnPreparedListener {
private static String TAG = "VideoRender";

private static final int FLOAT_SIZE_BYTES = 4;
private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
private final float[] mTriangleVerticesData = {
// X, Y, Z, U, V
-1.0f, -1.0f, 0, 0.f, 0.f,
1.0f, -1.0f, 0, 1.f, 0.f,
-1.0f, 1.0f, 0, 0.f, 1.f,
1.0f, 1.0f, 0, 1.f, 1.f,
};

private FloatBuffer mTriangleVertices;


private static final String mVertexShader =
"uniform mat4 uMVPMatrix;\n" +
"uniform mat4 uSTMatrix;\n" +
"attribute vec4 position;\n" +
"attribute vec4 inputTextureCoordinate;\n" +
" \n" +
"varying vec2 textureCoordinate;\n" +
"varying vec2 textureCoordinate2;\n" +
" \n" +
"void main()\n" +
"{\n" +
"  gl_Position = uMVPMatrix * position;\n" +
"  vec4 texCoord = uSTMatrix * inputTextureCoordinate;\n" +
"  textureCoordinate = vec2(inputTextureCoordinate.x * 0.5, inputTextureCoordinate.y);\n" +
"  textureCoordinate2 = vec2(inputTextureCoordinate.x * 0.5 + 0.5, inputTextureCoordinate.y);\n" +
"}";


public static final String mFragmentShader =
"#extension GL_OES_EGL_image_external : require\n" +
"varying highp vec2 textureCoordinate;\n" +
"varying highp vec2 textureCoordinate2;\n" +
"uniform samplerExternalOES inputImageTexture;\n" +
"void main() {\n" +
"  lowp vec4 rgbcolor = texture2D(inputImageTexture, textureCoordinate);\n" +
"  lowp vec4 alphaValue = texture2D(inputImageTexture, textureCoordinate2);\n" +
"  if (alphaValue.g < 0.5)\n" +
"    discard;\n" +
"  gl_FragColor = vec4(rgbcolor.rgb, 1.0);\n" +
"}";

private float[] mMVPMatrix = new float[16];
private float[] mSTMatrix = new float[16];

private int mProgram;
private int mTextureID;
private int muMVPMatrixHandle;
private int muSTMatrixHandle;
private int maPositionHandle;
private int maTextureHandle;

private SurfaceTexture mSurface;
private boolean updateSurface = false;

private static int GL_TEXTURE_EXTERNAL_OES = 0x8D65;

private MediaPlayer mMediaPlayer;

public VideoRender(Context context) {
mTriangleVertices = ByteBuffer.allocateDirect(
mTriangleVerticesData.length * FLOAT_SIZE_BYTES)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mTriangleVertices.put(mTriangleVerticesData).position(0);

Matrix.setIdentityM(mSTMatrix, 0);

}

public void setMediaPlayer(MediaPlayer player) {
mMediaPlayer = player;
}

@Override
public void onDrawFrame(GL10 glUnused) {
synchronized(this) {
if (updateSurface) {
mSurface.updateTexImage();
mSurface.getTransformMatrix(mSTMatrix);
updateSurface = false;
}
}

GLES20.glClearColor(0.0f, 0.0f, 0.0f, .0f);
GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT|GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glEnable(GLES20.GL_BLEND);
GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
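// Note: blending only has a visible effect if the fragment shader writes alpha < 1.0;
// the discard-based shader above always writes alpha = 1.0 for surviving fragments.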

GLES20.glUseProgram(mProgram);
checkGlError("glUseProgram");

GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);

mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
checkGlError("glVertexAttribPointer maPosition");
GLES20.glEnableVertexAttribArray(maPositionHandle);
checkGlError("glEnableVertexAttribArray maPositionHandle");

mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
GLES20.glVertexAttribPointer(maTextureHandle, 3, GLES20.GL_FLOAT, false,
TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
checkGlError("glVertexAttribPointer maTextureHandle");
GLES20.glEnableVertexAttribArray(maTextureHandle);
checkGlError("glEnableVertexAttribArray maTextureHandle");

Matrix.setIdentityM(mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);

GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
checkGlError("glDrawArrays");
GLES20.glFinish();

}

@Override
public void onSurfaceChanged(GL10 glUnused, int width, int height) {

}

@Override
public void onSurfaceCreated(GL10 glUnused, EGLConfig config) {
mProgram = createProgram(mVertexShader, mFragmentShader);
if (mProgram == 0) {
return;
}
maPositionHandle = GLES20.glGetAttribLocation(mProgram, "position");
checkGlError("glGetAttribLocation aPosition");
if (maPositionHandle == -1) {
throw new RuntimeException("Could not get attrib location for aPosition");
}
maTextureHandle = GLES20.glGetAttribLocation(mProgram, "inputTextureCoordinate");
checkGlError("glGetAttribLocation aTextureCoord");
if (maTextureHandle == -1) {
throw new RuntimeException("Could not get attrib location for aTextureCoord");
}

muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
checkGlError("glGetUniformLocation uMVPMatrix");
if (muMVPMatrixHandle == -1) {
throw new RuntimeException("Could not get attrib location for uMVPMatrix");
}

muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
checkGlError("glGetUniformLocation uSTMatrix");
if (muSTMatrixHandle == -1) {
throw new RuntimeException("Could not get attrib location for uSTMatrix");
}


int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);

mTextureID = textures[0];
GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);
checkGlError("glBindTexture mTextureID");

GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
GLES20.GL_NEAREST);
GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
GLES20.GL_LINEAR);

/*
* Create the SurfaceTexture that will feed this textureID,
* and pass it to the MediaPlayer
*/
mSurface = new SurfaceTexture(mTextureID);
mSurface.setOnFrameAvailableListener(this);
Log.e("surface ", "surface");
Surface surface = new Surface(mSurface);
mMediaPlayer.setSurface(surface);
mMediaPlayer.setScreenOnWhilePlaying(true);
surface.release();
mMediaPlayer.setOnPreparedListener(this);
mMediaPlayer.prepareAsync();

synchronized(this) {
updateSurface = false;
}
}

synchronized public void onFrameAvailable(SurfaceTexture surface) {
updateSurface = true;
}

private int loadShader(int shaderType, String source) {
int shader = GLES20.glCreateShader(shaderType);
if (shader != 0) {
GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader);
int[] compiled = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == 0) {
Log.e(TAG, "Could not compile shader " + shaderType + ":");
Log.e(TAG, GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader);
shader = 0;
}
}
return shader;
}

private int createProgram(String vertexSource, String fragmentSource) {
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
if (vertexShader == 0) {
return 0;
}
int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
if (pixelShader == 0) {
return 0;
}

int program = GLES20.glCreateProgram();
if (program != 0) {
GLES20.glAttachShader(program, vertexShader);
checkGlError("glAttachShader");
GLES20.glAttachShader(program, pixelShader);
checkGlError("glAttachShader");
GLES20.glLinkProgram(program);
int[] linkStatus = new int[1];
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
if (linkStatus[0] != GLES20.GL_TRUE) {
Log.e(TAG, "Could not link program: ");
Log.e(TAG, GLES20.glGetProgramInfoLog(program));
GLES20.glDeleteProgram(program);
program = 0;
}
}
return program;
}

private void checkGlError(String op) {
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
Log.e(TAG, op + ": glError " + error);
throw new RuntimeException(op + ": glError " + error);
}
}

@Override
public void onPrepared(MediaPlayer mediaPlayer) {
mediaPlayer.start();
}
} // End of class VideoRender.


} // End of class VideoSurfaceView.



With this code, the video looks inverted.










Tags: android, opengl-es, glsl, vertex-shader, glsurfaceview






          1 Answer






          If you want to discard fragments, then you can use the discard keyword in the fragment shader.



          e.g. discard all fragments with an alpha value less than 0.5:



          void main()
          {
          lowp vec4 rgbcolor = texture2D(inputImageTexture, textureCoordinate);
          lowp vec4 alphaValue = texture2D(inputImageTexture, textureCoordinate2);

          if (alphaValue.g < 0.5)
          discard;

          gl_FragColor = vec4(rgbcolor.rgb, 1.0);
          }


          See also OpenGL ES Shading Language 1.00 Specification; 6.4 Jumps; page 58:




          The discard keyword is only allowed within fragment shaders. It can be used within a fragment shader to abandon the operation on the current fragment. This keyword causes the fragment to be discarded and no updates to any buffers will occur. It would typically be used within a conditional statement, for example:



          if (intensity < 0.0)
          discard;
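
          If smooth, grayscale mask edges are wanted instead of a hard cut-out, a variant is to write the mask value into the alpha channel and let the blending that onDrawFrame already enables (GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA) do the rest. This is just a sketch; the name mFragmentShaderBlend is arbitrary:

          public static final String mFragmentShaderBlend =
              "#extension GL_OES_EGL_image_external : require\n" +
              "varying highp vec2 textureCoordinate;\n" +
              "varying highp vec2 textureCoordinate2;\n" +
              "uniform samplerExternalOES inputImageTexture;\n" +
              "void main() {\n" +
              "  lowp vec4 rgbcolor = texture2D(inputImageTexture, textureCoordinate);\n" +
              "  lowp vec4 alphaValue = texture2D(inputImageTexture, textureCoordinate2);\n" +
              // the mask's green channel becomes the per-fragment opacity
              "  gl_FragColor = vec4(rgbcolor.rgb, alphaValue.g);\n" +
              "}";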





          In reference to the comment




          As there is no matrix so what will i use in GLES20.glGetUniformLocation?




          Of course you can add the matrices to the vertex shader:



          attribute vec4 position;
          attribute vec4 inputTextureCoordinate;

          varying vec2 textureCoordinate;
          varying vec2 textureCoordinate2;

          uniform mat4 matMVP;
          uniform mat4 matST;

          void main()
          {
              gl_Position = matMVP * position;

              vec4 texCoord = matST * inputTextureCoordinate;

              textureCoordinate  = vec2(texCoord.x * 0.5, 1.0 - texCoord.y);
              textureCoordinate2 = vec2(texCoord.x * 0.5 + 0.5, 1.0 - texCoord.y);
          }


          muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "matMVP");
          muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "matST");


          GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
          GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
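
          A rough sketch of where these calls fit in the renderer from the question, assuming the existing fields (mProgram, muMVPMatrixHandle, muSTMatrixHandle, mMVPMatrix, mSTMatrix) are reused:

          // in onSurfaceCreated, after createProgram(...) has linked mProgram:
          muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "matMVP");
          muSTMatrixHandle  = GLES20.glGetUniformLocation(mProgram, "matST");

          // in onDrawFrame, after glUseProgram(mProgram) and before glDrawArrays(...):
          // (mSTMatrix is filled by mSurface.getTransformMatrix(mSTMatrix) when a new frame arrives)
          Matrix.setIdentityM(mMVPMatrix, 0); // or a real model-view-projection matrix
          GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
          GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);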





          • Awesome! It worked. Thank you so much.

            – apurva
            Dec 31 '18 at 17:21













          • @apurva You're welcome. I'm pleased to help you.

            – Rabbid76
            Dec 31 '18 at 17:40










