2017-12-05 1 views
0

ARCore는 알려진 대로 수평면을 감지하고, 이를 클릭하면 3D 오브젝트를 배치할 수 있습니다. 저는 3D 오브젝트 대신, 사용자가 평면 표면을 클릭했을 때 비디오를 표시해야 합니다. 모양과 느낌(Look and Feel)은 3D 오브젝트가 표시될 때와 같아야 하며, 3D 오브젝트 자리에 비디오를 미리보기 형태로 ARCore 안에서 재생해야 합니다.

ARcore에서는 현재 표면보기가있는 Relativelayout을 하나 사용하고 있습니다. 따라서 비디오를 표시하기 위해 Surfaceview를 사용하고 mediaplayer로 연결합니다.

public void onsurfacecreatedvideo(){
    // Compiles/links the video shader program, creates the external (OES)
    // texture that will receive decoded video frames, wires a
    // SurfaceTexture-backed Surface into the MediaPlayer, and starts playback.
    // Must run on the GL thread (creates GL objects).
    mProgram = createProgram(mVertexShader, mFragmentShader);
    if (mProgram == 0) {
        return;
    }
    maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
    checkGlError("glGetAttribLocation aPosition");
    if (maPositionHandle == -1) {
        throw new RuntimeException("Could not get attrib location for aPosition");
    }
    maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
    checkGlError("glGetAttribLocation aTextureCoord");
    if (maTextureHandle == -1) {
        throw new RuntimeException("Could not get attrib location for aTextureCoord");
    }

    muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
    checkGlError("glGetUniformLocation uMVPMatrix");
    if (muMVPMatrixHandle == -1) {
        throw new RuntimeException("Could not get attrib location for uMVPMatrix");
    }

    muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
    checkGlError("glGetUniformLocation uSTMatrix");
    if (muSTMatrixHandle == -1) {
        throw new RuntimeException("Could not get attrib location for uSTMatrix");
    }

    // One external (OES) texture to hold decoded video frames.
    int[] textures = new int[1];
    GLES20.glGenTextures(1, textures, 0);

    mTextureID = textures[0];
    GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);
    checkGlError("glBindTexture mTextureID");

    GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
            GLES20.GL_NEAREST);
    GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
            GLES20.GL_LINEAR);

    /*
     * Create the SurfaceTexture that will feed this textureID,
     * and pass it to the MediaPlayer
     */
    mSurface = new SurfaceTexture(mTextureID);
    mSurface.setOnFrameAvailableListener(this);

    Surface surface = new Surface(mSurface);
    mMediaPlayer.setSurface(surface);
    mMediaPlayer.setScreenOnWhilePlaying(true);

    // The MediaPlayer keeps its own reference to the Surface, so the local
    // wrapper can be released immediately.
    surface.release();

    mMediaPlayer.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
        @Override
        public void onPrepared(MediaPlayer mp) {
            Log.i(TAG,"ONPREPArED abhilash");
            setVideoSize();
            mp.start();
        }
    });
    try {
        mMediaPlayer.prepare();
    } catch (IOException t) {
        // BUG FIX: keep the stack trace in the log and bail out instead of
        // falling through and calling start() on a player that never reached
        // the Prepared state (which throws IllegalStateException).
        Log.e(TAG, "media player prepare failed", t);
        return;
    }

    synchronized(this) {
        updateSurface = false;
    }

    // prepare() is synchronous, so the player is ready here. (onPrepared also
    // calls start(); a second start() on a started player is harmless.)
    mMediaPlayer.start();

}

void ondrawvideo(){
    // Draws the latest decoded video frame onto the quad. Called from the GL
    // render loop; pulls a new frame into the OES texture only when the
    // OnFrameAvailable callback has flagged one.
    synchronized(this) {
        if (updateSurface) {
            mSurface.updateTexImage();
            mSurface.getTransformMatrix(mSTMatrix);
            updateSurface = false;
        }
    }

    // Green debug clear color; overwritten wherever the quad is drawn.
    GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
    GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);

    GLES20.glUseProgram(mProgram);
    checkGlError("glUseProgram");

    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);

    mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
    GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
            TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
    checkGlError("glVertexAttribPointer maPosition");
    GLES20.glEnableVertexAttribArray(maPositionHandle);
    checkGlError("glEnableVertexAttribArray maPositionHandle");

    mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
    // BUG FIX: texture coordinates are 2 floats (u, v), not 3; with size 3 the
    // last vertex reads past the end of the interleaved buffer. Assumes the
    // standard x,y,z,u,v layout — confirm against TRIANGLE_VERTICES_DATA.
    GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false,
            TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
    checkGlError("glVertexAttribPointer maTextureHandle");
    GLES20.glEnableVertexAttribArray(maTextureHandle);
    checkGlError("glEnableVertexAttribArray maTextureHandle");

    // Identity MVP: the quad is drawn in normalized device coordinates; the
    // ST matrix maps the SurfaceTexture's transform onto the UVs.
    Matrix.setIdentityM(mMVPMatrix, 0);
    GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
    GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);

    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    checkGlError("glDrawArrays");
    GLES20.glFinish();

}


     // Visualize planes. 
     mPlaneRenderer.drawPlanes(mSession.getAllPlanes(), frame.getPose(), projmtx); 


     // Visualize anchors created by touch. 
     float scaleFactor = 1.0f; 
     for (PlaneAttachment planeAttachment : mTouches) { 
      ondrawvideo(); 
      if (!planeAttachment.isTracking()) { 
       continue; 
      } 


      // Get the current combined pose of an Anchor and Plane in world space. The Anchor 
      // and Plane poses are updated during calls to session.update() as ARCore refines 
      // its estimate of the world. 
      planeAttachment.getPose().toMatrix(mAnchorMatrix, 0); 

      // Update and draw the model and its shadow. 
      mVirtualObject.updateModelMatrix(mAnchorMatrix, scaleFactor); 
      mVirtualObjectShadow.updateModelMatrix(mAnchorMatrix, scaleFactor); 
      mVirtualObject.draw(viewmtx, projmtx, lightIntensity); 
      mVirtualObjectShadow.draw(viewmtx, projmtx, lightIntensity); 
     } 

    } catch (Throwable t) { 
     // Avoid crashing the application due to unhandled exceptions. 
     Log.e(TAG, "Exception on the OpenGL thread", t); 
    } 
} 

현재 결과는 다음과 같습니다. 보시다시피, 평면 표면을 클릭하면 아래와 같이 표시됩니다:

enter image description here

아래 이미지와 같이 구현해야 합니다. 표시해 둔 bugdroid 이미지 위치에서 비디오가 재생되어야 하며, 비디오는 전체 화면을 차지해서는 안 되고 bugdroid 이미지 크기만큼만 표시되어야 합니다:

enter image description here

답변

0

솔루션: 저는 HelloAR 샘플의 ObjectRenderer 클래스를 모델로 하여 MovieClipRenderer라는 새 클래스를 만들어 이 문제를 해결했습니다. (ARCore SDK와 Unity를 사용하라는 다른 답변도 있었습니다.) 이 클래스는 쿼드 지오메트리를 만들고 미디어 플레이어의 텍스처를 쿼드에 렌더링합니다. 쿼드는 평면에 고정되어 있으므로 사용자가 주변을 둘러볼 때 움직이지 않습니다. 먼저 HelloArActivity에 렌더러의 멤버 변수를 추가했습니다:

private final MovieClipRenderer mMovieClipRenderer = new MovieClipRenderer(); 

테스트를 위해 https://www.videvo.net/video/chicken-on-green-screen/3435/ 의 스톡 동영상을 src/main/assets 에 추가했고, onSurfaceCreated()에서 렌더러를 초기화했습니다:

mMovieClipRenderer.createOnGlThread(); 

그런 다음 onDrawFrame()의 맨 아래에서 앵커를 확인합니다:

// First tap on a plane anchors the movie quad; subsequent taps create
// ordinary anchors for the 3D objects.
if (mMovieAnchor == null) { 
    mMovieAnchor = hit.createAnchor(); 
} else { 
    mAnchors.add(hit.createAnchor()); 
} 

평면을 처음 탭할 때 영화 앵커가 생성되도록 적중(hit) 테스트 코드를 약간 변경했고, 그 뒤 재생을 시작합니다:

// Once the movie anchor exists, lazily start playback on the first frame and
// render the movie quad at the anchor's pose, scaled down to 0.25.
if (mMovieAnchor != null) { 
     // Draw chickens! 
     if (!mMovieClipRenderer.isStarted()) { 
      mMovieClipRenderer.play("chicken.mp4", this); 
     } 
     mMovieAnchor.getPose().toMatrix(mAnchorMatrix,0); 
     mMovieClipRenderer.update(mAnchorMatrix, 0.25f); 
     mMovieClipRenderer.draw(mMovieAnchor.getPose(), viewmtx, projmtx); 
    } 

렌더링 클래스는 꽤 ​​길지만 OES 텍스처를 만들고 비디오 플레이어를 초기화하고 쿼드의 정점을 만들고 OES 텍스처를 그리는 프래그먼트 셰이더를로드하는 꽤 표준적인 GLES 코드입니다.

/** 
* Renders a movie clip with a green screen aware shader. 
* <p> 
* Copyright 2018 Google LLC 
* <p> 
* Licensed under the Apache License, Version 2.0 (the "License"); 
* you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at 
* <p> 
* http://www.apache.org/licenses/LICENSE-2.0 
* <p> 
* Unless required by applicable law or agreed to in writing, software 
* distributed under the License is distributed on an "AS IS" BASIS, 
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and 
* limitations under the License. 
*/ 
public class MovieClipRenderer implements
        SurfaceTexture.OnFrameAvailableListener {
    private static final String TAG = MovieClipRenderer.class.getSimpleName();

    // Quad geometry: 4 vertices drawn as a triangle strip.
    private static final int COORDS_PER_VERTEX = 3;
    private static final int TEXCOORDS_PER_VERTEX = 2;
    private static final int FLOAT_SIZE = 4;
    private static final float[] QUAD_COORDS = new float[]{
            -1.0f, -1.0f, 0.0f,
            -1.0f, +1.0f, 0.0f,
            +1.0f, -1.0f, 0.0f,
            +1.0f, +1.0f, 0.0f,
    };

    // Texture coordinates matching QUAD_COORDS vertex order (V flipped so the
    // video is upright).
    private static final float[] QUAD_TEXCOORDS = new float[]{
            0.0f, 1.0f,
            0.0f, 0.0f,
            1.0f, 1.0f,
            1.0f, 0.0f,
    };

    // Shader for a flat quad.
    private static final String VERTEX_SHADER =
            "uniform mat4 u_ModelViewProjection;\n\n" +
            "attribute vec4 a_Position;\n" +
            "attribute vec2 a_TexCoord;\n\n" +
            "varying vec2 v_TexCoord;\n\n" +
            "void main() {\n" +
            " gl_Position = u_ModelViewProjection * vec4(a_Position.xyz, 1.0);\n" +
            " v_TexCoord = a_TexCoord;\n" +
            "}";

    // The fragment shader samples the video texture, blending to transparent
    // for the green screen color. The color was determined by sampling a
    // screenshot of the video in an image editor.
    // BUG FIX: GLSL ES 1.00 does not accept the 'f' float-literal suffix, so
    // literals like "23.0f" fail to compile on strict drivers; plain float
    // literals are used instead.
    private static final String FRAGMENT_SHADER =
            "#extension GL_OES_EGL_image_external : require\n" +
            "\n" +
            "precision mediump float;\n" +
            "varying vec2 v_TexCoord;\n" +
            "uniform samplerExternalOES sTexture;\n" +
            "\n" +
            "void main() {\n" +
            " //TODO make this a uniform variable - " +
            " but this is the color of the background. 17ad2b\n" +
            " vec3 keying_color = vec3(23.0/255.0, 173.0/255.0, 43.0/255.0);\n" +
            " float thresh = 0.4; // 0 - 1.732\n" +
            " float slope = 0.2;\n" +
            " vec3 input_color = texture2D(sTexture, v_TexCoord).rgb;\n" +
            " float d = abs(length(abs(keying_color.rgb - input_color.rgb)));\n" +
            " float edge0 = thresh * (1.0 - slope);\n" +
            " float alpha = smoothstep(edge0,thresh,d);\n" +
            " gl_FragColor = vec4(input_color, alpha);\n" +
            "}";

    // Geometry data in GLES-friendly direct buffers.
    private FloatBuffer mQuadVertices;
    private FloatBuffer mQuadTexCoord;

    // Shader program id and parameter locations.
    private int mQuadProgram;
    private int mQuadPositionParam;
    private int mQuadTexCoordParam;
    private int mModelViewProjectionUniform;
    private int mTextureId = -1;

    // Matrix for the location and perspective of the quad.
    private float[] mModelMatrix = new float[16];

    // Media player, texture and other bookkeeping.
    // BUG FIX: done/prepared are written from MediaPlayer callbacks (main
    // thread) and read from the GL thread in draw(); volatile makes those
    // writes visible across threads.
    private MediaPlayer player;
    private SurfaceTexture videoTexture;
    private boolean frameAvailable = false;  // guarded by synchronized(this)
    private boolean started = false;         // guarded by synchronized(this)
    private volatile boolean done;
    private volatile boolean prepared;
    private static Handler handler;

    // Lock used for waiting if the player was not yet created.
    private final Object lock = new Object();

    /**
     * Updates the model matrix used to draw the quad.
     *
     * @param modelMatrix the anchor pose as a 4x4 column-major matrix
     * @param scaleFactor uniform scale applied to the quad
     */
    public void update(float[] modelMatrix, float scaleFactor) {
        float[] scaleMatrix = new float[16];
        Matrix.setIdentityM(scaleMatrix, 0);
        scaleMatrix[0] = scaleFactor;
        scaleMatrix[5] = scaleFactor;
        scaleMatrix[10] = scaleFactor;
        Matrix.multiplyMM(mModelMatrix, 0, modelMatrix, 0, scaleMatrix, 0);
    }

    /**
     * Initializes the GLES objects (OES texture, quad buffers, shader
     * program) and kicks off creation of the MediaPlayer on the main thread.
     * This is called from the GL render thread to make sure it has access to
     * the EGLContext.
     */
    public void createOnGlThread() {
        // 1 texture to hold the video frame.
        int textures[] = new int[1];
        GLES20.glGenTextures(1, textures, 0);
        mTextureId = textures[0];
        int mTextureTarget = GLES11Ext.GL_TEXTURE_EXTERNAL_OES;
        GLES20.glBindTexture(mTextureTarget, mTextureId);
        GLES20.glTexParameteri(mTextureTarget, GLES20.GL_TEXTURE_WRAP_S,
                GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(mTextureTarget, GLES20.GL_TEXTURE_WRAP_T,
                GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(mTextureTarget, GLES20.GL_TEXTURE_MIN_FILTER,
                GLES20.GL_NEAREST);
        GLES20.glTexParameteri(mTextureTarget, GLES20.GL_TEXTURE_MAG_FILTER,
                GLES20.GL_NEAREST);

        // The SurfaceTexture that feeds the OES texture; frame arrival is
        // signalled through onFrameAvailable.
        videoTexture = new SurfaceTexture(mTextureId);
        videoTexture.setOnFrameAvailableListener(this);

        // Make a quad to hold the movie.
        ByteBuffer bbVertices = ByteBuffer.allocateDirect(
                QUAD_COORDS.length * FLOAT_SIZE);
        bbVertices.order(ByteOrder.nativeOrder());
        mQuadVertices = bbVertices.asFloatBuffer();
        mQuadVertices.put(QUAD_COORDS);
        mQuadVertices.position(0);

        int numVertices = 4;
        ByteBuffer bbTexCoords = ByteBuffer.allocateDirect(
                numVertices * TEXCOORDS_PER_VERTEX * FLOAT_SIZE);
        bbTexCoords.order(ByteOrder.nativeOrder());
        mQuadTexCoord = bbTexCoords.asFloatBuffer();
        mQuadTexCoord.put(QUAD_TEXCOORDS);
        mQuadTexCoord.position(0);

        int vertexShader = loadGLShader(TAG, GLES20.GL_VERTEX_SHADER, VERTEX_SHADER);
        int fragmentShader = loadGLShader(TAG,
                GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SHADER);

        mQuadProgram = GLES20.glCreateProgram();
        GLES20.glAttachShader(mQuadProgram, vertexShader);
        GLES20.glAttachShader(mQuadProgram, fragmentShader);
        GLES20.glLinkProgram(mQuadProgram);
        GLES20.glUseProgram(mQuadProgram);

        ShaderUtil.checkGLError(TAG, "Program creation");

        mQuadPositionParam = GLES20.glGetAttribLocation(mQuadProgram, "a_Position");
        mQuadTexCoordParam = GLES20.glGetAttribLocation(mQuadProgram, "a_TexCoord");
        mModelViewProjectionUniform = GLES20.glGetUniformLocation(
                mQuadProgram, "u_ModelViewProjection");

        ShaderUtil.checkGLError(TAG, "Program parameters");

        Matrix.setIdentityM(mModelMatrix, 0);

        initializeMediaPlayer();
    }

    /**
     * Draws the movie quad with the latest available video frame. Must be
     * called on the GL thread. No-op until the player is prepared, or after
     * playback finished/errored.
     *
     * @param pose              retained for interface compatibility; the quad
     *                          position actually comes from the matrix set in
     *                          {@link #update(float[], float)}
     * @param cameraView        camera view matrix
     * @param cameraPerspective camera projection matrix
     */
    public void draw(Pose pose, float[] cameraView, float[] cameraPerspective) {
        if (done || !prepared) {
            return;
        }
        synchronized (this) {
            if (frameAvailable) {
                videoTexture.updateTexImage();
                frameAvailable = false;
            }
        }

        float[] modelView = new float[16];
        float[] modelViewProjection = new float[16];
        Matrix.multiplyMM(modelView, 0, cameraView, 0, mModelMatrix, 0);
        Matrix.multiplyMM(modelViewProjection, 0, cameraPerspective, 0, modelView, 0);

        ShaderUtil.checkGLError(TAG, "Before draw");

        // Alpha blending lets the green-keyed pixels show the camera feed.
        GLES20.glEnable(GL10.GL_BLEND);
        GLES20.glBlendFunc(GL10.GL_SRC_ALPHA, GL10.GL_ONE_MINUS_SRC_ALPHA);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
        GLES20.glUseProgram(mQuadProgram);

        // Set the vertex positions.
        GLES20.glVertexAttribPointer(
                mQuadPositionParam, COORDS_PER_VERTEX, GLES20.GL_FLOAT,
                false, 0, mQuadVertices);
        // Set the texture coordinates.
        GLES20.glVertexAttribPointer(mQuadTexCoordParam, TEXCOORDS_PER_VERTEX,
                GLES20.GL_FLOAT, false, 0, mQuadTexCoord);

        // Enable vertex arrays.
        GLES20.glEnableVertexAttribArray(mQuadPositionParam);
        GLES20.glEnableVertexAttribArray(mQuadTexCoordParam);
        GLES20.glUniformMatrix4fv(mModelViewProjectionUniform, 1, false,
                modelViewProjection, 0);

        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

        // Disable vertex arrays.
        GLES20.glDisableVertexAttribArray(mQuadPositionParam);
        GLES20.glDisableVertexAttribArray(mQuadTexCoordParam);

        ShaderUtil.checkGLError(TAG, "Draw");
    }

    /** Creates the MediaPlayer on the main thread and signals waiters. */
    private void initializeMediaPlayer() {
        if (handler == null)
            handler = new Handler(Looper.getMainLooper());

        handler.post(new Runnable() {
            @Override
            public void run() {
                synchronized (lock) {
                    player = new MediaPlayer();
                    // notifyAll in case several callers are blocked in play().
                    lock.notifyAll();
                }
            }
        });
    }

    /** SurfaceTexture callback: flags that a new video frame can be pulled. */
    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        synchronized (this) {
            frameAvailable = true;
        }
    }

    /**
     * Starts asynchronous playback of a movie from the app's assets.
     *
     * @param filename asset file name, e.g. "chicken.mp4"
     * @param context  context used to open the asset
     * @return true if preparation was kicked off, false on error/interrupt
     * @throws FileNotFoundException declared for interface compatibility
     */
    public boolean play(final String filename, Context context)
            throws FileNotFoundException {
        // Wait for the player to be created on the main thread.
        if (player == null) {
            synchronized (lock) {
                while (player == null) {
                    try {
                        lock.wait();
                    } catch (InterruptedException e) {
                        // BUG FIX: preserve the interrupt for callers.
                        Thread.currentThread().interrupt();
                        return false;
                    }
                }
            }
        }

        player.reset();
        done = false;

        player.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
            @Override
            public void onPrepared(MediaPlayer mp) {
                prepared = true;
                mp.start();
            }
        });
        player.setOnErrorListener(new MediaPlayer.OnErrorListener() {
            @Override
            public boolean onError(MediaPlayer mp, int what, int extra) {
                done = true;
                Log.e("VideoPlayer",
                        String.format("Error occured: %d, %d\n", what, extra));
                return false;
            }
        });

        player.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
            @Override
            public void onCompletion(MediaPlayer mp) {
                done = true;
            }
        });

        player.setOnInfoListener(new MediaPlayer.OnInfoListener() {
            @Override
            public boolean onInfo(MediaPlayer mediaPlayer, int i, int i1) {
                return false;
            }
        });

        // BUG FIX: close the AssetFileDescriptor once the data source is set;
        // the original leaked it.
        try (AssetFileDescriptor descriptor = context.getAssets().openFd(filename)) {
            player.setDataSource(descriptor.getFileDescriptor(),
                    descriptor.getStartOffset(),
                    descriptor.getLength());
            // NOTE(review): the Surface wrapping videoTexture is never
            // released; consider keeping a reference and releasing it when
            // playback ends.
            player.setSurface(new Surface(videoTexture));
            player.prepareAsync();
            synchronized (this) {
                started = true;
            }
        } catch (IOException e) {
            Log.e(TAG, "Exception preparing movie", e);
            return false;
        }

        return true;
    }

    /** @return true once play() has successfully kicked off preparation. */
    public synchronized boolean isStarted() {
        return started;
    }

    /**
     * Compiles a GL shader, throwing on failure.
     *
     * @param tag  log tag for compile errors
     * @param type GLES20.GL_VERTEX_SHADER or GLES20.GL_FRAGMENT_SHADER
     * @param code GLSL source
     * @return the shader handle
     */
    static int loadGLShader(String tag, int type, String code) {
        int shader = GLES20.glCreateShader(type);
        GLES20.glShaderSource(shader, code);
        GLES20.glCompileShader(shader);

        // Get the compilation status.
        final int[] compileStatus = new int[1];
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);

        // If the compilation failed, delete the shader.
        if (compileStatus[0] == 0) {
            Log.e(tag, "Error compiling shader: " + GLES20.glGetShaderInfoLog(shader));
            GLES20.glDeleteShader(shader);
            shader = 0;
        }

        if (shader == 0) {
            throw new RuntimeException("Error creating shader.");
        }

        return shader;
    }
}
관련 문제