Object file (Tree)
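
The Tree class loads its geometry from three plain-text asset files (verts.txt, norms.txt, texcoords.txt). Judging from the loader code below, each file starts with the total number of floats on its first line, followed by one float per line; blank lines and lines containing '/' are skipped. A minimal verts.txt for a single triangle (9 floats = 3 vertices) would look like this:

9
0.0
0.0
0.0
1.0
0.0
0.0
0.0
1.0
0.0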

package com.stp.stplife.vuforia.SampleApplication.utils;

import android.content.res.AssetManager;
import android.text.TextUtils;
import android.util.Log;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class Tree extends MeshObject {
    private ByteBuffer verts;
    private ByteBuffer textCoords;
    private ByteBuffer norms;
    int numVerts = 0;

    private TreeObjectLoadingListener mTreeObjectLoadingListener;

    public void setmTreeObjectLoadingListener(TreeObjectLoadingListener mTreeObjectLoadingListener) {
        this.mTreeObjectLoadingListener = mTreeObjectLoadingListener;
    }

    public interface TreeObjectLoadingListener {
        void onComplete();
    }

    public Tree(AssetManager assetManager) throws IOException {
        vertsloadModel(assetManager, "verts.txt");
        normsloadModel(assetManager, "norms.txt");
        textCoordsloadModel(assetManager, "texcoords.txt");

        // Note: the listener can only be registered after construction, so this
        // check always sees null here; notify completion from the caller instead.
        if (mTreeObjectLoadingListener != null)
            mTreeObjectLoadingListener.onComplete();
    }

    public void vertsloadModel(AssetManager assetManager, String filename) throws IOException {
        InputStream is = null;
        int floatsToRead = 0;
        try {
            is = assetManager.open(filename);
            BufferedReader reader = new BufferedReader(
                    new InputStreamReader(is));

            String line = reader.readLine();

            floatsToRead = Integer.parseInt(line);
            numVerts = floatsToRead / 3;

            verts = ByteBuffer.allocateDirect(floatsToRead * 4);
            verts.order(ByteOrder.nativeOrder());
            for (int i = 0; i < floatsToRead; i++) {
                String curline = reader.readLine();
                if (curline == null)
                    break; // unexpected end of file; avoid looping forever
                if (TextUtils.isEmpty(curline) || curline.indexOf('/') >= 0) {
                    i--; // skip blank and comment lines
                    continue;
                }

                /*if (curline.indexOf(',') >= 0) {
                    String result[] = curline.split(",");
                    for (int j = 0; j < result.length; j++) {
                        verts.putFloat(Float.parseFloat(result[j]));
                    }
                } else {
                    verts.putFloat(Float.parseFloat(curline));
                }*/
                verts.putFloat(Float.parseFloat(curline));
            }
            verts.rewind();
        } catch (Exception e) {
            // Direct buffers have no backing array, so array() would throw here;
            // log the expected size and the exception instead.
            Log.e("SJ", "Failed to load " + filename + " (expected "
                    + (floatsToRead * 4) + " bytes)", e);

        } finally {
            if (is != null)
                is.close();
        }
    }

    public void normsloadModel(AssetManager assetManager, String filename)
            throws IOException {
        InputStream is = null;
        try {
            is = assetManager.open(filename);
            BufferedReader reader = new BufferedReader(
                    new InputStreamReader(is));

            String line = reader.readLine();
            int floatsToRead = Integer.parseInt(line);

            norms = ByteBuffer.allocateDirect(floatsToRead * 4);
            norms.order(ByteOrder.nativeOrder());
            for (int i = 0; i < floatsToRead; i++) {
                String curline = reader.readLine();
                if (curline == null)
                    break; // unexpected end of file; avoid looping forever
                if (TextUtils.isEmpty(curline) || curline.indexOf('/') >= 0) {
                    i--; // skip blank and comment lines
                    continue;
                }

                /*if (curline.indexOf(',') >= 0) {
                    String result[] = curline.split(",");
                    for (int j = 0; j < result.length; j++) {
                        norms.putFloat(Float.parseFloat(result[j]));
                    }
                } else {
                    norms.putFloat(Float.parseFloat(curline));
                }*/
                norms.putFloat(Float.parseFloat(curline));
            }
            norms.rewind();
        } catch (Exception e) {
            // Direct buffers have no backing array; log the exception instead.
            Log.e("SJ", "Failed to load " + filename, e);

        } finally {
            if (is != null)
                is.close();
        }
    }

    public void textCoordsloadModel(AssetManager assetManager, String filename)
            throws IOException {
        InputStream is = null;
        try {
            is = assetManager.open(filename);
            BufferedReader reader = new BufferedReader(
                    new InputStreamReader(is));

            String line = reader.readLine();
            int floatsToRead = Integer.parseInt(line);

            textCoords = ByteBuffer.allocateDirect(floatsToRead * 4);
            textCoords.order(ByteOrder.nativeOrder());
            for (int i = 0; i < floatsToRead; i++) {
                String curline = reader.readLine();
                if (curline == null)
                    break; // unexpected end of file; avoid looping forever
                if (TextUtils.isEmpty(curline) || curline.indexOf('/') >= 0) {
                    i--; // skip blank and comment lines
                    continue;
                }

                /*if (curline.indexOf(',') >= 0) {
                    String result[] = curline.split(",");
                    for (int j = 0; j < result.length; j++) {
                        textCoords.putFloat(Float.parseFloat(result[j]));
                    }
                } else {
                    textCoords.putFloat(Float.parseFloat(curline));
                }*/
                textCoords.putFloat(Float.parseFloat(curline));
            }
            textCoords.rewind();

        } catch (Exception e) {
            // Direct buffers have no backing array; log the exception instead.
            Log.e("SJ", "Failed to load " + filename, e);

        } finally {
            if (is != null)
                is.close();
        }
    }


    @Override
    public Buffer getBuffer(BUFFER_TYPE bufferType) {
        Buffer result = null;
        switch (bufferType) {
            case BUFFER_TYPE_VERTEX:
                result = verts;
                break;
            case BUFFER_TYPE_TEXTURE_COORD:
                result = textCoords;
                break;
            case BUFFER_TYPE_NORMALS:
                result = norms;
                break;
            default:
                break;
        }
        return result;
    }


    @Override
    public int getNumObjectVertex() {
        return numVerts;
    }


    @Override
    public int getNumObjectIndex() {
        return 0;
    }

}
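
For reference, here is a minimal sketch of how the class might be used from an activity. Since the constructor performs blocking asset I/O and cannot see a listener registered after construction, the sketch loads on a background thread and reports completion itself (onTreeLoaded is a hypothetical callback, not part of the project):

// Minimal usage sketch (assumes it runs inside an Activity).
new Thread(() -> {
    try {
        Tree tree = new Tree(getAssets()); // blocking asset I/O
        // The constructor cannot invoke a listener set after construction,
        // so completion is reported here instead.
        runOnUiThread(() -> onTreeLoaded(tree)); // onTreeLoaded is hypothetical
    } catch (IOException e) {
        Log.e("SJ", "Failed to load tree model", e);
    }
}).start();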

Renderer file
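
This renderer is adapted from the Vuforia VideoPlayback sample: it plays the movie on the "test1" image target, draws the Tree model on the "test2" target, and can grab the current GL frame into a Bitmap for StpCaptrueActivity.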

/*===============================================================================
Copyright (c) 2016 PTC Inc. All Rights Reserved.

Copyright (c) 2012-2014 Qualcomm Connected Experiences, Inc. All Rights Reserved.

Vuforia is a trademark of PTC Inc., registered in the United States and other 
countries.
===============================================================================*/

package com.stp.stplife.vuforia.VideoPlayback;

import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.content.Intent;
import android.graphics.Bitmap;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import android.os.Build;
import android.os.SystemClock;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.SurfaceHolder;
import android.widget.Toast;

import com.orhanobut.logger.Logger;
import com.stp.stplife.StpCaptrueActivity;
import com.stp.stplife.StpLifeApplication;
import com.stp.stplife.util.Utils;
import com.stp.stplife.vuforia.SampleApplication.SampleAppRenderer;
import com.stp.stplife.vuforia.SampleApplication.SampleAppRendererControl;
import com.stp.stplife.vuforia.SampleApplication.SampleApplicationSession;
import com.stp.stplife.vuforia.SampleApplication.utils.CubeShaders;
import com.stp.stplife.vuforia.SampleApplication.utils.SampleMath;
import com.stp.stplife.vuforia.SampleApplication.utils.SampleUtils;
import com.stp.stplife.vuforia.SampleApplication.utils.Texture;
import com.stp.stplife.vuforia.SampleApplication.utils.Tree;
import com.stp.stplife.vuforia.VideoPlayback.VideoPlayerHelper.MEDIA_STATE;
import com.stp.stplife.vuforia.VideoPlayback.VideoPlayerHelper.MEDIA_TYPE;
import com.vuforia.Device;
import com.vuforia.ImageTarget;
import com.vuforia.Matrix44F;
import com.vuforia.Renderer;
import com.vuforia.State;
import com.vuforia.Tool;
import com.vuforia.TrackableResult;
import com.vuforia.Vec2F;
import com.vuforia.Vec3F;
import com.vuforia.Vuforia;
import com.vuforia.ar.pl.DebugLog;

import java.io.IOException;
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.Vector;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

// http://www.codexiu.cn/android/blog/30581/
// The renderer class for the VideoPlayback sample.
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
public class VideoPlaybackRenderer implements GLSurfaceView.Renderer, SampleAppRendererControl {
    private static final String LOGTAG = "VideoPlaybackRenderer";
    private static final float OBJECT_SCALE_FLOAT = 120.0f;

    SampleApplicationSession vuforiaAppSession;
    SampleAppRenderer mSampleAppRenderer;

    // Video Playback Rendering Specific
    private int videoPlaybackShaderID = 0;
    private int videoFullPlaybackShaderID = 0;
    private int videoPlaybackVertexHandle = 0;
    private int videoPlaybackTexCoordHandle = 0;
    private int videoPlaybackMVPMatrixHandle = 0;
    private int videoPlaybackTexSamplerOESHandle = 0;

    // Video Playback Textures for the two targets
    int videoPlaybackTextureID[] = new int[VideoPlayback.NUM_TARGETS];

    // Keyframe and icon rendering specific
    private int keyframeShaderID = 0;
    private int keyframeVertexHandle = 0;
    private int keyframeTexCoordHandle = 0;
    private int keyframeMVPMatrixHandle = 0;
    private int keyframeTexSampler2DHandle = 0;

    // Tree shader handles
    private int shaderProgramID;
    private int vertexHandle;
    private int normalHandle;
    private int textureCoordHandle;
    private int mvpMatrixHandle;
    private int texSampler2DHandle;

    // We cannot use the default texture coordinates of the quad since these
    // will change depending on the video itself
    private float videoQuadTextureCoords[] = {0.0f, 0.0f, 1.0f, 0.0f, 1.0f,
            1.0f, 0.0f, 1.0f,};

    // This variable will hold the transformed coordinates (changes every frame)
    private float videoQuadTextureCoordsTransformedStones[] = {0.0f, 0.0f,
            1.0f, 0.0f, 1.0f, 1.0f, 0.0f, 1.0f,};

    private float videoQuadTextureCoordsTransformedChips[] = {0.0f, 0.0f,
            1.0f, 0.0f, 1.0f, 1.0f, 0.0f, 1.0f,};

    // Trackable dimensions
    Vec3F targetPositiveDimensions[] = new Vec3F[VideoPlayback.NUM_TARGETS];

    static int NUM_QUAD_VERTEX = 4;
    static int NUM_QUAD_INDEX = 6;

    double quadVerticesArray[] = {-1.0f, -1.0f, 0.0f, 1.0f, -1.0f, 0.0f, 1.0f,
            1.0f, 0.0f, -1.0f, 1.0f, 0.0f};

    double quadTexCoordsArray[] = {0.0f, 0.0f, 1.0f, 0.0f, 1.0f, 1.0f, 0.0f,
            1.0f};

    double quadNormalsArray[] = {0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1,};

    short quadIndicesArray[] = {0, 1, 2, 2, 3, 0};

    Buffer quadVertices, quadTexCoords, quadIndices, quadNormals;

    private boolean mIsActive = false;
    private Matrix44F tappingProjectionMatrix = null;

    private float[][] mTexCoordTransformationMatrix = null;
    private VideoPlayerHelper mVideoPlayerHelper[] = null;
    private String mMovieName[] = null;
    private MEDIA_TYPE mCanRequestType[] = null;
    private int mSeekPosition[] = null;
    private boolean mShouldPlayImmediately[] = null;
    private long mLostTrackingSince[] = null;
    private boolean mLoadRequested[] = null;

    private int mViewWidth = 0;
    private int mViewHeight = 0;
    private boolean isCaptrue = false; // photo capture requested
    private boolean isRecording = false; // whether video is currently being recorded
    private ArrayList<Bitmap> mVideoBmps;
    private boolean isAutoStart = false; // used to auto-play once on first detection
    private SurfaceHolder mSurfaceHolder;
    private Tree mTree;

    VideoPlayback mActivity;

    // Needed to calculate whether a screen tap is inside the target
    Matrix44F modelViewMatrix[] = new Matrix44F[VideoPlayback.NUM_TARGETS];

    private Vector<Texture> mTextures;

    boolean isTracking[] = new boolean[VideoPlayback.NUM_TARGETS];
    MEDIA_STATE currentStatus[] = new MEDIA_STATE[VideoPlayback.NUM_TARGETS];

    // These hold the aspect ratio of both the video and the
    // keyframe
    float videoQuadAspectRatio[] = new float[VideoPlayback.NUM_TARGETS];
    float keyframeQuadAspectRatio[] = new float[VideoPlayback.NUM_TARGETS];
    private float kTreeScale = 0.5f;


    public VideoPlaybackRenderer(VideoPlayback activity,
                                 SampleApplicationSession session, SurfaceHolder surfaceHolder) {

        mActivity = activity;
        vuforiaAppSession = session;
        mVideoBmps = new ArrayList<Bitmap>();
        mSurfaceHolder = surfaceHolder;

        // SampleAppRenderer used to encapsulate the use of RenderingPrimitives setting
        // the device mode AR/VR and stereo mode
        mSampleAppRenderer = new SampleAppRenderer(this, mActivity, Device.MODE.MODE_AR, false, 0.01f, 5f);

        // Create an array of the size of the number of targets we have
        mVideoPlayerHelper = new VideoPlayerHelper[VideoPlayback.NUM_TARGETS];
        mMovieName = new String[VideoPlayback.NUM_TARGETS];
        mCanRequestType = new MEDIA_TYPE[VideoPlayback.NUM_TARGETS];
        mSeekPosition = new int[VideoPlayback.NUM_TARGETS];
        mShouldPlayImmediately = new boolean[VideoPlayback.NUM_TARGETS];
        mLostTrackingSince = new long[VideoPlayback.NUM_TARGETS];
        mLoadRequested = new boolean[VideoPlayback.NUM_TARGETS];
        mTexCoordTransformationMatrix = new float[VideoPlayback.NUM_TARGETS][16];

        // Initialize the arrays to default values
        for (int i = 0; i < VideoPlayback.NUM_TARGETS; i++) {
            mVideoPlayerHelper[i] = null;
            mMovieName[i] = "";
            mCanRequestType[i] = MEDIA_TYPE.ON_TEXTURE;
            mSeekPosition[i] = 0;
            mShouldPlayImmediately[i] = false;
            mLostTrackingSince[i] = -1;
            mLoadRequested[i] = false;
        }

        for (int i = 0; i < VideoPlayback.NUM_TARGETS; i++)
            targetPositiveDimensions[i] = new Vec3F();

        for (int i = 0; i < VideoPlayback.NUM_TARGETS; i++)
            modelViewMatrix[i] = new Matrix44F();
    }

    public boolean isCaptrue() {
        return isCaptrue;
    }

    public void setCaptrue(boolean captrue) {
        isCaptrue = captrue;
    }

    public boolean isRecording() {
        return isRecording;
    }

    public void setRecording(boolean recording) {
        isRecording = recording;
    }

    public ArrayList<Bitmap> getVideoBmps() {
        return mVideoBmps;
    }

    public void setVideoBmps(ArrayList<Bitmap> mVideoBmps) {
        this.mVideoBmps = mVideoBmps;
    }

    // Store the Player Helper object passed from the main activity
    public void setVideoPlayerHelper(int target,
                                     VideoPlayerHelper newVideoPlayerHelper) {
        mVideoPlayerHelper[target] = newVideoPlayerHelper;
    }


    public void requestLoad(int target, String movieName, int seekPosition,
                            boolean playImmediately) {
        mMovieName[target] = movieName;
        mSeekPosition[target] = seekPosition;
        mShouldPlayImmediately[target] = playImmediately;
        mLoadRequested[target] = true;
    }


    // Called when the surface is created or recreated.
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        // Call function to initialize rendering:
        // The video texture is also created on this step
        initRendering();

        // Call Vuforia function to (re)initialize rendering after first use
        // or after OpenGL ES context was lost (e.g. after onPause/onResume):
        Vuforia.onSurfaceCreated();

        mSampleAppRenderer.onSurfaceCreated();

        for (int i = 0; i < VideoPlayback.NUM_TARGETS; i++) {

            if (mVideoPlayerHelper[i] != null) {
                // The VideoPlayerHelper needs to set up a surface texture
                // given the texture id. Here we tell the video player that we
                // would like to play the movie both on texture and on full
                // screen. Note that this does not mean the platform will be
                // able to do what we request; after the file has been loaded,
                // always check with isPlayableOnTexture() whether it can be
                // played embedded in the AR scene.
                if (!mVideoPlayerHelper[i]
                        .setupSurfaceTexture(videoPlaybackTextureID[i]))
                    mCanRequestType[i] = MEDIA_TYPE.FULLSCREEN;
                else
                    mCanRequestType[i] = MEDIA_TYPE.ON_TEXTURE_FULLSCREEN;

                // And now check if a load has been requested with the
                // parameters passed from the main activity
                if (mLoadRequested[i]) {
                    mVideoPlayerHelper[i].load(mMovieName[i],
                            mCanRequestType[i], mShouldPlayImmediately[i],
                            mSeekPosition[i], mSurfaceHolder);
                    mLoadRequested[i] = false;
                }
            }
        }
    }


    // Called when the surface changed size.
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        mViewWidth = width;
        mViewHeight = height;

        // Call Vuforia function to handle render surface size changes:
        Vuforia.onSurfaceChanged(width, height);

        // RenderingPrimitives to be updated when some rendering change is done
        mSampleAppRenderer.onConfigurationChanged(mIsActive);

        // On every onPause the movie has to be unloaded to release resources,
        // so it must be reloaded on every surface create or surface change.
        // See:
        // http://developer.android.com/reference/android/media/MediaPlayer.html#release()
        for (int i = 0; i < VideoPlayback.NUM_TARGETS; i++) {
            if (mLoadRequested[i] && mVideoPlayerHelper[i] != null) {
                mVideoPlayerHelper[i].load(mMovieName[i], mCanRequestType[i],
                        mShouldPlayImmediately[i], mSeekPosition[i], mSurfaceHolder);
                mLoadRequested[i] = false;
            }
        }
    }


    // Called to draw the current frame.
    public void onDrawFrame(GL10 gl) {
        if (!mIsActive)
            return;

        for (int i = 0; i < VideoPlayback.NUM_TARGETS; i++) {
            if (mVideoPlayerHelper[i] != null) {
                if (mVideoPlayerHelper[i].isPlayableOnTexture()) {
                    // First we need to update the video data. This is a
                    // built-in Android call that uploads the decoded frame to
                    // the OES texture. We only need to do this while the movie
                    // is playing.
                    if (mVideoPlayerHelper[i].getStatus() == MEDIA_STATE.PLAYING) {
                        mVideoPlayerHelper[i].updateVideoData();
                    }

                    // According to the Android API
                    // (http://developer.android.com/reference/android/graphics/SurfaceTexture.html)
                    // transforming the texture coordinates needs to happen
                    // every frame.
                    mVideoPlayerHelper[i]
                            .getSurfaceTextureTransformMatrix(mTexCoordTransformationMatrix[i]);
                    setVideoDimensions(i,
                            mVideoPlayerHelper[i].getVideoWidth(),
                            mVideoPlayerHelper[i].getVideoHeight(),
                            mTexCoordTransformationMatrix[i]);
                }

                setStatus(i, mVideoPlayerHelper[i].getStatus().getNumericType());
            }
        }

        // Call our function to render content from SampleAppRenderer class
        mSampleAppRenderer.render();

        for (int i = 0; i < VideoPlayback.NUM_TARGETS; i++) {
            // Ask whether the target is currently being tracked and if so react
            // to it
            if (isTracking(i)) {
                // If it is tracking reset the timestamp for lost tracking
                mLostTrackingSince[i] = -1;
            } else {
                // If it isn't tracking
                // check whether it just lost it or if it's been a while
                if (mLostTrackingSince[i] < 0)
                    mLostTrackingSince[i] = SystemClock.uptimeMillis();
                else {
                    // If it's been more than 2 seconds then pause the player
                    if ((SystemClock.uptimeMillis() - mLostTrackingSince[i]) > 2000) {
                        /*if (mVideoPlayerHelper[i] != null)
                            mVideoPlayerHelper[i].pause();*/
                    }
                }
            }
        }

        if (isCaptrue) {
            DebugLog.LOGD("SJ", "captrue");
            Bitmap bmp = Utils.grabPixels(0, 0, mViewWidth, mViewHeight);
            //Utils.saveScreenShot(mActivity, 0, 0, mViewWidth, mViewHeight);
            setCaptrue(false);

            ((StpLifeApplication) mActivity.getApplication()).setCaptrueBitmap(bmp);

            Intent intent = new Intent(mActivity, StpCaptrueActivity.class);
            mActivity.startActivity(intent);
        }

        // If you would like the video to start playing as soon as it starts
        // tracking, and pause as soon as tracking is lost, you can do that
        // here by commenting out the for-loop above and instead checking
        // whether the isTracking() value has changed since the last frame.
        // Be careful not to trigger automatic playback for fullscreen, since
        // that would be inconvenient for your users.

    }


    public void setActive(boolean active) {
        mIsActive = active;

        if (mIsActive)
            mSampleAppRenderer.configureVideoBackground();
    }


    @SuppressLint("InlinedApi")
    void initRendering() {
        Log.d(LOGTAG, "VideoPlayback VideoPlaybackRenderer initRendering");
        mTree = ((StpLifeApplication) mActivity.getApplication()).getTree();
        if (mTree == null) {
            // The model has not been loaded yet; close the activity and skip
            // the rest of the GL setup.
            mActivity.finish();
            return;
        }

        // Define clear color
        GLES20.glClearColor(0.0f, 0.0f, 0.0f, Vuforia.requiresAlpha() ? 0.0f
                : 1.0f);

        // Now generate the OpenGL texture objects and add settings
        for (Texture t : mTextures) {
            // Here we create the textures for the keyframe
            // and for all the icons
            GLES20.glGenTextures(1, t.mTextureID, 0);
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, t.mTextureID[0]);
            GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                    GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
            GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                    GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
            GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                    GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
            GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                    GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
            GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA,
                    t.mWidth, t.mHeight, 0, GLES20.GL_RGBA,
                    GLES20.GL_UNSIGNED_BYTE, t.mData);
        }

        // Now we create the texture for the video data from the movie
        // IMPORTANT:
        // Notice that the textures are not typical GL_TEXTURE_2D textures
        // but instead are GL_TEXTURE_EXTERNAL_OES extension textures
        // This is required by the Android SurfaceTexture
        for (int i = 0; i < VideoPlayback.NUM_TARGETS; i++) {
            GLES20.glGenTextures(1, videoPlaybackTextureID, i);
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                    videoPlaybackTextureID[i]);
            GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                    GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
            GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                    GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
        }

        // The first shader is the one that will display the video data of the
        // movie
        // (it is aware of the GL_TEXTURE_EXTERNAL_OES extension)
        videoPlaybackShaderID = SampleUtils.createProgramFromShaderSrc(
                VideoPlaybackShaders.VIDEO_PLAYBACK_VERTEX_SHADER,
                VideoPlaybackShaders.VIDEO_PLAYBACK_FRAGMENT_SHADER);
        videoFullPlaybackShaderID = SampleUtils.createProgramFromShaderSrc(
                VideoPlaybackShaders.VIDEO_FULL_PLAYBACK_VERTEX_SHADER,
                VideoPlaybackShaders.VIDEO_PLAYBACK_FRAGMENT_SHADER);
        videoPlaybackVertexHandle = GLES20.glGetAttribLocation(
                videoPlaybackShaderID, "vertexPosition");
        videoPlaybackTexCoordHandle = GLES20.glGetAttribLocation(
                videoPlaybackShaderID, "vertexTexCoord");
        videoPlaybackMVPMatrixHandle = GLES20.glGetUniformLocation(
                videoPlaybackShaderID, "modelViewProjectionMatrix");
        videoPlaybackTexSamplerOESHandle = GLES20.glGetUniformLocation(
                videoPlaybackShaderID, "texSamplerOES");

        // This is a simpler shader with regular 2D textures
        keyframeShaderID = SampleUtils.createProgramFromShaderSrc(
                KeyFrameShaders.KEY_FRAME_VERTEX_SHADER,
                KeyFrameShaders.KEY_FRAME_FRAGMENT_SHADER);
        keyframeVertexHandle = GLES20.glGetAttribLocation(keyframeShaderID,
                "vertexPosition");
        keyframeTexCoordHandle = GLES20.glGetAttribLocation(keyframeShaderID,
                "vertexTexCoord");
        keyframeMVPMatrixHandle = GLES20.glGetUniformLocation(keyframeShaderID,
                "modelViewProjectionMatrix");
        keyframeTexSampler2DHandle = GLES20.glGetUniformLocation(
                keyframeShaderID, "texSampler2D");

        shaderProgramID = SampleUtils.createProgramFromShaderSrc(
                CubeShaders.CUBE_MESH_VERTEX_SHADER,
                CubeShaders.CUBE_MESH_FRAGMENT_SHADER);

        vertexHandle = GLES20.glGetAttribLocation(shaderProgramID,
                "vertexPosition");
        textureCoordHandle = GLES20.glGetAttribLocation(shaderProgramID,
                "vertexTexCoord");
        normalHandle = GLES20.glGetAttribLocation(shaderProgramID,
                "vertexNormal");
        mvpMatrixHandle = GLES20.glGetUniformLocation(shaderProgramID,
                "modelViewProjectionMatrix");
        texSampler2DHandle = GLES20.glGetUniformLocation(shaderProgramID,
                "texSampler2D");

        keyframeQuadAspectRatio[VideoPlayback.TEST2] = (float) mTextures
                .get(3).mHeight / (float) mTextures.get(3).mWidth;
        keyframeQuadAspectRatio[VideoPlayback.TEST1] = (float) mTextures.get(3).mHeight
                / (float) mTextures.get(3).mWidth;

        quadVertices = fillBuffer(quadVerticesArray);
        quadTexCoords = fillBuffer(quadTexCoordsArray);
        quadIndices = fillBuffer(quadIndicesArray);
        quadNormals = fillBuffer(quadNormalsArray);
    }


    private Buffer fillBuffer(double[] array) {
        // Convert to floats because OpenGL doesn't work on doubles, and manually
        // casting each input value would take too much time.
        ByteBuffer bb = ByteBuffer.allocateDirect(4 * array.length); // each float takes 4 bytes
        bb.order(ByteOrder.LITTLE_ENDIAN);
        for (double d : array)
            bb.putFloat((float) d);
        bb.rewind();

        return bb;

    }


    private Buffer fillBuffer(short[] array) {
        ByteBuffer bb = ByteBuffer.allocateDirect(2 * array.length); // each short takes 2 bytes
        bb.order(ByteOrder.LITTLE_ENDIAN);
        for (short s : array)
            bb.putShort(s);
        bb.rewind();

        return bb;

    }


    private Buffer fillBuffer(float[] array) {
        ByteBuffer bb = ByteBuffer.allocateDirect(4 * array.length); // each float takes 4 bytes
        bb.order(ByteOrder.LITTLE_ENDIAN);
        for (float d : array)
            bb.putFloat(d);
        bb.rewind();

        return bb;

    }


    public void updateRenderingPrimitives() {
        mSampleAppRenderer.updateRenderingPrimitives();
    }


    @SuppressLint("InlinedApi")
    // The render function called from SampleAppRendering by using RenderingPrimitives views.
    // The state is owned by SampleAppRenderer, which controls its lifecycle.
    // State should not be cached outside this method.
    public void renderFrame(State state, float[] projectionMatrix) {
        if (mTree == null)
            return;

        // Renders video background replacing Renderer.DrawVideoBackground()
        mSampleAppRenderer.renderVideoBackground();

        GLES20.glEnable(GLES20.GL_DEPTH_TEST);

        // We must detect if background reflection is active and adjust the
        // culling direction. If the reflection is active, the post matrix has
        // been reflected as well, so standard counter-clockwise face culling
        // would render models "inside out".
        GLES20.glEnable(GLES20.GL_CULL_FACE);
        GLES20.glCullFace(GLES20.GL_BACK);

        if (tappingProjectionMatrix == null) {
            tappingProjectionMatrix = new Matrix44F();
            tappingProjectionMatrix.setData(projectionMatrix);
        }

        float temp[] = {0.0f, 0.0f, 0.0f};
        for (int i = 0; i < VideoPlayback.NUM_TARGETS; i++) {
            isTracking[i] = false;
            targetPositiveDimensions[i].setData(temp);
        }

        if (state.getNumTrackableResults() <= 0) {
            mActivity.setFindUI(false);
            if (currentStatus[1] == MEDIA_STATE.PLAYING) {
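                // While tracking is lost, keep drawing the movie with the
                // fullscreen shader; drawVideo accepts a null trackable
                // result in this case.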
                TrackableResult trackableResult = state.getTrackableResult(state.getNumTrackables());
                drawVideo(trackableResult, 1, projectionMatrix, videoFullPlaybackShaderID);
            }
        } else {
            mActivity.setFindUI(true);
        }

        // Did we find any trackables this frame?
        for (int tIdx = 0; tIdx < state.getNumTrackableResults(); tIdx++) {
            // Get the trackable:
            TrackableResult trackableResult = state.getTrackableResult(tIdx);

            ImageTarget imageTarget = (ImageTarget) trackableResult
                    .getTrackable();

            int currentTarget;

            // We store the modelview matrix to be used later by the tap
            // calculation
            if (imageTarget.getName().compareTo("test2") == 0)
                currentTarget = VideoPlayback.TEST2;
            else
                currentTarget = VideoPlayback.TEST1;

            modelViewMatrix[currentTarget] = Tool
                    .convertPose2GLMatrix(trackableResult.getPose());

            isTracking[currentTarget] = true;

            targetPositiveDimensions[currentTarget] = imageTarget.getSize();

            // The pose delivers the center of the target, thus the dimensions
            // go from -width/2 to width/2, same for height
            temp[0] = targetPositiveDimensions[currentTarget].getData()[0] / 2.0f;
            temp[1] = targetPositiveDimensions[currentTarget].getData()[1] / 2.0f;
            targetPositiveDimensions[currentTarget].setData(temp);

            // Render the tree model
            if (imageTarget.getName().compareTo("test2") == 0) {
                drawObject(trackableResult, currentTarget, projectionMatrix, shaderProgramID);
            }

            // If the movie is ready to start playing or has reached the end
            // of playback we would render the keyframe (disabled below)
            if ((currentStatus[currentTarget] == VideoPlayerHelper.MEDIA_STATE.READY)
                    || (currentStatus[currentTarget] == VideoPlayerHelper.MEDIA_STATE.REACHED_END)
                    || (currentStatus[currentTarget] == VideoPlayerHelper.MEDIA_STATE.NOT_READY)
                    || (currentStatus[currentTarget] == VideoPlayerHelper.MEDIA_STATE.ERROR)) {

                /*float[] modelViewMatrixKeyframe = Tool.convertPose2GLMatrix(
                        trackableResult.getPose()).getData();
                float[] modelViewProjectionKeyframe = new float[16];
                // Matrix.translateM(modelViewMatrixKeyframe, 0, 0.0f, 0.0f,
                // targetPositiveDimensions[currentTarget].getData()[0]);

                // Here we use the aspect ratio of the keyframe since it
                // is likely that it is not a perfect square

                float ratio = 1.0f;
                if (mTextures.get(currentTarget).mSuccess)
                    ratio = keyframeQuadAspectRatio[currentTarget];
                else
                    ratio = targetPositiveDimensions[currentTarget].getData()[1]
                            / targetPositiveDimensions[currentTarget].getData()[0];

                Matrix.scaleM(modelViewMatrixKeyframe, 0,
                        targetPositiveDimensions[currentTarget].getData()[0],
                        targetPositiveDimensions[currentTarget].getData()[0]
                                * ratio,
                        targetPositiveDimensions[currentTarget].getData()[0]);
                Matrix.multiplyMM(modelViewProjectionKeyframe, 0,
                        projectionMatrix, 0, modelViewMatrixKeyframe, 0);

                GLES20.glUseProgram(keyframeShaderID);

                // Prepare for rendering the keyframe
                GLES20.glVertexAttribPointer(keyframeVertexHandle, 3,
                        GLES20.GL_FLOAT, false, 0, quadVertices);
                GLES20.glVertexAttribPointer(keyframeTexCoordHandle, 2,
                        GLES20.GL_FLOAT, false, 0, quadTexCoords);

                GLES20.glEnableVertexAttribArray(keyframeVertexHandle);
                GLES20.glEnableVertexAttribArray(keyframeTexCoordHandle);

                GLES20.glActiveTexture(GLES20.GL_TEXTURE0);

                // The first loaded texture from the assets folder is the
                // keyframe
                GLES20.glBindTexture(GLES20.GL_TEXTURE_2D,
                        mTextures.get(currentTarget).mTextureID[0]);
                GLES20.glUniformMatrix4fv(keyframeMVPMatrixHandle, 1, false,
                        modelViewProjectionKeyframe, 0);
                GLES20.glUniform1i(keyframeTexSampler2DHandle, 0);

                // Render
                GLES20.glDrawElements(GLES20.GL_TRIANGLES, NUM_QUAD_INDEX,
                        GLES20.GL_UNSIGNED_SHORT, quadIndices);

                GLES20.glDisableVertexAttribArray(keyframeVertexHandle);
                GLES20.glDisableVertexAttribArray(keyframeTexCoordHandle);

                GLES20.glUseProgram(0);*/
            } else {
                // In any other case, such as playing or paused, we render
                // the actual contents
                Logger.d("imageTarget.getName() 2 : " + imageTarget.getName());
                if (imageTarget.getName().compareTo("test1") == 0) {
                    // Render the video frame
                    drawVideo(trackableResult, currentTarget, projectionMatrix, videoPlaybackShaderID);
                }
            }

            // Render the play/status icon
            if ((currentStatus[currentTarget] == VideoPlayerHelper.MEDIA_STATE.READY)
                    || (currentStatus[currentTarget] == VideoPlayerHelper.MEDIA_STATE.REACHED_END)
                    || (currentStatus[currentTarget] == VideoPlayerHelper.MEDIA_STATE.PAUSED)
                    || (currentStatus[currentTarget] == VideoPlayerHelper.MEDIA_STATE.NOT_READY)
                    || (currentStatus[currentTarget] == VideoPlayerHelper.MEDIA_STATE.ERROR)) {

                Logger.d("imageTarget.getName() 1 : " + imageTarget.getName());
                if (imageTarget.getName().compareTo("test1") == 0) {
                    // If the movie is ready to be played, paused, has reached
                    // the end, or is not ready, then we display one of the icons
                    float[] modelViewMatrixButton = Tool.convertPose2GLMatrix(
                            trackableResult.getPose()).getData();
                    float[] modelViewProjectionButton = new float[16];

                    GLES20.glDepthFunc(GLES20.GL_LEQUAL);

                    GLES20.glEnable(GLES20.GL_BLEND);
                    GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA,
                            GLES20.GL_ONE_MINUS_SRC_ALPHA);

                    // The inaccuracy of the rendering process on some devices
                    // means that even if we use the "Less or Equal" depth
                    // function we are likely to get ugly artifacts, because
                    // the translation in the Z direction is slightly
                    // different. Another possibility would be a depth func of
                    // "ALWAYS", but that is typically not a good idea
                    Matrix.translateM(modelViewMatrixButton, 0, 0.0f, 0.0f,
                            targetPositiveDimensions[currentTarget].getData()[1] / 10.98f);
                    Matrix.scaleM(modelViewMatrixButton, 0,
                            targetPositiveDimensions[currentTarget].getData()[1] / 2.0f,
                            targetPositiveDimensions[currentTarget].getData()[1] / 2.0f,
                            targetPositiveDimensions[currentTarget].getData()[1] / 2.0f);
                    Matrix.multiplyMM(modelViewProjectionButton, 0,
                            projectionMatrix, 0, modelViewMatrixButton, 0);

                    GLES20.glUseProgram(keyframeShaderID);

                    GLES20.glVertexAttribPointer(keyframeVertexHandle, 3,
                            GLES20.GL_FLOAT, false, 0, quadVertices);
                    GLES20.glVertexAttribPointer(keyframeTexCoordHandle, 2,
                            GLES20.GL_FLOAT, false, 0, quadTexCoords);

                    GLES20.glEnableVertexAttribArray(keyframeVertexHandle);
                    GLES20.glEnableVertexAttribArray(keyframeTexCoordHandle);

                    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);

                    // Depending on the current status we choose the appropriate
                    // texture to display. Notice that unlike the video these
                    // are regular GL_TEXTURE_2D textures
                    switch (currentStatus[currentTarget]) {
                        case READY:
                            // Auto-play: start the movie as soon as it is ready
                            mVideoPlayerHelper[currentTarget].play(false,
                                    mSeekPosition[currentTarget]);
                            isAutoStart = true;
                            break;
                        case REACHED_END:
                            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D,
                                    mTextures.get(0).mTextureID[0]);
                            break;
                        case PAUSED:
                            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D,
                                    mTextures.get(0).mTextureID[0]);
                            break;
                        case NOT_READY:
                            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D,
                                    mTextures.get(1).mTextureID[0]);
                            break;
                        case ERROR:
                            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D,
                                    mTextures.get(2).mTextureID[0]);
                            break;
                        default:
                            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D,
                                    mTextures.get(1).mTextureID[0]);
                            break;
                    }
                    GLES20.glUniformMatrix4fv(keyframeMVPMatrixHandle, 1, false,
                            modelViewProjectionButton, 0);
                    GLES20.glUniform1i(keyframeTexSampler2DHandle, 0);

                    // Render
                    GLES20.glDrawElements(GLES20.GL_TRIANGLES, NUM_QUAD_INDEX,
                            GLES20.GL_UNSIGNED_SHORT, quadIndices);

                    GLES20.glDisableVertexAttribArray(keyframeVertexHandle);
                    GLES20.glDisableVertexAttribArray(keyframeTexCoordHandle);

                    GLES20.glUseProgram(0);

                    // Finally we return the depth func to its original state
                    GLES20.glDepthFunc(GLES20.GL_LESS);
                    GLES20.glDisable(GLES20.GL_BLEND);
                }
            }

            SampleUtils.checkGLError("VideoPlayback renderFrame");
        }

        // Finally we return the depth func to its original state
        GLES20.glDepthFunc(GLES20.GL_LESS);
        GLES20.glDisable(GLES20.GL_BLEND);

        GLES20.glDisable(GLES20.GL_DEPTH_TEST);

        GLES20.glUseProgram(0);

        Renderer.getInstance().end();

    }

    private void drawObject(TrackableResult trackableResult, int currentTarget, float[] projectionMatrix, int shader) {
        /*if (mTree == null) {
            mActivity.runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    Toast.makeText(mActivity, "Please try again in a moment.", Toast.LENGTH_SHORT).show();
                }
            });

            return;
        }*/
        float[] modelViewProjection = new float[16];
        Matrix44F modelViewMatrix_Vuforia = Tool
                .convertPose2GLMatrix(trackableResult.getPose());
        float[] modelViewMatrix = modelViewMatrix_Vuforia.getData();

        // Stand the model upright on the target (rotate about X) and scale it
        Matrix.rotateM(modelViewMatrix, 0, 90.0f, 1.0f, 0, 0);
        Matrix.scaleM(modelViewMatrix, 0, kTreeScale, kTreeScale, kTreeScale);
        Matrix.multiplyMM(modelViewProjection, 0, projectionMatrix, 0, modelViewMatrix, 0);

        GLES20.glUseProgram(shader);

        GLES20.glDisable(GLES20.GL_CULL_FACE);
        GLES20.glVertexAttribPointer(vertexHandle, 3, GLES20.GL_FLOAT,
                false, 0, mTree.getVertices());
        GLES20.glVertexAttribPointer(textureCoordHandle, 2,
                GLES20.GL_FLOAT, false, 0, mTree.getTexCoords());

        GLES20.glEnableVertexAttribArray(vertexHandle);
        GLES20.glEnableVertexAttribArray(textureCoordHandle);

        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D,
                mTextures.get(3).mTextureID[0]);
        // Use the uniform handles of the cube-mesh program bound above; the
        // keyframe handles belong to a different shader program.
        GLES20.glUniformMatrix4fv(mvpMatrixHandle, 1, false,
                modelViewProjection, 0);
        GLES20.glUniform1i(texSampler2DHandle, 0);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLES, 0,
                mTree.getNumObjectVertex());

        SampleUtils.checkGLError("Renderer drawObject");
    }

    private void drawVideo(TrackableResult trackableResult, int currentTarget, float[] projectionMatrix, int shader) {
        float[] modelViewMatrixVideo;
        if (trackableResult == null) {
            modelViewMatrixVideo = new float[16];
        } else {
            modelViewMatrixVideo = Tool.convertPose2GLMatrix(
                    trackableResult.getPose()).getData();
        }
        float[] modelViewProjectionVideo = new float[16];
        // Matrix.translateM(modelViewMatrixVideo, 0, 0.0f, 0.0f,
        // targetPositiveDimensions[currentTarget].getData()[0]);

        // Here we use the aspect ratio of the video frame
        Matrix.scaleM(modelViewMatrixVideo, 0,
                targetPositiveDimensions[currentTarget].getData()[0],
                targetPositiveDimensions[currentTarget].getData()[0]
                        * videoQuadAspectRatio[currentTarget],
                targetPositiveDimensions[currentTarget].getData()[0]);
        Matrix.multiplyMM(modelViewProjectionVideo, 0,
                projectionMatrix, 0, modelViewMatrixVideo, 0);

        /** keying **************************************/
        GLES20.glEnable(GLES20.GL_BLEND);
        GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
        /** keying **************************************/

        GLES20.glUseProgram(shader);

        // Prepare for rendering the keyframe
        GLES20.glVertexAttribPointer(videoPlaybackVertexHandle, 3,
                GLES20.GL_FLOAT, false, 0, quadVertices);

        /*if (imageTarget.getName().compareTo("zzz") == 0)
            GLES20.glVertexAttribPointer(videoPlaybackTexCoordHandle,
                    2, GLES20.GL_FLOAT, false, 0,
                    fillBuffer(videoQuadTextureCoordsTransformedStones));
        else*/
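        // Note: this allocates a new direct buffer every frame; caching the
        // transformed-coordinate buffer would avoid the per-frame allocation.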
        GLES20.glVertexAttribPointer(videoPlaybackTexCoordHandle,
                2, GLES20.GL_FLOAT, false, 0,
                fillBuffer(videoQuadTextureCoordsTransformedChips));

        GLES20.glEnableVertexAttribArray(videoPlaybackVertexHandle);
        GLES20.glEnableVertexAttribArray(videoPlaybackTexCoordHandle);

        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);

        // IMPORTANT:
        // Notice here that the texture that we are binding is not the
        // typical GL_TEXTURE_2D but instead the GL_TEXTURE_EXTERNAL_OES
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                videoPlaybackTextureID[currentTarget]);
        GLES20.glUniformMatrix4fv(videoPlaybackMVPMatrixHandle, 1,
                false, modelViewProjectionVideo, 0);
        GLES20.glUniform1i(videoPlaybackTexSamplerOESHandle, 0);

        // Render
        GLES20.glDrawElements(GLES20.GL_TRIANGLES, NUM_QUAD_INDEX,
                GLES20.GL_UNSIGNED_SHORT, quadIndices);

        GLES20.glDisableVertexAttribArray(videoPlaybackVertexHandle);
        GLES20.glDisableVertexAttribArray(videoPlaybackTexCoordHandle);

//        GLES20.glUseProgram(0);

        /** keying **************************************/
        GLES20.glDisable(GLES20.GL_BLEND);
        /** keying **************************************/
    }

    boolean isTapOnScreenInsideTarget(int target, float x, float y) {
        // Here we calculate that the touch event is inside the target
        Vec3F intersection;
        // Vec3F lineStart = new Vec3F();
        // Vec3F lineEnd = new Vec3F();

        DisplayMetrics metrics = new DisplayMetrics();
        mActivity.getWindowManager().getDefaultDisplay().getMetrics(metrics);
        intersection = SampleMath.getPointToPlaneIntersection(SampleMath
                        .Matrix44FInverse(tappingProjectionMatrix),
                modelViewMatrix[target], metrics.widthPixels, metrics.heightPixels,
                new Vec2F(x, y), new Vec3F(0, 0, 0), new Vec3F(0, 0, 1));

        // The target returns as pose the center of the trackable. The following
        // check simply verifies that the tap is within this range
        return (intersection.getData()[0] >= -targetPositiveDimensions[target].getData()[0])
                && (intersection.getData()[0] <= targetPositiveDimensions[target].getData()[0])
                && (intersection.getData()[1] >= -targetPositiveDimensions[target].getData()[1])
                && (intersection.getData()[1] <= targetPositiveDimensions[target].getData()[1]);
    }


    void setVideoDimensions(int target, float videoWidth, float videoHeight,
                            float[] textureCoordMatrix) {
        // The quad originally comes as a perfect square; however, the video
        // often has a different aspect ratio such as 4:3 or 16:9.
        // To mitigate this we have two options:
        // 1) scale the width (typically up), or
        // 2) scale the height (typically down).
        // Which one to use is a matter of preference. This example scales the
        // height down (see the render call in renderFrame).
        videoQuadAspectRatio[target] = videoHeight / videoWidth;

        float mtx[] = textureCoordMatrix;

        // Pick the transformed-coordinate array for this target and apply the
        // texture-coordinate transform to each of the four UV pairs.
        float transformed[] = null;
        if (target == VideoPlayback.TEST2)
            transformed = videoQuadTextureCoordsTransformedStones;
        else if (target == VideoPlayback.TEST1)
            transformed = videoQuadTextureCoordsTransformedChips;

        if (transformed != null) {
            for (int i = 0; i < videoQuadTextureCoords.length; i += 2) {
                float tempUVMultRes[] = uvMultMat4f(transformed[i],
                        transformed[i + 1], videoQuadTextureCoords[i],
                        videoQuadTextureCoords[i + 1], mtx);
                transformed[i] = tempUVMultRes[0];
                transformed[i + 1] = tempUVMultRes[1];
            }
        }

        // textureCoordMatrix = mtx;
    }


    // Multiply the UV coordinates by the given transformation matrix
    float[] uvMultMat4f(float transformedU, float transformedV, float u,
                        float v, float[] pMat) {
        float x = pMat[0] * u + pMat[4] * v /* + pMat[ 8]*0.f */ + pMat[12]
                * 1.f;
        float y = pMat[1] * u + pMat[5] * v /* + pMat[ 9]*0.f */ + pMat[13]
                * 1.f;
        // float z = pMat[2]*u + pMat[6]*v + pMat[10]*0.f + pMat[14]*1.f; // We
        // dont need z and w so we comment them out
        // float w = pMat[3]*u + pMat[7]*v + pMat[11]*0.f + pMat[15]*1.f;

        float result[] = new float[2];
        // transformedU = x;
        // transformedV = y;
        result[0] = x;
        result[1] = y;
        return result;
    }


    void setStatus(int target, int value) {
        // Map the numeric status value onto our own MEDIA_STATE values
        switch (value) {
            case 0:
                currentStatus[target] = VideoPlayerHelper.MEDIA_STATE.REACHED_END;
                break;
            case 1:
                currentStatus[target] = VideoPlayerHelper.MEDIA_STATE.PAUSED;
                break;
            case 2:
                currentStatus[target] = VideoPlayerHelper.MEDIA_STATE.STOPPED;
                break;
            case 3:
                currentStatus[target] = VideoPlayerHelper.MEDIA_STATE.PLAYING;
                break;
            case 4:
                currentStatus[target] = VideoPlayerHelper.MEDIA_STATE.READY;
                break;
            case 5:
                currentStatus[target] = VideoPlayerHelper.MEDIA_STATE.NOT_READY;
                break;
            case 6:
                currentStatus[target] = VideoPlayerHelper.MEDIA_STATE.ERROR;
                break;
            default:
                currentStatus[target] = VideoPlayerHelper.MEDIA_STATE.NOT_READY;
                break;
        }
    }


    boolean isTracking(int target) {
        return isTracking[target];
    }


    public void setTextures(Vector<Texture> textures) {
        mTextures = textures;
    }

}
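
And a minimal sketch of the calls the host activity is expected to make, judging from the renderer's public API above (the GLSurfaceView setup and "movie.mp4" are illustrative, not from the project):

// Illustrative wiring from the host activity.
VideoPlaybackRenderer renderer =
        new VideoPlaybackRenderer(activity, vuforiaAppSession, surfaceHolder);
renderer.setTextures(textures); // icon and tree textures, loaded elsewhere
renderer.setVideoPlayerHelper(VideoPlayback.TEST1, videoPlayerHelper);
renderer.requestLoad(VideoPlayback.TEST1, "movie.mp4", 0, false);
glSurfaceView.setRenderer(renderer);
renderer.setActive(true); // once the Vuforia session has started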