Java source code examples: com.google.ar.core.PointCloud

Example 1
/**
 * Updates the OpenGL buffer contents to the provided point cloud. Repeated calls with the
 * same point cloud will be ignored.
 */
public void update(PointCloud cloud) {
    if (lastPointCloud == cloud) {
        // Redundant call.
        return;
    }

    ShaderUtil.checkGLError(TAG, "before update");

    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbo);
    lastPointCloud = cloud;

    // If the VBO is not large enough to fit the new point cloud, resize it.
    numPoints = lastPointCloud.getPoints().remaining() / FLOATS_PER_POINT;
    if (numPoints * BYTES_PER_POINT > vboSize) {
        while (numPoints * BYTES_PER_POINT > vboSize) {
            vboSize *= 2;
        }
        GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, vboSize, null, GLES20.GL_DYNAMIC_DRAW);
    }
    GLES20.glBufferSubData(
            GLES20.GL_ARRAY_BUFFER, 0, numPoints * BYTES_PER_POINT, lastPointCloud.getPoints());
    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);

    ShaderUtil.checkGLError(TAG, "after update");
}
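The update() method above relies on fields and constants (vbo, vboSize, numPoints, lastPointCloud, FLOATS_PER_POINT, BYTES_PER_POINT) declared elsewhere in the renderer class. The sketch below is one plausible way to declare and initialize them on the GL thread; the tag name, initial capacity, and the omission of shader setup are assumptions, not part of the example above. ARCore packs each point as four floats (X, Y, Z, confidence), and starting vboSize at a non-zero value is what keeps the doubling loop in update() finite.

// Sketch of the renderer state assumed by update() above (names and sizes are illustrative).
private static final String TAG = "PointCloudRenderer";   // Assumed tag/class name.
private static final int FLOATS_PER_POINT = 4;            // Each ARCore point is X, Y, Z, confidence.
private static final int BYTES_PER_FLOAT = Float.SIZE / 8;
private static final int BYTES_PER_POINT = BYTES_PER_FLOAT * FLOATS_PER_POINT;
private static final int INITIAL_BUFFER_POINTS = 1000;    // Assumed starting capacity.

private int vbo;                   // Vertex buffer object holding the point data.
private int vboSize;               // Current VBO size in bytes; non-zero after initialization.
private int numPoints;             // Number of points uploaded by the last update().
private PointCloud lastPointCloud; // Lets update() skip redundant uploads.

public void createOnGlThread() {
    ShaderUtil.checkGLError(TAG, "before create");

    int[] buffers = new int[1];
    GLES20.glGenBuffers(1, buffers, 0);
    vbo = buffers[0];
    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbo);

    vboSize = INITIAL_BUFFER_POINTS * BYTES_PER_POINT;
    GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, vboSize, null, GLES20.GL_DYNAMIC_DRAW);
    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);

    ShaderUtil.checkGLError(TAG, "buffer alloc");
    // Shader program setup is omitted here; see the draw() sketch after Example 2.
}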
 
Example 2
/**
 * Updates the OpenGL buffer contents to the provided point cloud. Repeated calls with the
 * same point cloud will be ignored.
 */
public void update(PointCloud cloud) {
    if (mLastPointCloud == cloud) {
        // Redundant call.
        return;
    }

    ShaderUtil.checkGLError(TAG, "before update");

    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mVbo);
    mLastPointCloud = cloud;

    // If the VBO is not large enough to fit the new point cloud, resize it.
    mNumPoints = mLastPointCloud.getPoints().remaining() / FLOATS_PER_POINT;
    if (mNumPoints * BYTES_PER_POINT > mVboSize) {
        while (mNumPoints * BYTES_PER_POINT > mVboSize) {
            mVboSize *= 2;
        }
        GLES20.glBufferData(GLES20.GL_ARRAY_BUFFER, mVboSize, null, GLES20.GL_DYNAMIC_DRAW);
    }
    GLES20.glBufferSubData(GLES20.GL_ARRAY_BUFFER, 0, mNumPoints * BYTES_PER_POINT,
        mLastPointCloud.getPoints());
    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);

    ShaderUtil.checkGLError(TAG, "after update");
}
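Examples 3 and 4 below pair update() with a draw(viewmtx, projmtx) call that is not reproduced on this page. The following is a hedged sketch of what such a method can look like, using Example 1's field names; the shader handles (programName, positionAttribute, colorUniform, modelViewProjectionUniform, pointSizeUniform) and the color and point-size values are assumptions that would be wired up during GL initialization, and Matrix is android.opengl.Matrix.

// Sketch of a draw() counterpart to update(); shader handles and constants are illustrative.
public void draw(float[] cameraView, float[] cameraPerspective) {
    float[] modelViewProjection = new float[16];
    Matrix.multiplyMM(modelViewProjection, 0, cameraPerspective, 0, cameraView, 0);

    ShaderUtil.checkGLError(TAG, "before draw");

    GLES20.glUseProgram(programName);
    GLES20.glEnableVertexAttribArray(positionAttribute);
    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, vbo);
    // Each point is four floats (X, Y, Z, confidence) read directly from the VBO.
    GLES20.glVertexAttribPointer(
            positionAttribute, 4, GLES20.GL_FLOAT, false, BYTES_PER_POINT, 0);
    GLES20.glUniform4f(colorUniform, 0.12f, 0.74f, 0.82f, 1.0f);
    GLES20.glUniformMatrix4fv(modelViewProjectionUniform, 1, false, modelViewProjection, 0);
    GLES20.glUniform1f(pointSizeUniform, 5.0f);

    GLES20.glDrawArrays(GLES20.GL_POINTS, 0, numPoints);

    GLES20.glDisableVertexAttribArray(positionAttribute);
    GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, 0);

    ShaderUtil.checkGLError(TAG, "after draw");
}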
 
Example 3
/**
 * renderScene() clears the color buffer and depth buffer, draws the current camera texture,
 * and draws the line renderer if ARCore is tracking the world around it.
 */
private void renderScene() {
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

    if (mFrame != null) {
        mBackgroundRenderer.draw(mFrame);
    }

    // Draw debug anchors. Guard against a null frame here too; the check above only
    // protects the background draw.
    if (BuildConfig.DEBUG && mFrame != null) {
        if (mFrame.getCamera().getTrackingState() == TrackingState.TRACKING) {
            zeroAnchorRenderer.draw(viewmtx, projmtx, false);
        }
    }

    // Draw the line renderer and debug overlays.
    if (mFrame != null) {

        // Draw Lines
        if (mTrackingIndicator.isTracking() || (
                // keep painting through 5 frames where we're not tracking
                (bHasTracked.get() && mFramesNotTracked < MAX_UNTRACKED_FRAMES))) {

            if (!mTrackingIndicator.isTracking()) {
                mFramesNotTracked++;
            } else {
                mFramesNotTracked = 0;
            }

            // If the anchor is set, set the modelMatrix of the line renderer to offset to the anchor
            if (mAnchor != null && mAnchor.getTrackingState() == TrackingState.TRACKING) {
                mAnchor.getPose().toMatrix(mLineShaderRenderer.mModelMatrix, 0);

                if (BuildConfig.DEBUG) {
                    mAnchor.getPose().toMatrix(cloudAnchorRenderer.mModelMatrix, 0);
                    cloudAnchorRenderer.draw(viewmtx, projmtx, true);
                }
            }

            // Render the lines
            mLineShaderRenderer
                    .draw(viewmtx, projmtx, mScreenWidth, mScreenHeight,
                            AppSettings.getNearClip(),
                            AppSettings.getFarClip());
        }

        if (mDebugEnabled) {
            mHandler.post(new Runnable() {
                @Override
                public void run() {
                    mDebugView.setAnchorTracking(mAnchor);
                }
            });
        }
    }

    if (mMode == Mode.PAIR_PARTNER_DISCOVERY || mMode == Mode.PAIR_ANCHOR_RESOLVING) {
        if (mFrame != null) {
            PointCloud pointCloud = mFrame.acquirePointCloud();
            this.pointCloud.update(pointCloud);
            this.pointCloud.draw(viewmtx, projmtx);

            // Application is responsible for releasing the point cloud resources after
            // using it.
            pointCloud.release();
        }
    }

}
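renderScene() above reads viewmtx and projmtx as fields that are populated elsewhere in the frame loop. A minimal sketch of how they are typically computed from the ARCore Camera follows; treating them as locals and reusing AppSettings for the clip planes (as renderScene() already does) are simplifications, not code taken from this project.

// Sketch: computing the view/projection matrices consumed by renderScene() above.
Camera camera = mFrame.getCamera();

float[] projmtx = new float[16];
camera.getProjectionMatrix(projmtx, 0, AppSettings.getNearClip(), AppSettings.getFarClip());

float[] viewmtx = new float[16];
camera.getViewMatrix(viewmtx, 0);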
 
Example 4
@Override
public void onDrawFrame(GL10 gl) {
  // Clear screen to notify driver it should not load any pixels from previous frame.
  GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

  // If we are ready to import the object and haven't done so yet, do it now.
  if (mReadyToImport && virtualObject == null) {
    importDownloadedObject();
  }

  if (session == null) {
    return;
  }
  // Notify ARCore session that the view size changed so that the perspective matrix and
  // the video background can be properly adjusted.
  displayRotationHelper.updateSessionIfNeeded(session);

  try {
    session.setCameraTextureName(backgroundRenderer.getTextureId());

    // Obtain the current frame from ARSession. When the configuration is set to
    // UpdateMode.BLOCKING (it is by default), this will throttle the rendering to the
    // camera framerate.
    Frame frame = session.update();
    Camera camera = frame.getCamera();

    // Handle one tap per frame.
    handleTap(frame, camera);

    // Draw background.
    backgroundRenderer.draw(frame);

    // If not tracking, don't draw 3d objects.
    if (camera.getTrackingState() == TrackingState.PAUSED) {
      return;
    }

    // Get projection matrix.
    float[] projmtx = new float[16];
    camera.getProjectionMatrix(projmtx, 0, 0.1f, 100.0f);

    // Get camera matrix and draw.
    float[] viewmtx = new float[16];
    camera.getViewMatrix(viewmtx, 0);

    // Compute lighting from average intensity of the image.
    // The first three components are color scaling factors.
    // The last one is the average pixel intensity in gamma space.
    final float[] colorCorrectionRgba = new float[4];
    frame.getLightEstimate().getColorCorrection(colorCorrectionRgba, 0);

    // Visualize tracked points.
    PointCloud pointCloud = frame.acquirePointCloud();
    pointCloudRenderer.update(pointCloud);
    pointCloudRenderer.draw(viewmtx, projmtx);

    // Application is responsible for releasing the point cloud resources after
    // using it.
    pointCloud.release();

    // Check if we detected at least one plane. If so, hide the loading message.
    if (messageSnackbarHelper.isShowing()) {
      for (Plane plane : session.getAllTrackables(Plane.class)) {
        if (plane.getTrackingState() == TrackingState.TRACKING) {
          messageSnackbarHelper.hide(this);
          break;
        }
      }
    }

    // Visualize planes.
    planeRenderer.drawPlanes(
        session.getAllTrackables(Plane.class), camera.getDisplayOrientedPose(), projmtx);

    // Visualize anchors created by touch.
    float scaleFactor = 1.0f;
    for (Anchor anchor : anchors) {
      if (anchor.getTrackingState() != TrackingState.TRACKING) {
        continue;
      }
      // Get the current pose of an Anchor in world space. The Anchor pose is updated
      // during calls to session.update() as ARCore refines its estimate of the world.
      anchor.getPose().toMatrix(anchorMatrix, 0);

      // Update and draw the model.
      if (virtualObject != null) {
        virtualObject.updateModelMatrix(anchorMatrix, ASSET_SCALE * scaleFactor);
        virtualObject.draw(viewmtx, projmtx, colorCorrectionRgba);

        // If we haven't shown the attribution toast yet, do it now.
        if (!mShowedAttributionToast) {
          showAttributionToast();
        }
      }
    }

  } catch (Throwable t) {
    // Avoid crashing the application due to unhandled exceptions.
    Log.e(TAG, "Exception on the OpenGL thread", t);
  }
}
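Both renderScene() and onDrawFrame() above follow the same acquire/update/draw/release lifecycle for the point cloud. The defensive variant below uses only calls already shown in these examples and wraps the release in try/finally, so the native point cloud resources are freed even if rendering throws; the pointCloudRenderer name mirrors Example 4 and is otherwise an assumption.

PointCloud pointCloud = frame.acquirePointCloud();
try {
  pointCloudRenderer.update(pointCloud);
  pointCloudRenderer.draw(viewmtx, projmtx);
} finally {
  // The application is responsible for releasing the point cloud after using it.
  pointCloud.release();
}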