1

このリポジトリを参照して、プロジェクト タンゴを使用して屋内ナビゲーション アプリに取り組んでいます。開始点と終了点がアダプターに設定されると、すべての有効なポーズが更新され、パスが検出されます。有効なポーズから抽出された特定の座標の Quadtree が既に構築されているため、A* アルゴリズムを使用して 2 点間の最短経路を見つけます。Start Of Service 座標系に関してはうまく機能します。

ADF をロードして ADF に関するポーズを更新すると、パスが表示されないか、サイズがゼロのパスが表示されます。また、有効なポーズからすべての座標を収集して Quadtree を構築し、パスを見つけることも試しましたが、やはりサイズが 0 のパスが返されます。なお、パスは Vector2 オブジェクトのコレクションです。

活動クラス

    /**
     * AR indoor-navigation activity: connects to the Tango service, tracks the device
     * pose against the start-of-service frame, and lets the user mark a start and an
     * end point whose shortest path is then computed and rendered by
     * {@link SoSPathRenderer}.
     *
     * NOTE(review): every pose in this class is queried against the START_OF_SERVICE
     * base frame (SOS_T_DEVICE_FRAME_PAIR). When an ADF is loaded, poses must instead
     * be queried against COORDINATE_FRAME_AREA_DESCRIPTION and only become valid once
     * the device has re-localized — this looks like the cause of the empty path
     * described above; TODO confirm against the Tango area-learning documentation.
     */
    public class SoSPathActivity extends AppCompatActivity implements Tango.OnTangoUpdateListener {

    // frame pairs for adf based ar pose tracking
    // Device pose relative to the start-of-service origin.
    public static final TangoCoordinateFramePair SOS_T_DEVICE_FRAME_PAIR =
            new TangoCoordinateFramePair(
                    TangoPoseData.COORDINATE_FRAME_START_OF_SERVICE,
                    TangoPoseData.COORDINATE_FRAME_DEVICE);
    // Incremental pose: current device frame relative to the previous device frame.
    public static final TangoCoordinateFramePair DEVICE_T_PREVIOUS_FRAME_PAIR =
            new TangoCoordinateFramePair(
                    TangoPoseData.COORDINATE_FRAME_PREVIOUS_DEVICE_POSE,
                    TangoPoseData.COORDINATE_FRAME_DEVICE);

    // This changes the Camera Texture and Intrinsics
    protected static final int ACTIVE_CAMERA_INTRINSICS = TangoCameraIntrinsics.TANGO_CAMERA_COLOR;
    protected static final int INVALID_TEXTURE_ID = -1;
    private static final String TAG = SoSPathActivity.class.getSimpleName();
    // Guards connect/disconnect so lifecycle callbacks never double-connect the service.
    protected AtomicBoolean tangoIsConnected = new AtomicBoolean(false);
    // Set on the Tango callback thread in onFrameAvailable; consumed on the GL thread.
    protected AtomicBoolean tangoFrameIsAvailable = new AtomicBoolean(false);

    protected Tango tango;
    protected TangoUx tangoUx;
    protected TangoCameraIntrinsics intrinsics;
    protected DeviceExtrinsics extrinsics;

    // Texture id currently bound to the Tango color camera (INVALID_TEXTURE_ID when none).
    protected int connectedTextureId;
    // Timestamp of the most recent RGB frame pushed to the GL texture.
    protected double rgbFrameTimestamp;
    // Timestamp of the last pose applied to the scene camera; prevents duplicate updates.
    protected double cameraPoseTimestamp;

    protected SoSPathRenderer renderer;


    RajawaliSurfaceView mainSurfaceView;
    Toolbar toolbar;
    TangoUxLayout uxLayout;
    MapView mapView;
    private TangoPointCloudManager mPointCloudManager;

    /**
     * Queries the static IMU-to-color-camera, IMU-to-device and IMU-to-depth-camera
     * transforms (timestamp 0.0 requests the latest available pose) and bundles them
     * into a {@link DeviceExtrinsics}.
     */
    private static DeviceExtrinsics setupExtrinsics(Tango tango) {
        // Create camera to IMU transform.
        TangoCoordinateFramePair framePair = new TangoCoordinateFramePair();
        framePair.baseFrame = TangoPoseData.COORDINATE_FRAME_IMU;
        framePair.targetFrame = TangoPoseData.COORDINATE_FRAME_CAMERA_COLOR;
        TangoPoseData imuToRgbPose = tango.getPoseAtTime(0.0, framePair);

        // Create device to IMU transform.
        framePair.targetFrame = TangoPoseData.COORDINATE_FRAME_DEVICE;
        TangoPoseData imuToDevicePose = tango.getPoseAtTime(0.0, framePair);

        // Create depth camera to IMU transform.
        framePair.targetFrame = TangoPoseData.COORDINATE_FRAME_CAMERA_DEPTH;
        TangoPoseData imuToDepthPose = tango.getPoseAtTime(0.0, framePair);

        return new DeviceExtrinsics(imuToDevicePose, imuToRgbPose, imuToDepthPose);
    }

    /** Builds Tango/UX/renderer objects and wires the layout views together. */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        tango = new Tango(this);
        tangoUx = new TangoUx(this);
        renderer = new SoSPathRenderer(this);

        setContentView(R.layout.main_layout);

        mainSurfaceView = (RajawaliSurfaceView)findViewById(R.id.gl_main_surface_view);
        toolbar = (Toolbar)findViewById(R.id.toolbar);
        uxLayout = (TangoUxLayout)findViewById(R.id.tango_ux_layout);
        mapView = (MapView)findViewById(R.id.map_view);

        setSupportActionBar(toolbar);
        tangoUx.setLayout(uxLayout);
        renderer.renderVirtualObjects(true);
        mainSurfaceView.setSurfaceRenderer(renderer);
        // Keep the GL surface behind sibling views so toolbar/UX overlays stay visible.
        mainSurfaceView.setZOrderOnTop(false);
        // Share the renderer's QuadTree with the 2D map view.
        mapView.setFloorPlanData(renderer.getFloorPlanData());

        mPointCloudManager = new TangoPointCloudManager();
    }

    @Override
    protected void onResume() {
        super.onResume();
        // Synchronized on `this` to avoid racing the GL onPreFrame callback during connect.
        synchronized (this) {
            if (tangoIsConnected.compareAndSet(false, true)) {
                try {
                    connectTango();
                    connectRenderer();
                } catch (TangoOutOfDateException e) {
                    // Tango core on the device is older than the client library.
                    message(R.string.exception_out_of_date);
                }
            }
        }
    }

    @Override
    protected void onPause() {
        super.onPause();
        synchronized (this) {
            if (tangoIsConnected.compareAndSet(true, false)) {
                // Tear down in order: frame callbacks, camera, then the service itself.
                renderer.getCurrentScene().clearFrameCallbacks();
                tango.disconnectCamera(ACTIVE_CAMERA_INTRINSICS);
                // Force the texture to be re-bound on the next connect cycle.
                connectedTextureId = INVALID_TEXTURE_ID;
                tango.disconnect();
                tangoUx.stop();
            }
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.main_menu, menu);
        return super.onCreateOptionsMenu(menu);
    }

    /** Menu actions mark the device's current position as the path start or end point. */
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case R.id.set_start_point:
                renderer.setStartPoint(getCurrentPose(), extrinsics);
                break;
            case R.id.set_end_point:
                renderer.setEndPoint(getCurrentPose(), extrinsics
                );
                break;
        }
        return super.onOptionsItemSelected(item);
    }

    /** Called by Tango when a new camera frame is ready; schedules a GL render pass. */
    @Override
    public void onFrameAvailable(int cameraId) {
        if (cameraId == ACTIVE_CAMERA_INTRINSICS) {
            tangoFrameIsAvailable.set(true);
            mainSurfaceView.requestRender();
        }
    }

    @Override
    public void onTangoEvent(TangoEvent event) {
        if (tangoUx != null) {
            tangoUx.updateTangoEvent(event);
        }
    }

    /** Pose callback is only used to feed the UX overlay; rendering polls poses itself. */
    @Override
    public void onPoseAvailable(TangoPoseData pose) {
        if (tangoUx != null) {
            tangoUx.updatePoseStatus(pose.statusCode);
        }
    }

    @Override
    public void onXyzIjAvailable(TangoXyzIjData xyzIj) {
        if (tangoUx != null) {
            tangoUx.updateXyzCount(xyzIj.xyzCount);
        }
    }

    /** Shows a short toast for the given string resource id. */
    private void message(final int message_resource) {
        Toast.makeText(this, message_resource, Toast.LENGTH_SHORT).show();
    }

    /** Caches camera extrinsics and intrinsics; must run after tango.connect(). */
    protected void setupCameraProperties(Tango tango) {
        extrinsics = setupExtrinsics(tango);
        intrinsics = tango.getCameraIntrinsics(ACTIVE_CAMERA_INTRINSICS);
    }


    /**
     * Starts the UX overlay and connects to the Tango service with low-latency IMU
     * integration and the color camera enabled.
     *
     * NOTE(review): no KEY_BOOLEAN_LEARNINGMODE / KEY_STRING_AREADESCRIPTION entries
     * are set here, so no ADF is loaded by this config — verify where the ADF is
     * supposed to be loaded.
     */
    protected void connectTango() {
        TangoUx.StartParams params = new TangoUx.StartParams();
        tangoUx.start(params);
        TangoConfig config = tango.getConfig(TangoConfig.CONFIG_TYPE_DEFAULT);
        config.putBoolean(TangoConfig.KEY_BOOLEAN_LOWLATENCYIMUINTEGRATION, true);
        config.putBoolean(TangoConfig.KEY_BOOLEAN_COLORCAMERA, true);
        tango.connect(config);
        ArrayList<TangoCoordinateFramePair> framePairs = new ArrayList<>();
        framePairs.add(SOS_T_DEVICE_FRAME_PAIR);
        framePairs.add(DEVICE_T_PREVIOUS_FRAME_PAIR);
        tango.connectListener(framePairs, this);
        setupCameraProperties(tango);
    }


    /**
     * Returns the device pose at the timestamp of the last rendered RGB frame,
     * relative to the start-of-service frame.
     *
     * NOTE(review): with an ADF loaded this should query an
     * AREA_DESCRIPTION -> DEVICE frame pair instead; against the SOS frame the
     * ADF-relative pose is never obtained — TODO confirm.
     */
    public TangoPoseData getCurrentPose() {
        return tango.getPoseAtTime(rgbFrameTimestamp, SOS_T_DEVICE_FRAME_PAIR);
    }

    // Counts valid pose updates; forwarded to the renderer on each camera-pose update.
    int position = 0;
    /**
     * Registers a pre-frame callback on the GL thread that keeps the camera texture
     * bound, pulls the latest RGB frame, and aligns the scene camera with the device
     * pose at that frame's timestamp.
     */
    protected void connectRenderer() {
        renderer.getCurrentScene().registerFrameCallback(new ScenePreFrameCallbackAdapter() {
            @Override
            public void onPreFrame(long sceneTime, double deltaTime) {
                synchronized (SoSPathActivity.this) {
                    // Bail out if onPause disconnected Tango while we were waiting.
                    if (!tangoIsConnected.get()) {
                        return;
                    }
                    if (!renderer.isSceneCameraConfigured()) {
                        renderer.setProjectionMatrix(intrinsics);
                    }
                    // Re-bind the color camera whenever the renderer's texture changed.
                    if (connectedTextureId != renderer.getTextureId()) {
                        tango.connectTextureId(ACTIVE_CAMERA_INTRINSICS, renderer.getTextureId());
                        connectedTextureId = renderer.getTextureId();
                    }
                    if (tangoFrameIsAvailable.compareAndSet(true, false)) {
                        rgbFrameTimestamp = tango.updateTexture(ACTIVE_CAMERA_INTRINSICS);
                    }
                    // Only update the camera when a newer frame than the last pose exists.
                    if (rgbFrameTimestamp > cameraPoseTimestamp) {
                        TangoPoseData currentPose = getCurrentPose();
                        if (currentPose != null && currentPose.statusCode == TangoPoseData.POSE_VALID) {
                            renderer.updateRenderCameraPose(currentPose, extrinsics, position);
                            cameraPoseTimestamp = currentPose.timestamp;
                            position++;
                        }
                    }
                }
            }
        });
    }
}

これがRendererクラスです

    /**
     * Rajawali renderer that draws the Tango color-camera feed as the background,
     * maintains a QuadTree-backed floor plan from walked trajectory points, and —
     * once both a start and an end point are set — runs an A* path search and
     * renders the resulting waypoints as blue cubes.
     *
     * Most methods here must be called from the OpenGL render thread; they are not
     * thread safe (see the individual method notes).
     */
    public class SoSPathRenderer extends TangoRajawaliRenderer {

    // QuadTree covers the square [-60, 60) x [-60, 60) in meters, subdivided 8 levels deep.
    public static final int QUAD_TREE_START = -60;
    public static final int QUAD_TREE_RANGE = 120;
    private static final String TAG = SoSPathRenderer.class.getSimpleName();
    // Walkable-area occupancy data shared with PathFinder and the 2D MapView.
    private final QuadTree data;
    // Rajawali texture used to render the Tango color camera
    private ATexture mTangoCameraTexture;
    // Keeps track of whether the scene camera has been configured
    private boolean mSceneCameraConfigured;

    private FloorPlan floorPlan;
    // Path endpoints captured from the device pose when the user picks them from the menu.
    private Pose startPoint;
    private Pose endPoint;
    // Cubes currently in the scene graph marking the last computed path.
    private List<Cube> pathCubes = new ArrayList<>();
    // One-shot flag: set when both endpoints exist, consumed by onRender to rebuild the path.
    private boolean fillPath = false;
    private Material blue;
    private boolean renderVirtualObjects;

    // Alternative endpoints taken from recorded path points (used only by commented-out code).
    private Vector3 startingPoint;
    private Vector3 endingPoint;

    public SoSPathRenderer(Context context) {
        super(context);
        data = new QuadTree(new Vector2(QUAD_TREE_START, QUAD_TREE_START), QUAD_TREE_RANGE, 8);
    }

    /** Builds the scene: camera background quad, one light, path material, floor plan. */
    @Override
    protected void initScene() {
        // Create a quad covering the whole background and assign a texture to it where the
        // Tango color camera contents will be rendered.
        ScreenQuad backgroundQuad = new ScreenQuad();
        Material tangoCameraMaterial = new Material();
        tangoCameraMaterial.setColorInfluence(0);
        // We need to use Rajawali's {@code StreamingTexture} since it sets up the texture
        // for GL_TEXTURE_EXTERNAL_OES rendering
        mTangoCameraTexture =
                new StreamingTexture("camera", (StreamingTexture.ISurfaceListener) null);
        try {
            tangoCameraMaterial.addTexture(mTangoCameraTexture);
            backgroundQuad.setMaterial(tangoCameraMaterial);
        } catch (ATexture.TextureException e) {
            Log.e(TAG, "Exception creating texture for RGB camera contents", e);
        }
        // Index 0 keeps the camera feed behind every other scene object.
        getCurrentScene().addChildAt(backgroundQuad, 0);

        // Add a directional light in an arbitrary direction.
        DirectionalLight light = new DirectionalLight(1, 0.2, -1);
        light.setColor(1, 1, 1);
        light.setPower(0.8f);
        light.setPosition(3, 2, 4);
        getCurrentScene().addLight(light);

        blue = new Material();
        blue.setColor(Color.BLUE);

        floorPlan = new FloorPlan(data);
        getCurrentScene().addChild(floorPlan);
        // Honor any renderVirtualObjects(...) call made before the scene was initialized.
        floorPlan.setVisible(renderVirtualObjects);

    }

    /**
     * Update the scene camera based on the provided pose in Tango start of service frame.
     * The device pose should match the pose of the device at the time the last rendered RGB
     * frame, which can be retrieved with this.getTimestamp();
     * NOTE: This must be called from the OpenGL render thread - it is not thread safe.
     */
    public void updateRenderCameraPose(TangoPoseData devicePose, DeviceExtrinsics extrinsics, int position) {
        Pose cameraPose = ScenePoseCalculator.toOpenGlCameraPose(devicePose, extrinsics);
        getCurrentCamera().setRotation(cameraPose.getOrientation());
        getCurrentCamera().setPosition(cameraPose.getPosition());

        Vector3 vector3 = cameraPose.getPosition();
        // Record the walked position so the floor plan / QuadTree learns the walkable area.
        floorPlan.setTrajectoryPosition(cameraPose.getPosition());
        Log.d(TAG, "P: " + cameraPose.toString());

        /*if(position<getLatestPathPoints().size()) {
            Log.d(TAG, "XXX Adding ADF Pose position into FloorPlan (x,y,z): " + getLatestPathPoints().get(position).x + ", "
                    + getLatestPathPoints().get(position).y + ", " + getLatestPathPoints().get(position).z);
            floorPlan.setTrajectoryPosition(getLatestPathPoints().get(position));
        }*/
    }

    /**
     * It returns the ID currently assigned to the texture where the Tango color camera contents
     * should be rendered.
     * NOTE: This must be called from the OpenGL render thread - it is not thread safe.
     */
    public int getTextureId() {
        return mTangoCameraTexture == null ? -1 : mTangoCameraTexture.getTextureId();
    }

    /**
     * We need to override this method to mark the camera for re-configuration (set proper
     * projection matrix) since it will be reset by Rajawali on surface changes.
     */
    @Override
    public void onRenderSurfaceSizeChanged(GL10 gl, int width, int height) {
        super.onRenderSurfaceSizeChanged(gl, width, height);
        mSceneCameraConfigured = false;
    }

    public boolean isSceneCameraConfigured() {
        return mSceneCameraConfigured;
    }

    /**
     * Sets the projection matrix for the scene camera to match the parameters of the color camera,
     * provided by the {@code TangoCameraIntrinsics}.
     */
    public void setProjectionMatrix(TangoCameraIntrinsics intrinsics) {
        Matrix4 projectionMatrix = ScenePoseCalculator.calculateProjectionMatrix(
                intrinsics.width, intrinsics.height,
                intrinsics.fx, intrinsics.fy, intrinsics.cx, intrinsics.cy);
        getCurrentCamera().setProjectionMatrix(projectionMatrix);
    }

    @Override
    public void onOffsetsChanged(float xOffset, float yOffset,
                                 float xOffsetStep, float yOffsetStep,
                                 int xPixelOffset, int yPixelOffset) {
    }

    @Override
    public void onTouchEvent(MotionEvent event) {

    }

    /**
     * Per-frame hook: when fillPath was set by setStartPoint/setEndPoint, removes the
     * previous path cubes, runs A* between the two endpoints on the QuadTree, and adds
     * one cube per returned waypoint.
     */
    @Override
    protected void onRender(long ellapsedRealtime, double deltaTime) {
        super.onRender(ellapsedRealtime, deltaTime);

        // add routing cubes to scene graph if available
        if (fillPath) {
            // Drop the cubes of the previous path before computing a new one.
            for (Cube pathCube : pathCubes) {
                getCurrentScene().removeChild(pathCube);
            }
            pathCubes.clear();
            PathFinder finder = new PathFinder(floorPlan.getData());
            try {
                // NOTE(review): an empty path here means the endpoints do not connect
                // in the QuadTree — with an ADF the poses may live in a different
                // coordinate frame than the recorded floor-plan data; verify frames.
                List<Vector2> path = finder.findPathBetween(startPoint.getPosition(), endPoint.getPosition());
                //List<Vector2> path = finder.findPathBetween(startingPoint, endingPoint);
                Log.d(TAG, "XXX Pathpoints: " + path.size());
                for (Vector2 vector2 : path) {
                    Cube cube = new Cube(0.2f);
                    cube.setMaterial(blue);
                    // Path is 2D (x, z); y = -1.2 places cubes near floor height.
                    cube.setPosition(new Vector3(vector2.getX(), -1.2, vector2.getY()));
                    getCurrentScene().addChild(cube);
                    pathCubes.add(cube);
                }
            } catch (Exception e) {
                Log.e(TAG, "onRender: " + e.getMessage(), e);
            } finally {
                // One-shot: do not recompute until an endpoint changes again.
                fillPath = false;
            }
        }
    }

    /**
     * Records the current device pose as the path start point and requests a path
     * rebuild once both endpoints are known.
     */
    public void setStartPoint(TangoPoseData currentPose, DeviceExtrinsics extrinsics) {
        startPoint = ScenePoseCalculator.toOpenGlCameraPose(currentPose, extrinsics);
        floorPlan.addPoint(startPoint.getPosition());
        if (startPoint != null && endPoint != null) {
            fillPath = true;
        }
        /*startingPoint = getLatestPathPoints().get(0);
        floorPlan.addPoint(startingPoint);
        if (startingPoint != null && endingPoint != null) {
            fillPath = true;
        }*/
    }

    /**
     * Records the current device pose as the path end point and requests a path
     * rebuild once both endpoints are known.
     */
    public void setEndPoint(TangoPoseData currentPose, DeviceExtrinsics extrinsics) {
        endPoint = ScenePoseCalculator.toOpenGlCameraPose(currentPose, extrinsics);
        floorPlan.addPoint(endPoint.getPosition());
        if (startPoint != null && endPoint != null) {
            fillPath = true;
        }
        /*endingPoint = getLatestPathPoints().get(getLatestPathPoints().size()-10);
        floorPlan.addPoint(endingPoint);
        if (startingPoint != null && endingPoint != null) {
            fillPath = true;
        }*/
    }

    /** Exposes the QuadTree so the activity can feed it to the 2D MapView. */
    public QuadTree getFloorPlanData() {
        return data;
    }

    /** Toggles floor-plan visibility; safe to call before initScene() has run. */
    public void renderVirtualObjects(boolean renderObjects) {
        renderVirtualObjects = renderObjects;
        if (this.floorPlan != null)
            this.floorPlan.setVisible(renderObjects);
    }

}

ADFをロードした後にパスを取得するためにここで何が欠けているのかわかりません。誰かがこれについて経験を持っている場合は、私を更新してください。

4

1 に答える 1

0

コメントを書くのに十分な評判がないため、回答として投稿することをお詫びします。GitHub などであなたのコード / プロジェクトを見せてもらえますか? 学習モードでも動作するはずです。ADF をロードした後、タブレットがまだ再ローカライズされていないため、ポーズ データが無効になっているのではないかと思います。

編集: フレーム ペアが次のようになっているかどうかを確認します。

 /** Record Device to Area Description as the main frame pair to be used for device pose queries. */
private static final TangoCoordinateFramePair FRAME_PAIR = new TangoCoordinateFramePair(
        TangoPoseData.COORDINATE_FRAME_AREA_DESCRIPTION,
        TangoPoseData.COORDINATE_FRAME_DEVICE);

そして、エリア学習モードがオンになっていて、adf が正しく読み込まれているかどうかを確認します。

config.putBoolean(TangoConfig.KEY_BOOLEAN_LEARNINGMODE, true); //learning mode on
config.putString(TangoConfig.KEY_STRING_AREADESCRIPTION, mLoadedADFPair.getUuid()); //load adf

次に、それと同様の方法でポーズ データを要求するかどうかを確認します。

TangoPoseData lastFramePose = mTango.getPoseAtTime(mRgbTimestampGlThread,
                        FRAME_PAIR);
if (lastFramePose.statusCode == TangoPoseData.POSE_VALID) {

       // Device is re-located!               

       // Update the camera pose from the renderer
       mRenderer.updateRenderCameraPose(lastFramePose);
       mCameraPoseTimestamp = lastFramePose.timestamp;
} else {
       Log.w(TAG, "Can't get device pose at time: " + mRgbTimestampGlThread);
}

有効なポーズ データが利用可能になるまで、最大 3 ~ 5 分かかる場合があります。歩き回って、あきらめないでください。

于 2016-06-10T09:12:40.437 に答える