I was wondering if there is an official way to detect device feature support for hand-tracking. The hand input demos now require hand-tracking, but the visionOS simulator has no hand input source, so they fail to launch.
I need to support devices with controllers and only add hand models when the device actually supports them, rather than adding them unconditionally.
There doesn’t seem to be a WebXR API that reports which features the session actually granted when I request “hand-tracking” via optionalFeatures. There is no WebXR API to detect which features are supported before launching a session, and no obvious way to tell whether I can add the hand models or not.
I see WebXR in Chrome exposes a list of granted features on the session, which might help decide whether to add the hand models. In the visionOS simulator it isn’t there, and I’m not even sure it exists on visionOS itself:
enabledFeatures:['viewer', 'local', 'bounded-floor', 'local-floor']
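For browsers that do implement it, the check has to happen after the session starts: request “hand-tracking” as an optional feature and then look at session.enabledFeatures. navigator.xr.isSessionSupported() only reports whether the session mode is supported, not individual features. A rough sketch of the API shape (requestSession has to run from a user gesture, which three.js’s VRButton normally handles, so this is just illustration, and enabledFeatures needs a guard since it isn’t implemented everywhere):

// only tells us the mode is supported, says nothing about individual features
const vrSupported = await navigator.xr.isSessionSupported( 'immersive-vr' );

// request hand-tracking as optional so the session still starts without it
const session = await navigator.xr.requestSession( 'immersive-vr', {
	optionalFeatures: [ 'hand-tracking' ]
} );

// enabledFeatures is not implemented everywhere, so guard before reading it
const hasHandTracking = Array.isArray( session.enabledFeatures ) &&
	session.enabledFeatures.includes( 'hand-tracking' );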
Potentially the only way to determine hand-tracking on visionOS is the MacIntel / iPad platform detection I already have to do to force video playback, combined with checking whether the session has inputSources, plus searching enabledFeatures for “hand-tracking” where it exists, and then adding the hand models. The demos assume hand-tracking is available, so they aren’t designed to be cross-compatible with everything.
For the hand example, something like this should work to decide whether to add hand models on visionOS and elsewhere:
controller1 = renderer.xr.getController( 0 );
scene.add( controller1 );
controller2 = renderer.xr.getController( 1 );
scene.add( controller2 );
controller1.addEventListener( 'connected', function ( event ) {

	//console.log("connected ", event.data.handedness );

	const geometry = new THREE.BufferGeometry().setFromPoints( [ new THREE.Vector3( 0, 0, 0 ), new THREE.Vector3( 0, 0, - 1 ) ] );
	const line = new THREE.Line( geometry );
	line.name = 'line';
	line.scale.z = 5;

	controller1.add( line.clone() );
	controller2.add( line.clone() );

} );
renderer.xr.addEventListener("sessionstart", (e) => {
const session = renderer.xr.getSession(),
isIOS = ((/iPad/i).test(navigator.platform) || (navigator.platform === 'MacIntel' && navigator.maxTouchPoints > 1));
//console.log("Session started", session);
//visionOS with inputSources
const visionOSWithControls = isIOS && session.inputSources && session.inputSources.length,
hasHandTracking = session.enabledFeatures && session.enabledFeatures.indexOf("hand-tracking") > 0;
if (hasHandTracking || visionOSWithControls) {
console.log("supports hand tracking");
const controllerModelFactory = new XRControllerModelFactory();
const handModelFactory = new XRHandModelFactory();
// Hand 1
controllerGrip1 = renderer.xr.getControllerGrip( 0 );
controllerGrip1.add( controllerModelFactory.createControllerModel( controllerGrip1 ) );
scene.add( controllerGrip1 );
hand1 = renderer.xr.getHand( 0 );
hand1.add( handModelFactory.createHandModel( hand1 ) );
scene.add( hand1 );
// Hand 2
controllerGrip2 = renderer.xr.getControllerGrip( 1 );
controllerGrip2.add( controllerModelFactory.createControllerModel( controllerGrip2 ) );
scene.add( controllerGrip2 );
hand2 = renderer.xr.getHand( 1 );
hand2.add( handModelFactory.createHandModel( hand2 ) );
scene.add( hand2 );
} else {
console.log("No hand tracking support");
}
//console.log("session start ", renderer.xr.getSession());
});
I’ve figured out a solution. I can toggle between the controller and the hand: the hand model is added dynamically on the connected event, since a hand property is only present on a hand input source. The grip seems to be empty regardless. The ray pointer line is toggled depending on the input source, and I’ll use the hand-tracked pointer for the intersect marker, like I already have for gaze control (see the sketch after the code below).
// addons used below (paths as in the three.js examples)
import { XRControllerModelFactory } from 'three/addons/webxr/XRControllerModelFactory.js';
import { OculusHandModel } from 'three/addons/webxr/OculusHandModel.js';
import { OculusHandPointerModel } from 'three/addons/webxr/OculusHandPointerModel.js';

controller1 = renderer.xr.getController( 0 );
scene.add( controller1 );

let rayConfigured = false, handConfigured = false, hand1 = null, handPointer1 = null, line = null;

const controllerModelFactory = new XRControllerModelFactory();

controllerGrip1 = renderer.xr.getControllerGrip( 0 );
controllerGrip1.add( controllerModelFactory.createControllerModel( controllerGrip1 ) );
scene.add( controllerGrip1 );
controller1.addEventListener( 'connected', function ( event ) {

	const hasHand = event.data.hand;

	if ( ! rayConfigured ) {

		rayConfigured = true;

		// ray pointer line for controller input
		const geometry = new THREE.BufferGeometry().setFromPoints( [ new THREE.Vector3( 0, 0, 0 ), new THREE.Vector3( 0, 0, - 1 ) ] );
		line = new THREE.Line( geometry );
		line.name = 'line';
		line.scale.z = 5;

		controller1.add( line );
		//controller2.add( line.clone() );

	} else if ( line ) {

		// hide the ray when a hand input source connects
		line.visible = ! hasHand;

	}

	if ( event.data.hand && ! handConfigured ) {

		handConfigured = true;

		hand1 = renderer.xr.getHand( 0 );
		hand1.add( new OculusHandModel( hand1 ) );

		handPointer1 = new OculusHandPointerModel( hand1, controller1 );
		hand1.add( handPointer1 );
		scene.add( hand1 );

		hand1.addEventListener( 'connected', () => {

			handPointer1.setCursor( 1.5 );
			handPointer1.setAttached( false );

		} );

		hand1.addEventListener( 'pinchstart', () => {

			const intersections = [ orangeButton ].filter( object => {

				const intersections1 = handPointer1.intersectObject( object, false );
				return intersections1 && intersections1.length;

			} );

			console.log( "INTERSECT ", intersections );

		} );

		hand1.addEventListener( 'pinchend', () => {

			// nothing to do on pinch end yet

		} );

	}

} );
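For the intersect marker itself, the plan is to reuse the hand pointer’s intersection point the same way as the gaze cursor. A rough sketch of that idea, assuming an interactableObjects array and an intersectMarker mesh that already exist in my scene (those names are placeholders, not from the demos):

// assumed helpers, not part of the three.js examples:
// interactableObjects: meshes the pointer should hit-test
// intersectMarker: small marker mesh already added to the scene
function updateHandIntersectMarker() {

	if ( ! handPointer1 ) return;

	let hit = null;

	for ( const object of interactableObjects ) {

		const hits = handPointer1.intersectObject( object, false );
		if ( hits && hits.length ) { hit = hits[ 0 ]; break; }

	}

	if ( hit ) {

		// raycaster intersections report a world-space point
		intersectMarker.position.copy( hit.point );
		intersectMarker.visible = true;

	} else {

		intersectMarker.visible = false;

	}

}

// called once per frame from the render loop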