/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ /* vim: set ts=8 sts=2 et sw=2 tw=80: */ /* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
void VRDisplay::LastRelease() {
  // Shut down immediately rather than waiting for cycle collection so the
  // presentation is freed promptly for use by other documents.
  Shutdown();
}
void VRDisplay::UpdateFrameInfo() {
  /**
   * The WebVR 1.1 spec requires that VRDisplay.getPose and
   * VRDisplay.getFrameData must return the same values until the next
   * VRDisplay.submitFrame.
   *
   * mFrameInfo is marked dirty at the end of the frame or start of a new
   * composition and lazily updated here in order to receive mid-frame
   * pose-prediction updates while still ensuring conformance to the WebVR
   * spec requirements.
   *
   * If we are not presenting WebVR content, the frame will never end and we
   * should return the latest frame data always.
   *
   * NOTE: mFrameInfo must NOT be cleared here.  Clearing it unconditionally
   * would mark it dirty on every call, forcing a fresh pose for every
   * getPose()/getFrameData() and violating the same-values-until-submitFrame
   * requirement described above.  It is cleared at frame boundaries instead.
   */
  if ((mFrameInfo.IsDirty() && IsPresenting()) ||
      mClient->GetDisplayInfo().GetPresentingGroups() == 0) {
    const gfx::VRHMDSensorState& state = mClient->GetSensorState();
    const gfx::VRDisplayInfo& info = mClient->GetDisplayInfo();
    mFrameInfo.Update(info, state, mDepthNear, mDepthFar);
  }
}
bool VRDisplay::GetFrameData(VRFrameData& aFrameData) {
  UpdateFrameInfo();
  // A pose is only valid when the display reports at minimum
  // Cap_Orientation; otherwise we must not populate aFrameData.
  if (mFrameInfo.mVRState.flags &
      gfx::VRDisplayCapabilityFlags::Cap_Orientation) {
    aFrameData.Update(mFrameInfo);
    return true;
  }
  return false;
}
already_AddRefed<VRPose> VRDisplay::GetPose() {
  UpdateFrameInfo();
  // Wrap the freshly-updated sensor state in a new VRPose for the caller.
  RefPtr<VRPose> pose = new VRPose(GetParentObject(), mFrameInfo.mVRState);
  return pose.forget();
}
void VRDisplay::ResetPose() {
  // Deprecated and intentionally a no-op.  This stub must remain because it
  // is still referenced by VRDisplay.webidl; we avoid asserting here since
  // that could break existing web content that calls it.
}
void VRDisplay::StartHandlingVRNavigationEvent() {
mHandlingVRNavigationEventStart = TimeStamp::Now();
++mVRNavigationEventDepth;
TimeDuration timeout =
TimeDuration::FromMilliseconds(StaticPrefs::dom_vr_navigation_timeout()); // A 0 or negative TimeDuration indicates that content may take // as long as it wishes to respond to the event, as long as // it happens before the event exits. if (timeout.ToMilliseconds() > 0) {
mClient->StopVRNavigation(timeout);
}
}
mClient->SetXRAPIMode(gfx::VRAPIMode::WebVR); if (!UserActivation::IsHandlingUserInput() && !isChromePresentation &&
!IsHandlingVRNavigationEvent() && StaticPrefs::dom_vr_require_gesture() &&
!IsPresenting()) { // The WebVR API states that if called outside of a user gesture, the // promise must be rejected. We allow VR presentations to start within // trusted events such as vrdisplayactivate, which triggers in response to // HMD proximity sensors and when navigating within a VR presentation. // This user gesture requirement is not enforced for chrome/system code.
promise->MaybeRejectWithUndefined();
} elseif (!IsPresenting() && IsAnyPresenting(presentationGroup)) { // Only one presentation allowed per VRDisplay on a // first-come-first-serve basis. // If this Javascript context is presenting, then we can replace our // presentation with a new one containing new layers but we should never // replace the presentation of another context. // Simultaneous presentations in other groups are allowed in separate // Javascript contexts to enable browser UI from chrome/system contexts. // Eventually, this restriction will be loosened to enable multitasking // use cases.
promise->MaybeRejectWithUndefined();
} else { if (mPresentation) {
mPresentation->UpdateLayers(aLayers);
} else {
mPresentation = mClient->BeginPresentation(aLayers, presentationGroup);
}
mFrameInfo.Clear();
promise->MaybeResolve(JS::UndefinedHandleValue);
} return promise.forget();
}
if (!IsPresenting()) { // We can not exit a presentation outside of the context that // started the presentation.
promise->MaybeRejectWithUndefined();
} else {
promise->MaybeResolve(JS::UndefinedHandleValue);
ExitPresentInternal();
}
bool VRDisplay::IsPresenting() const { // IsPresenting returns true only if this Javascript context is presenting // and will return false if another context is presenting. return mPresentation != nullptr;
}
bool VRDisplay::IsAnyPresenting(uint32_t aGroupMask) const { // IsAnyPresenting returns true if either this VRDisplay object or any other // from anther Javascript context is presenting with a group matching // aGroupMask. if (mPresentation && (mPresentation->GetGroup() & aGroupMask)) { returntrue;
} if (mClient->GetDisplayInfo().GetPresentingGroups() & aGroupMask) { returntrue;
} returnfalse;
}
void VRFrameInfo::Update(const gfx::VRDisplayInfo& aInfo, const gfx::VRHMDSensorState& aState, float aDepthNear, float aDepthFar) {
mVRState = aState; if (mTimeStampOffset == 0.0f) { /** * A mTimeStampOffset value of 0.0f indicates that this is the first * iteration and an offset has not yet been set. * * Generate a value for mTimeStampOffset such that if aState.timestamp is * monotonically increasing, aState.timestamp + mTimeStampOffset will never * be a negative number and will start at a pseudo-random offset * between 1000.0f and 11000.0f seconds. * * We use a pseudo random offset rather than 0.0f just to discourage users * from making the assumption that the timestamp returned in the WebVR API * has a base of 0, which is not necessarily true in all UA's.
*/
mTimeStampOffset = float(rand()) / float(RAND_MAX) * 10000.0f + 1000.0f - aState.timestamp;
}
mVRState.timestamp = aState.timestamp + mTimeStampOffset;
// Avoid division by zero within ConstructProjectionMatrix constfloat kEpsilon = 0.00001f; if (fabs(aDepthFar - aDepthNear) < kEpsilon) {
aDepthFar = aDepthNear + kEpsilon;
}
Die Informationen auf dieser Webseite wurden
nach bestem Wissen sorgfältig zusammengestellt. Es wird jedoch weder Vollständigkeit noch Richtigkeit
noch Qualität der bereitgestellten Informationen zugesichert.
Bemerkung:
Die farbliche Syntaxdarstellung ist noch experimentell.