// Number of bits available for the generation ID. On the Android framework we
// reserve the top bits of the 32-bit ID for the fill type (skbug.com/1762).
#ifdef SK_BUILD_FOR_ANDROID_FRAMEWORK
static constexpr int kPathRefGenIDBitCnt = 30;  // leave room for the fill type (skbug.com/1762)
#else
static constexpr int kPathRefGenIDBitCnt = 32;
#endif
//////////////////////////////////////////////////////////////////////////////
// Attaches an editor to *pathRef, first ensuring the ref is uniquely owned
// (copy-on-write) and has room for the requested additional verbs/points/conics.
// Marks the ref dirty: listeners are notified and the cached generation ID and
// bounds are invalidated.
SkPathRef::Editor::Editor(sk_sp<SkPathRef>* pathRef,
                          int incReserveVerbs,
                          int incReservePoints,
                          int incReserveConics)
{
    SkASSERT(incReserveVerbs >= 0);
    SkASSERT(incReservePoints >= 0);

    if ((*pathRef)->unique()) {
        (*pathRef)->incReserve(incReserveVerbs, incReservePoints, incReserveConics);
    } else {
        SkPathRef* copy;
        // No need to copy if the existing ref is the empty ref (because it doesn't contain
        // anything).
        if (!(*pathRef)->isInitialEmptyPathRef()) {
            copy = new SkPathRef;
            copy->copy(**pathRef, incReserveVerbs, incReservePoints, incReserveConics);
        } else {
            // Size previously empty paths to exactly fit the supplied hints. The assumption is
            // the caller knows the exact size they want (as happens in chrome when deserializing
            // paths).
            copy = new SkPathRef(incReserveVerbs, incReservePoints, incReserveConics);
        }
        pathRef->reset(copy);
    }
    fPathRef = pathRef->get();
    fPathRef->callGenIDChangeListeners();
    fPathRef->fGenerationID = 0;
    fPathRef->fBoundsIsDirty = true;
    SkDEBUGCODE(fPathRef->fEditorsAttached++;)
}
SkPathRef::~SkPathRef() {
    // Deliberately don't validate() this path ref, otherwise there's no way
    // to read one that's not valid and then free its memory without asserting.
    // Poison the debug fields so use-after-free of this ref is easy to spot.
    SkDEBUGCODE(fGenerationID = 0xEEEEEEEE;)
    SkDEBUGCODE(fEditorsAttached.store(0x7777777);)
}
// Shared singleton backing every default-constructed (empty) path.
static SkPathRef* gEmpty = nullptr;

// Returns a new strong reference to the shared empty SkPathRef, creating it
// exactly once in a thread-safe manner.
SkPathRef* SkPathRef::CreateEmpty() {
    static SkOnce once;
    once([] {
        gEmpty = new SkPathRef;
        gEmpty->computeBounds();  // Avoids races later to be the first to do this.
    });
    return SkRef(gEmpty);
}
staticvoid transform_dir_and_start(const SkMatrix& matrix, bool isRRect, bool* isCCW, unsigned* start) { int inStart = *start; int rm = 0; if (isRRect) { // Degenerate rrect indices to oval indices and remember the remainder. // Ovals have one index per side whereas rrects have two.
rm = inStart & 0b1;
inStart /= 2;
} // Is the antidiagonal non-zero (otherwise the diagonal is zero) int antiDiag; // Is the non-zero value in the top row (either kMScaleX or kMSkewX) negative int topNeg; // Are the two non-zero diagonal or antidiagonal values the same sign. int sameSign; if (matrix.get(SkMatrix::kMScaleX) != 0) {
antiDiag = 0b00; if (matrix.get(SkMatrix::kMScaleX) > 0) {
topNeg = 0b00;
sameSign = matrix.get(SkMatrix::kMScaleY) > 0 ? 0b01 : 0b00;
} else {
topNeg = 0b10;
sameSign = matrix.get(SkMatrix::kMScaleY) > 0 ? 0b00 : 0b01;
}
} else {
antiDiag = 0b01; if (matrix.get(SkMatrix::kMSkewX) > 0) {
topNeg = 0b00;
sameSign = matrix.get(SkMatrix::kMSkewY) > 0 ? 0b01 : 0b00;
} else {
topNeg = 0b10;
sameSign = matrix.get(SkMatrix::kMSkewY) > 0 ? 0b00 : 0b01;
}
} if (sameSign != antiDiag) { // This is a rotation (and maybe scale). The direction is unchanged. // Trust me on the start computation (or draw yourself some pictures)
*start = (inStart + 4 - (topNeg | antiDiag)) % 4;
SkASSERT(*start < 4); if (isRRect) {
*start = 2 * *start + rm;
}
} else { // This is a mirror (and maybe scale). The direction is reversed.
*isCCW = !*isCCW; // Trust me on the start computation (or draw yourself some pictures)
*start = (6 + (topNeg | antiDiag) - inStart) % 4;
SkASSERT(*start < 4); if (isRRect) {
*start = 2 * *start + (rm ? 0 : 1);
}
}
}
sk_sp<const SkPathRef> srcKeepAlive; if (!(*dst)->unique()) { // If dst and src are the same then we are about to drop our only ref on the common path // ref. Some other thread may have owned src when we checked unique() above but it may not // continue to do so. Add another ref so we continue to be an owner until we're done. if (dst->get() == &src) {
srcKeepAlive.reset(SkRef(&src));
}
dst->reset(new SkPathRef);
}
if (dst->get() != &src) {
(*dst)->fVerbs = src.fVerbs;
(*dst)->fConicWeights = src.fConicWeights;
(*dst)->callGenIDChangeListeners();
(*dst)->fGenerationID = 0; // mark as dirty // don't copy, just allocate the points
(*dst)->fPoints.resize(src.fPoints.size());
}
matrix.mapPoints((*dst)->fPoints.begin(), src.fPoints.begin(), src.fPoints.size());
// Need to check this here in case (&src == dst) bool canXformBounds = !src.fBoundsIsDirty && matrix.rectStaysRect() && src.countPoints() > 1;
/* * Here we optimize the bounds computation, by noting if the bounds are * already known, and if so, we just transform those as well and mark * them as "known", rather than force the transformed path to have to * recompute them. * * Special gotchas if the path is effectively empty (<= 1 point) or * if it is non-finite. In those cases bounds need to stay empty, * regardless of the matrix.
*/ if (canXformBounds) {
(*dst)->fBoundsIsDirty = false; if (src.fIsFinite) {
matrix.mapRect(&(*dst)->fBounds, src.fBounds); if (!((*dst)->fIsFinite = (*dst)->fBounds.isFinite())) {
(*dst)->fBounds.setEmpty();
} elseif (src.countPoints() & 1) { /* Matrix optimizations may cause the first point to use slightly different * math for its transform, which can lead to it being outside the transformed * bounds. Include it in the bounds just in case.
*/
SkPoint p = (*dst)->fPoints[0];
SkRect& r = (*dst)->fBounds;
r.fLeft = std::min(r.fLeft, p.fX);
r.fTop = std::min(r.fTop, p.fY);
r.fRight = std::max(r.fRight, p.fX);
r.fBottom = std::max(r.fBottom, p.fY);
}
} else {
(*dst)->fIsFinite = false;
(*dst)->fBounds.setEmpty();
}
} else {
(*dst)->fBoundsIsDirty = true;
}
(*dst)->fSegmentMask = src.fSegmentMask;
// It's an oval only if it stays a rect. Technically if scale is uniform, then it would stay an // arc. For now, don't bother handling that (we'd also need to fixup the angles for negative // scale, etc.) bool rectStaysRect = matrix.rectStaysRect(); const PathType newType =
(rectStaysRect && src.fType != PathType::kArc) ? src.fType : PathType::kGeneral;
(*dst)->fType = newType; if (newType == PathType::kOval || newType == PathType::kRRect) { unsigned start = src.fRRectOrOvalStartIdx; bool isCCW = SkToBool(src.fRRectOrOvalIsCCW);
transform_dir_and_start(matrix, newType == PathType::kRRect, &isCCW, &start);
(*dst)->fRRectOrOvalIsCCW = isCCW;
(*dst)->fRRectOrOvalStartIdx = start;
}
if (dst->get() == &src) {
(*dst)->callGenIDChangeListeners();
(*dst)->fGenerationID = 0;
}
SkDEBUGCODE((*dst)->validate();)
}
// Empties *pathRef. A uniquely-owned ref is cleared in place (keeping its
// storage); a shared ref is swapped for a fresh one sized to the old counts.
void SkPathRef::Rewind(sk_sp<SkPathRef>* pathRef) {
    SkPathRef* ref = pathRef->get();
    if (ref->unique()) {
        SkDEBUGCODE(ref->validate();)
        ref->callGenIDChangeListeners();
        ref->fBoundsIsDirty = true;  // this also invalidates fIsFinite
        ref->fGenerationID = 0;
        ref->fPoints.clear();
        ref->fVerbs.clear();
        ref->fConicWeights.clear();
        ref->fSegmentMask = 0;
        ref->fType = PathType::kGeneral;
        SkDEBUGCODE(ref->validate();)
    } else {
        // Remember the old sizes so the replacement can reserve matching capacity.
        const int priorVerbCount = ref->countVerbs();
        const int priorPointCount = ref->countPoints();
        pathRef->reset(new SkPathRef);
        (*pathRef)->resetToSize(0, 0, 0, priorVerbCount, priorPointCount);
    }
}
// We explicitly check fSegmentMask as a quick-reject. We could skip it, // since it is only a cache of info in the fVerbs, but its a fast way to // notice a difference if (fSegmentMask != ref.fSegmentMask) { returnfalse;
}
Die Informationen auf dieser Webseite wurden
nach bestem Wissen sorgfältig zusammengestellt. Es wird jedoch weder Vollständigkeit noch Richtigkeit
noch Qualität der bereitgestellten Informationen zugesichert.
Bemerkung:
Die farbliche Syntaxdarstellung und die Messung sind noch experimentell.