// Returns true if |sampleSize| is usable for the sampling API.
// Only strictly positive sample sizes are valid; 0 and negatives are rejected.
// FIXME: As Leon has mentioned elsewhere, surely there is also a maximum sampleSize?
static bool is_valid_sample_size(int sampleSize) {
    return sampleSize > 0;
}
// Dispatch on the encoded format to choose the wrapper that implements
// SkAndroidCodec for this codec. (NOTE(review): the enclosing function's
// header is outside this chunk.)
const SkEncodedImageFormat format = codec->getEncodedFormat();
if (format == SkEncodedImageFormat::kAVIF) {
    if (SkCodecs::HasDecoder("avif")) {
        // If a dedicated AVIF decoder has been registered, SkAvifCodec can
        // handle scaling internally.
        return std::make_unique<SkAndroidCodecAdapter>(codec.release());
    }
    // This will fallback to SkHeifCodec, which needs sampling.
    return std::make_unique<SkSampledCodec>(codec.release());
}
switch (format) {
    // Formats that need sampling performed by the wrapper (per the AVIF
    // fallback comment above, SkSampledCodec supplies the sampling).
    case SkEncodedImageFormat::kPNG:
    case SkEncodedImageFormat::kICO:
    case SkEncodedImageFormat::kJPEG:
    case SkEncodedImageFormat::kBMP:
    case SkEncodedImageFormat::kWBMP:
    case SkEncodedImageFormat::kHEIF:
        return std::make_unique<SkSampledCodec>(codec.release());
    // Formats wrapped in the thin adapter (scaling handled by the codec
    // itself, as with the registered AVIF decoder above).
    case SkEncodedImageFormat::kGIF:
    case SkEncodedImageFormat::kWEBP:
    case SkEncodedImageFormat::kDNG:
        return std::make_unique<SkAndroidCodecAdapter>(codec.release());
    case SkEncodedImageFormat::kAVIF: // Handled above
    case SkEncodedImageFormat::kPKM:
    case SkEncodedImageFormat::kKTX:
    case SkEncodedImageFormat::kASTC:
    case SkEncodedImageFormat::kJPEGXL:
        // No SkAndroidCodec wrapper is provided for these formats.
        return nullptr;
}
SkUNREACHABLE;
}
// Maps a caller-requested color type onto the color type the decode will
// actually produce, taking the encoded image's precision and depth into
// account. Unsupported or unsatisfiable requests fall through to a default
// chosen from the encoded data (F16 for >8-bit components, 1010102 for
// 10-bit color depth, otherwise N32).
SkColorType SkAndroidCodec::computeOutputColorType(SkColorType requestedColorType) {
    const bool highPrecision = fCodec->getEncodedInfo().bitsPerComponent() > 8;
    const uint8_t colorDepth = fCodec->getEncodedInfo().getColorDepth();

    switch (requestedColorType) {
        case kARGB_4444_SkColorType:
            // 4444 is not supported as an output; promote to N32.
            return kN32_SkColorType;
        case kRGBA_F16_SkColorType:
            // F16 is always honored.
            return kRGBA_F16_SkColorType;
        case kAlpha_8_SkColorType:
            // Historical alias: before kGray_8_SkColorType existed, clients
            // requested kAlpha_8 for a grayscale decode. Treat it as kGray_8.
        case kGray_8_SkColorType:
            if (this->getInfo().colorType() == kGray_8_SkColorType) {
                return kGray_8_SkColorType;
            }
            break;
        case kRGB_565_SkColorType:
            // 565 only makes sense for opaque images.
            if (this->getInfo().alphaType() == kOpaque_SkAlphaType) {
                return kRGB_565_SkColorType;
            }
            break;
        case kRGBA_1010102_SkColorType:
            if (colorDepth == 10) {
                return kRGBA_1010102_SkColorType;
            }
            break;
        case kN32_SkColorType:
        default:
            break;
    }

    // F16 is the Android default for high precision images.
    if (highPrecision) {
        return kRGBA_F16_SkColorType;
    }
    return colorDepth == 10 ? kRGBA_1010102_SkColorType : kN32_SkColorType;
}
// Chooses the color space the decode should target for |outputColorType|.
// Preference order: the caller-supplied |prefColorSpace|, then CICP data in
// the encoded ICC profile, then the profile itself, then an sRGB-transfer
// space using the profile's toXYZD50 matrix, and finally plain sRGB.
// Returns nullptr for color types (e.g. kGray) where color correction is
// not supported.
// (Fix: the original text had the fused token `constauto`, which does not
// compile; restored to `const auto`.)
sk_sp<SkColorSpace> SkAndroidCodec::computeOutputColorSpace(SkColorType outputColorType,
                                                            sk_sp<SkColorSpace> prefColorSpace) {
    switch (outputColorType) {
        case kRGBA_F16_SkColorType:
        case kRGB_565_SkColorType:
        case kRGBA_8888_SkColorType:
        case kBGRA_8888_SkColorType:
        case kRGBA_1010102_SkColorType: {
            // If |prefColorSpace| is supplied, choose it.
            if (prefColorSpace) {
                return prefColorSpace;
            }

            const skcms_ICCProfile* encodedProfile = fCodec->getEncodedInfo().profile();
            if (encodedProfile) {
                // Prefer CICP information if it exists.
                if (encodedProfile->has_CICP) {
                    const auto cicpColorSpace =
                            cicp_get_sk_color_space(encodedProfile->CICP.color_primaries,
                                                    encodedProfile->CICP.transfer_characteristics,
                                                    encodedProfile->CICP.matrix_coefficients,
                                                    encodedProfile->CICP.video_full_range_flag);
                    if (cicpColorSpace) {
                        return cicpColorSpace;
                    }
                }
                if (auto encodedSpace = SkColorSpace::Make(*encodedProfile)) {
                    // Leave the pixels in the encoded color space. Color space conversion
                    // will be handled after decode time.
                    return encodedSpace;
                }
                if (encodedProfile->has_toXYZD50) {
                    // Profile could not be turned into a color space directly;
                    // fall back to sRGB transfer with the profile's gamut.
                    return SkColorSpace::MakeRGB(SkNamedTransferFn::kSRGB,
                                                 encodedProfile->toXYZD50);
                }
            }

            return SkColorSpace::MakeSRGB();
        }
        default:
            // Color correction not supported for kGray.
            return nullptr;
    }
}
// There are a variety of ways two SkISizes could be compared. This method // returns true if either dimensions of a is < that of b. // computeSampleSize also uses the opposite, which means that both // dimensions of a >= b. staticinlinebool smaller_than(const SkISize& a, const SkISize& b) { return a.width() < b.width() || a.height() < b.height();
}
// Both dimensions of a > that of b. staticinlinebool strictly_bigger_than(const SkISize& a, const SkISize& b) { return a.width() > b.width() && a.height() > b.height();
}
// Computes the sample size whose sampled dimensions best match *desiredSize,
// and rewrites *desiredSize to the dimensions actually achievable.
// NOTE(review): this chunk elides the initialization of the locals
// `sampleSize` and `computedSize` (an initial estimate and its sampled
// dimensions) — confirm against the full file.
int SkAndroidCodec::computeSampleSize(SkISize* desiredSize) const {
    SkASSERT(desiredSize);
    if (strictly_bigger_than(computedSize, *desiredSize)) {
        // See if there is a tighter fit: keep increasing the sample size
        // while the result still exceeds the desired size.
        while (true) {
            auto smaller = this->getSampledDimensions(sampleSize + 1);
            if (smaller == *desiredSize) {
                // Exact match.
                return sampleSize + 1;
            }
            if (smaller == computedSize || smaller_than(smaller, *desiredSize)) {
                // Cannot get any smaller without being smaller than desired.
                *desiredSize = computedSize;
                return sampleSize;
            }
            sampleSize++;
            computedSize = smaller;
        }
        // The loop above always returns.
        SkASSERT(false);
    }
    if (!smaller_than(computedSize, *desiredSize)) {
        // This means one of the computed dimensions is equal to desired, and
        // the other is bigger. This is as close as we can get.
        *desiredSize = computedSize;
        return sampleSize;
    }
    // computedSize is too small. Make it larger by decreasing the sample size
    // until the result is at least as big as desired.
    while (sampleSize > 2) {
        auto bigger = this->getSampledDimensions(sampleSize - 1);
        if (bigger == *desiredSize || !smaller_than(bigger, *desiredSize)) {
            *desiredSize = bigger;
            return sampleSize - 1;
        }
        sampleSize--;
    }
// (NOTE(review): tail of a method taking `sampleSize` and `subset`; its
// header is outside this chunk.)
// If the subset is the entire image, for consistency, use getSampledDimensions().
if (fCodec->dimensions() == subset.size()) {
    return this->getSampledDimensions(sampleSize);
}
// Otherwise scale each subset dimension down by the sample size.
// This should perhaps call a virtual function, but currently both of our subclasses
// want the same implementation.
return {get_scaled_dimension(subset.width(), sampleSize),
        get_scaled_dimension(subset.height(), sampleSize)};
}
// (NOTE(review): interior of a pixel-decoding method; `options`,
// `requestInfo`, `requestPixels`, and `requestRowBytes` come from the
// elided function header.)
// Normalize options: supply defaults when the caller passed none, validate
// any subset, and drop a subset that covers the whole image.
AndroidOptions defaultOptions;
if (!options) {
    options = &defaultOptions;
} else {
    if (options->fSubset) {
        if (!is_valid_subset(*options->fSubset, fCodec->dimensions())) {
            return SkCodec::kInvalidParameters;
        }
        if (SkIRect::MakeSize(fCodec->dimensions()) == *options->fSubset) {
            // The caller wants the whole thing, rather than a subset. Modify
            // the AndroidOptions passed to onGetAndroidPixels to not specify
            // a subset.
            defaultOptions = *options;
            defaultOptions.fSubset = nullptr;
            options = &defaultOptions;
        }
    }
}
// We may need to have handleFrameIndex recursively call this method
// to resolve one frame depending on another. The recursion stops
// when we find a frame which does not require an earlier frame
// e.g. frame->getRequiredFrame() returns kNoFrame
auto getPixelsFn = [&](const SkImageInfo& info, void* pixels, size_t rowBytes,
                       const SkCodec::Options& opts, int requiredFrame
                       ) -> SkCodec::Result {
    // Re-enter with the same options but targeting the required prior frame.
    SkAndroidCodec::AndroidOptions prevFrameOptions(
            reinterpret_cast<const SkAndroidCodec::AndroidOptions&>(opts));
    prevFrameOptions.fFrameIndex = requiredFrame;
    return this->getAndroidPixels(info, pixels, rowBytes, &prevFrameOptions);
};
if (auto result = fCodec->handleFrameIndex(requestInfo, requestPixels, requestRowBytes,
                                           *options, getPixelsFn);
    result != SkCodec::kSuccess) {
    return result;
}
Die Informationen auf dieser Webseite wurden
nach bestem Wissen sorgfältig zusammengestellt. Es wird jedoch weder Vollständigkeit noch Richtigkeit
noch Qualität der bereitgestellten Informationen zugesichert.
Bemerkung:
Die farbliche Syntaxdarstellung und die Messung sind noch experimentell.