/* * This file is part of FFmpeg. * * FFmpeg is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * FFmpeg is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with FFmpeg; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
typedefstruct FFHWDeviceContext { /** * The public AVHWDeviceContext. See hwcontext.h for it.
*/
AVHWDeviceContext p;
const HWContextType *hw_type;
/** * For a derived device, a reference to the original device * context it was derived from.
*/
AVBufferRef *source_device;
} FFHWDeviceContext;
enum AVHWDeviceType av_hwdevice_find_type_by_name(constchar *name)
{ int type; for (type = 0; type < FF_ARRAY_ELEMS(hw_type_names); type++) { if (hw_type_names[type] && !strcmp(hw_type_names[type], name)) return type;
} return AV_HWDEVICE_TYPE_NONE;
}
/**
 * Return the canonical name of a device type, or NULL if the type is
 * out of range (NONE or beyond the name table).
 * (Fixed extraction artifact: `constchar` -> `const char`.)
 *
 * @param type device type to name
 * @return pointer into the static hw_type_names table, or NULL
 */
const char *av_hwdevice_get_type_name(enum AVHWDeviceType type)
{
    if (type > AV_HWDEVICE_TYPE_NONE &&
        type < FF_ARRAY_ELEMS(hw_type_names))
        return hw_type_names[type];
    else
        return NULL;
}
enum AVHWDeviceType av_hwdevice_iterate_types(enum AVHWDeviceType prev)
{ enum AVHWDeviceType next; int i, set = 0; for (i = 0; hw_table[i]; i++) { if (prev != AV_HWDEVICE_TYPE_NONE && hw_table[i]->type <= prev) continue; if (!set || hw_table[i]->type < next) {
next = hw_table[i]->type;
set = 1;
}
} return set ? next : AV_HWDEVICE_TYPE_NONE;
}
/* NOTE(review): orphan fragment — the enclosing function header (apparently
 * the AVHWDeviceContext buffer free callback) is missing from this chunk,
 * so the code is left byte-identical rather than rewritten. */
/* uninit might still want access the hw context and the user
 * free() callback might destroy it, so uninit has to be called first */ if (ctxi->hw_type->device_uninit)
ctxi->hw_type->device_uninit(ctx);
/**
 * Pre-allocate ctx->initial_pool_size frames so that the pool is fully
 * populated before the frames context is handed to the user.
 * (Fixed extraction artifact: `staticint` -> `static int`. Also fixed a
 * defect: a failed av_frame_alloc() previously left ret == 0, silently
 * reporting success.)
 *
 * @param ref buffer reference whose data is an AVHWFramesContext
 * @return 0 on success, a negative AVERROR code on failure
 */
static int hwframe_pool_prealloc(AVBufferRef *ref)
{
    AVHWFramesContext *ctx = (AVHWFramesContext*)ref->data;
    AVFrame **frames;
    int i, ret = 0;

    frames = av_calloc(ctx->initial_pool_size, sizeof(*frames));
    if (!frames)
        return AVERROR(ENOMEM);

    for (i = 0; i < ctx->initial_pool_size; i++) {
        frames[i] = av_frame_alloc();
        if (!frames[i]) {
            ret = AVERROR(ENOMEM);
            goto fail;
        }

        ret = av_hwframe_get_buffer(ref, frames[i], 0);
        if (ret < 0)
            goto fail;
    }

fail:
    /* frames was zeroed by av_calloc, so freeing unallocated tail
     * entries is a no-op */
    for (i = 0; i < ctx->initial_pool_size; i++)
        av_frame_free(&frames[i]);
    av_freep(&frames);

    return ret;
}
/**
 * Finalize a frames context before use: validate the configured pixel
 * format and dimensions, run the backend's frames_init, install the
 * internal pool if the user did not supply one, and optionally
 * preallocate the pool.
 * (Fixed extraction artifact: `constenum` -> `const enum`.)
 *
 * @param ref buffer reference whose data is an AVHWFramesContext
 * @return 0 on success, a negative AVERROR code on failure
 */
int av_hwframe_ctx_init(AVBufferRef *ref)
{
    FFHWFramesContext *ctxi = (FFHWFramesContext*)ref->data;
    AVHWFramesContext *ctx  = &ctxi->p;
    const enum AVPixelFormat *pix_fmt;
    int ret;

    if (ctxi->source_frames) {
        /* A derived frame context is already initialised. */
        return 0;
    }

    /* validate the pixel format */
    for (pix_fmt = ctxi->hw_type->pix_fmts; *pix_fmt != AV_PIX_FMT_NONE; pix_fmt++) {
        if (*pix_fmt == ctx->format)
            break;
    }
    if (*pix_fmt == AV_PIX_FMT_NONE) {
        av_log(ctx, AV_LOG_ERROR, "The hardware pixel format '%s' is not supported by the device type '%s'\n",
               av_get_pix_fmt_name(ctx->format), ctxi->hw_type->name);
        return AVERROR(ENOSYS);
    }

    /* validate the dimensions */
    ret = av_image_check_size(ctx->width, ctx->height, 0, ctx);
    if (ret < 0)
        return ret;

    /* format-specific init */
    if (ctxi->hw_type->frames_init) {
        ret = ctxi->hw_type->frames_init(ctx);
        if (ret < 0)
            return ret;
    }

    /* a user-supplied pool takes precedence over the internal one */
    if (ctxi->pool_internal && !ctx->pool)
        ctx->pool = ctxi->pool_internal;

    /* preallocate the frames in the pool, if requested */
    if (ctx->initial_pool_size > 0) {
        ret = hwframe_pool_prealloc(ref);
        if (ret < 0)
            return ret;
    }

    return 0;
}
/* NOTE(review): truncated definition — the visible body ends after the
 * capability guard; the call into hw_type->transfer_get_formats() and the
 * function's return are missing from this chunk. Code left byte-identical. */
int av_hwframe_transfer_get_formats(AVBufferRef *hwframe_ref, enum AVHWFrameTransferDirection dir, enum AVPixelFormat **formats, int flags)
{
FFHWFramesContext *ctxi = (FFHWFramesContext*)hwframe_ref->data;
if (!ctxi->hw_type->transfer_get_formats) return AVERROR(ENOSYS);
/* NOTE(review): truncated definition — the body breaks off inside the
 * else-branch that is about to query supported transfer formats; the rest
 * of the function is missing from this chunk. `staticint` is an extraction
 * artifact fusing `static int` — TODO restore once the full body is
 * recovered. Code left byte-identical. */
staticint transfer_data_alloc(AVFrame *dst, const AVFrame *src, int flags)
{
AVHWFramesContext *ctx;
AVFrame *frame_tmp; int ret = 0;
if (!src->hw_frames_ctx) return AVERROR(EINVAL);
ctx = (AVHWFramesContext*)src->hw_frames_ctx->data;
frame_tmp = av_frame_alloc(); if (!frame_tmp) return AVERROR(ENOMEM);
/* if the format is set, use that
 * otherwise pick the first supported one */ if (dst->format >= 0) {
frame_tmp->format = dst->format;
} else { enum AVPixelFormat *formats;
/* NOTE(review): corrupted fragment — this function's text is interleaved
 * with a following, different function (the lines after this fragment
 * reference `src_frames`/`dst_frames` never declared here), so it cannot be
 * safely rewritten. `elseif` below is an extraction artifact fusing
 * `else if`. Code left byte-identical. */
int av_hwframe_transfer_data(AVFrame *dst, const AVFrame *src, int flags)
{ int ret;
if (!dst->buf[0]) return transfer_data_alloc(dst, src, flags);
/* * Hardware -> Hardware Transfer. * Unlike Software -> Hardware or Hardware -> Software, the transfer * function could be provided by either the src or dst, depending on * the specific combination of hardware.
 */ if (src->hw_frames_ctx && dst->hw_frames_ctx) {
FFHWFramesContext *src_ctx =
(FFHWFramesContext*)src->hw_frames_ctx->data;
FFHWFramesContext *dst_ctx =
(FFHWFramesContext*)dst->hw_frames_ctx->data;
if (src_ctx->source_frames) {
av_log(src_ctx, AV_LOG_ERROR, "A device with a derived frame context cannot be used as " "the source of a HW -> HW transfer."); return AVERROR(ENOSYS);
}
if (dst_ctx->source_frames) {
av_log(src_ctx, AV_LOG_ERROR, "A device with a derived frame context cannot be used as " "the destination of a HW -> HW transfer."); return AVERROR(ENOSYS);
}
/* try the source side first, fall back to the destination side */
ret = src_ctx->hw_type->transfer_data_from(&src_ctx->p, dst, src); if (ret == AVERROR(ENOSYS))
ret = dst_ctx->hw_type->transfer_data_to(&dst_ctx->p, dst, src); if (ret < 0) return ret;
} else { if (src->hw_frames_ctx) {
FFHWFramesContext *ctx = (FFHWFramesContext*)src->hw_frames_ctx->data;
ret = ctx->hw_type->transfer_data_from(&ctx->p, dst, src); if (ret < 0) return ret;
} elseif (dst->hw_frames_ctx) {
FFHWFramesContext *ctx = (FFHWFramesContext*)dst->hw_frames_ctx->data;
/* NOTE(review): orphan fragment — this appears to be the interior of a
 * frame-mapping function (presumably av_hwframe_map; the declarations of
 * `src_frames`, `dst_frames`, `hwmap` and `orig_dst_frames` are in text
 * missing from this chunk). `elseif` below is an extraction artifact
 * fusing `else if`. Code left byte-identical. */
if ((src_frames == dst_frames &&
src->format == dst_frames->p.sw_format &&
dst->format == dst_frames->p.format) ||
(src_frames->source_frames &&
src_frames->source_frames->data ==
(uint8_t*)dst_frames)) { // This is an unmap operation. We don't need to directly // do anything here other than fill in the original frame, // because the real unmap will be invoked when the last // reference to the mapped frame disappears. if (!src->buf[0]) {
av_log(src_frames, AV_LOG_ERROR, "Invalid mapping " "found when attempting unmap.\n"); return AVERROR(EINVAL);
}
hwmap = (HWMapDescriptor*)src->buf[0]->data; return av_frame_replace(dst, hwmap->source);
}
}
/* try mapping from the source side first */
if (src->hw_frames_ctx) {
FFHWFramesContext *src_frames = (FFHWFramesContext*)src->hw_frames_ctx->data;
if (src_frames->p.format == src->format &&
src_frames->hw_type->map_from) {
ret = src_frames->hw_type->map_from(&src_frames->p,
dst, src, flags); if (ret >= 0) return ret; elseif (ret != AVERROR(ENOSYS)) goto fail;
}
}
/* then try mapping to the destination side */
if (dst->hw_frames_ctx) {
FFHWFramesContext *dst_frames = (FFHWFramesContext*)dst->hw_frames_ctx->data;
if (dst_frames->p.format == dst->format &&
dst_frames->hw_type->map_to) {
ret = dst_frames->hw_type->map_to(&dst_frames->p,
dst, src, flags); if (ret >= 0) return ret; elseif (ret != AVERROR(ENOSYS)) goto fail;
}
}
return AVERROR(ENOSYS);
fail: // if the caller provided dst frames context, it should be preserved // by this function
av_assert0(orig_dst_frames == NULL ||
orig_dst_frames == dst->hw_frames_ctx);
// preserve user-provided dst frame fields, but clean // anything we might have set
dst->hw_frames_ctx = NULL;
av_frame_unref(dst);
/* NOTE(review): orphan fragment — this appears to be the tail of a
 * derived-frames-context constructor (presumably av_hwframe_ctx_create_derived;
 * the declarations of `src_src`, `dst_dev`, `srci`, `dsti`, `dst_ref` and the
 * function header are in text missing from this chunk). Code left
 * byte-identical. */
if (src_src->device_ctx == dst_dev) { // This is actually an unmapping, so we just return a // reference to the source frame context.
*derived_frame_ctx = av_buffer_ref(srci->source_frames); if (!*derived_frame_ctx) {
ret = AVERROR(ENOMEM); goto fail;
} return 0;
}
}
dst_ref = av_hwframe_ctx_alloc(derived_device_ctx); if (!dst_ref) {
ret = AVERROR(ENOMEM); goto fail;
}
/* let either side of the derivation perform backend-specific setup;
 * ENOSYS from both is treated as "nothing to do" */
ret = AVERROR(ENOSYS); if (srci->hw_type->frames_derive_from)
ret = srci->hw_type->frames_derive_from(dst, src, flags); if (ret == AVERROR(ENOSYS) &&
dsti->hw_type->frames_derive_to)
ret = dsti->hw_type->frames_derive_to(dst, src, flags); if (ret == AVERROR(ENOSYS))
ret = 0; if (ret) goto fail;
*derived_frame_ctx = dst_ref; return 0;
fail: if (dsti)
av_buffer_unref(&dsti->source_frames);
av_buffer_unref(&dst_ref); return ret;
}
Die Informationen auf dieser Webseite wurden
nach bestem Wissen sorgfältig zusammengestellt. Es wird jedoch weder Vollständigkeit, noch Richtigkeit,
noch Qualität der bereitgestellten Informationen zugesichert.
Bemerkung:
Die farbliche Syntaxdarstellung und die Messung sind noch experimentell.