hwcontext_vdpau.c (FFmpeg 4.4)
/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "config.h"

#include <stdint.h>
#include <string.h>

#include <vdpau/vdpau.h>

#include "buffer.h"
#include "common.h"
#include "hwcontext.h"
#include "hwcontext_internal.h"
#include "hwcontext_vdpau.h"
#include "mem.h"
#include "pixfmt.h"
#include "pixdesc.h"

typedef struct VDPAUPixFmtMap {
    VdpYCbCrFormat vdpau_fmt;
    enum AVPixelFormat pix_fmt;
} VDPAUPixFmtMap;

static const VDPAUPixFmtMap pix_fmts_420[] = {
    { VDP_YCBCR_FORMAT_NV12, AV_PIX_FMT_NV12    },
    { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV420P },
#ifdef VDP_YCBCR_FORMAT_P016
    { VDP_YCBCR_FORMAT_P016, AV_PIX_FMT_P016 },
    { VDP_YCBCR_FORMAT_P010, AV_PIX_FMT_P010 },
#endif
    { 0, AV_PIX_FMT_NONE, },
};

static const VDPAUPixFmtMap pix_fmts_422[] = {
    { VDP_YCBCR_FORMAT_NV12, AV_PIX_FMT_NV16    },
    { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV422P },
    { VDP_YCBCR_FORMAT_UYVY, AV_PIX_FMT_UYVY422 },
    { VDP_YCBCR_FORMAT_YUYV, AV_PIX_FMT_YUYV422 },
    { 0, AV_PIX_FMT_NONE, },
};

static const VDPAUPixFmtMap pix_fmts_444[] = {
#ifdef VDP_YCBCR_FORMAT_Y_U_V_444
    { VDP_YCBCR_FORMAT_Y_U_V_444, AV_PIX_FMT_YUV444P },
#endif
#ifdef VDP_YCBCR_FORMAT_P016
    { VDP_YCBCR_FORMAT_Y_U_V_444_16, AV_PIX_FMT_YUV444P16 },
#endif
    { 0, AV_PIX_FMT_NONE, },
};

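/* Each entry pairs a VDPAU chroma type with the AVHWFramesContext sw_format
 * that selects it and with the list of candidate get/put-bits transfer
 * formats, which is probed against the driver in vdpau_init_pixmfts(). */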
static const struct {
    VdpChromaType chroma_type;
    enum AVPixelFormat frames_sw_format;
    const VDPAUPixFmtMap *map;
} vdpau_pix_fmts[] = {
    { VDP_CHROMA_TYPE_420, AV_PIX_FMT_YUV420P, pix_fmts_420 },
    { VDP_CHROMA_TYPE_422, AV_PIX_FMT_YUV422P, pix_fmts_422 },
    { VDP_CHROMA_TYPE_444, AV_PIX_FMT_YUV444P, pix_fmts_444 },
#ifdef VDP_YCBCR_FORMAT_P016
    { VDP_CHROMA_TYPE_420_16, AV_PIX_FMT_YUV420P10, pix_fmts_420 },
    { VDP_CHROMA_TYPE_420_16, AV_PIX_FMT_YUV420P12, pix_fmts_420 },
    { VDP_CHROMA_TYPE_422_16, AV_PIX_FMT_YUV422P10, pix_fmts_422 },
    { VDP_CHROMA_TYPE_444_16, AV_PIX_FMT_YUV444P10, pix_fmts_444 },
    { VDP_CHROMA_TYPE_444_16, AV_PIX_FMT_YUV444P12, pix_fmts_444 },
#endif
};

typedef struct VDPAUDeviceContext {
    VdpVideoSurfaceQueryGetPutBitsYCbCrCapabilities *get_transfer_caps;
    VdpVideoSurfaceGetBitsYCbCr                     *get_data;
    VdpVideoSurfacePutBitsYCbCr                     *put_data;
    VdpVideoSurfaceCreate                           *surf_create;
    VdpVideoSurfaceDestroy                          *surf_destroy;

    enum AVPixelFormat *pix_fmts[FF_ARRAY_ELEMS(vdpau_pix_fmts)];
    int                 nb_pix_fmts[FF_ARRAY_ELEMS(vdpau_pix_fmts)];
} VDPAUDeviceContext;

typedef struct VDPAUFramesContext {
    VdpVideoSurfaceGetBitsYCbCr *get_data;
    VdpVideoSurfacePutBitsYCbCr *put_data;
    VdpChromaType chroma_type;
    int chroma_idx;

    const enum AVPixelFormat *pix_fmts;
    int                       nb_pix_fmts;
} VDPAUFramesContext;

static int count_pixfmts(const VDPAUPixFmtMap *map)
{
    int count = 0;
    while (map->pix_fmt != AV_PIX_FMT_NONE) {
        map++;
        count++;
    }
    return count;
}

static int vdpau_init_pixmfts(AVHWDeviceContext *ctx)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;
    VDPAUDeviceContext   *priv  = ctx->internal->priv;
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(priv->pix_fmts); i++) {
        const VDPAUPixFmtMap *map = vdpau_pix_fmts[i].map;
        int nb_pix_fmts;

        nb_pix_fmts = count_pixfmts(map);
        priv->pix_fmts[i] = av_malloc_array(nb_pix_fmts + 1, sizeof(*priv->pix_fmts[i]));
        if (!priv->pix_fmts[i])
            return AVERROR(ENOMEM);

        nb_pix_fmts = 0;
        while (map->pix_fmt != AV_PIX_FMT_NONE) {
            VdpBool supported;
            VdpStatus err = priv->get_transfer_caps(hwctx->device, vdpau_pix_fmts[i].chroma_type,
                                                    map->vdpau_fmt, &supported);
            if (err == VDP_STATUS_OK && supported)
                priv->pix_fmts[i][nb_pix_fmts++] = map->pix_fmt;
            map++;
        }
        priv->pix_fmts[i][nb_pix_fmts++] = AV_PIX_FMT_NONE;
        priv->nb_pix_fmts[i]             = nb_pix_fmts;
    }

    return 0;
}

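/* Resolve a single VDPAU entry point via get_proc_address(). The macro
 * expects the variables `hwctx`, `ctx` and `err` to exist in the calling
 * scope and makes the caller return AVERROR_UNKNOWN on failure. */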
#define GET_CALLBACK(id, result)                                            \
do {                                                                        \
    void *tmp;                                                              \
    err = hwctx->get_proc_address(hwctx->device, id, &tmp);                 \
    if (err != VDP_STATUS_OK) {                                             \
        av_log(ctx, AV_LOG_ERROR, "Error getting the " #id " callback.\n"); \
        return AVERROR_UNKNOWN;                                             \
    }                                                                       \
    result = tmp;                                                           \
} while (0)

static int vdpau_device_init(AVHWDeviceContext *ctx)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;
    VDPAUDeviceContext   *priv  = ctx->internal->priv;
    VdpStatus             err;
    int                   ret;

    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_QUERY_GET_PUT_BITS_Y_CB_CR_CAPABILITIES,
                 priv->get_transfer_caps);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_GET_BITS_Y_CB_CR, priv->get_data);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_PUT_BITS_Y_CB_CR, priv->put_data);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_CREATE,           priv->surf_create);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_DESTROY,          priv->surf_destroy);

    ret = vdpau_init_pixmfts(ctx);
    if (ret < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error querying the supported pixel formats\n");
        return ret;
    }

    return 0;
}

static void vdpau_device_uninit(AVHWDeviceContext *ctx)
{
    VDPAUDeviceContext *priv = ctx->internal->priv;
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(priv->pix_fmts); i++)
        av_freep(&priv->pix_fmts[i]);
}

static int vdpau_frames_get_constraints(AVHWDeviceContext *ctx,
                                        const void *hwconfig,
                                        AVHWFramesConstraints *constraints)
{
    VDPAUDeviceContext *priv = ctx->internal->priv;
    int nb_sw_formats = 0;
    int i;

    constraints->valid_sw_formats = av_malloc_array(FF_ARRAY_ELEMS(vdpau_pix_fmts) + 1,
                                                    sizeof(*constraints->valid_sw_formats));
    if (!constraints->valid_sw_formats)
        return AVERROR(ENOMEM);

    for (i = 0; i < FF_ARRAY_ELEMS(vdpau_pix_fmts); i++) {
        if (priv->nb_pix_fmts[i] > 1)
            constraints->valid_sw_formats[nb_sw_formats++] = vdpau_pix_fmts[i].frames_sw_format;
    }
    constraints->valid_sw_formats[nb_sw_formats] = AV_PIX_FMT_NONE;

    constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
    if (!constraints->valid_hw_formats)
        return AVERROR(ENOMEM);

    constraints->valid_hw_formats[0] = AV_PIX_FMT_VDPAU;
    constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;

    return 0;
}

static void vdpau_buffer_free(void *opaque, uint8_t *data)
{
    AVHWFramesContext  *ctx         = opaque;
    VDPAUDeviceContext *device_priv = ctx->device_ctx->internal->priv;
    VdpVideoSurface     surf        = (VdpVideoSurface)(uintptr_t)data;

    device_priv->surf_destroy(surf);
}

static AVBufferRef *vdpau_pool_alloc(void *opaque, buffer_size_t size)
{
    AVHWFramesContext    *ctx          = opaque;
    VDPAUFramesContext   *priv         = ctx->internal->priv;
    AVVDPAUDeviceContext *device_hwctx = ctx->device_ctx->hwctx;
    VDPAUDeviceContext   *device_priv  = ctx->device_ctx->internal->priv;

    AVBufferRef    *ret;
    VdpVideoSurface surf;
    VdpStatus       err;

    err = device_priv->surf_create(device_hwctx->device, priv->chroma_type,
                                   ctx->width, ctx->height, &surf);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "Error allocating a VDPAU video surface\n");
        return NULL;
    }

    /* The VdpVideoSurface handle itself is stored in the buffer's data
     * pointer; it is not dereferenceable CPU memory. */
    ret = av_buffer_create((uint8_t*)(uintptr_t)surf, sizeof(surf),
                           vdpau_buffer_free, ctx, AV_BUFFER_FLAG_READONLY);
    if (!ret) {
        device_priv->surf_destroy(surf);
        return NULL;
    }

    return ret;
}

static int vdpau_frames_init(AVHWFramesContext *ctx)
{
    VDPAUDeviceContext *device_priv = ctx->device_ctx->internal->priv;
    VDPAUFramesContext *priv        = ctx->internal->priv;

    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(vdpau_pix_fmts); i++) {
        if (vdpau_pix_fmts[i].frames_sw_format == ctx->sw_format) {
            priv->chroma_type = vdpau_pix_fmts[i].chroma_type;
            priv->chroma_idx  = i;
            priv->pix_fmts    = device_priv->pix_fmts[i];
            priv->nb_pix_fmts = device_priv->nb_pix_fmts[i];
            break;
        }
    }
    if (priv->nb_pix_fmts < 2) {
        av_log(ctx, AV_LOG_ERROR, "Unsupported sw format: %s\n",
               av_get_pix_fmt_name(ctx->sw_format));
        return AVERROR(ENOSYS);
    }

    if (!ctx->pool) {
        ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(VdpVideoSurface), ctx,
                                                            vdpau_pool_alloc, NULL);
        if (!ctx->internal->pool_internal)
            return AVERROR(ENOMEM);
    }

    priv->get_data = device_priv->get_data;
    priv->put_data = device_priv->put_data;

    return 0;
}

static int vdpau_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
{
    frame->buf[0] = av_buffer_pool_get(ctx->pool);
    if (!frame->buf[0])
        return AVERROR(ENOMEM);

    frame->data[3] = frame->buf[0]->data;
    frame->format  = AV_PIX_FMT_VDPAU;
    frame->width   = ctx->width;
    frame->height  = ctx->height;

    return 0;
}

static int vdpau_transfer_get_formats(AVHWFramesContext *ctx,
                                      enum AVHWFrameTransferDirection dir,
                                      enum AVPixelFormat **formats)
{
    VDPAUFramesContext *priv = ctx->internal->priv;

    enum AVPixelFormat *fmts;

    if (priv->nb_pix_fmts == 1) {
        av_log(ctx, AV_LOG_ERROR,
               "No target formats are supported for this chroma type\n");
        return AVERROR(ENOSYS);
    }

    fmts = av_malloc_array(priv->nb_pix_fmts, sizeof(*fmts));
    if (!fmts)
        return AVERROR(ENOMEM);

    memcpy(fmts, priv->pix_fmts, sizeof(*fmts) * (priv->nb_pix_fmts));
    *formats = fmts;

    return 0;
}

static int vdpau_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst,
                                    const AVFrame *src)
{
    VDPAUFramesContext *priv = ctx->internal->priv;
    VdpVideoSurface     surf = (VdpVideoSurface)(uintptr_t)src->data[3];

    void *data[3];
    uint32_t linesize[3];

    const VDPAUPixFmtMap *map;
    VdpYCbCrFormat vdpau_format;
    VdpStatus err;
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(data) && dst->data[i]; i++) {
        data[i] = dst->data[i];
        if (dst->linesize[i] < 0 || dst->linesize[i] > UINT32_MAX) {
            av_log(ctx, AV_LOG_ERROR,
                   "The linesize %d cannot be represented as uint32\n",
                   dst->linesize[i]);
            return AVERROR(ERANGE);
        }
        linesize[i] = dst->linesize[i];
    }

    map = vdpau_pix_fmts[priv->chroma_idx].map;
    for (i = 0; map[i].pix_fmt != AV_PIX_FMT_NONE; i++) {
        if (map[i].pix_fmt == dst->format) {
            vdpau_format = map[i].vdpau_fmt;
            break;
        }
    }
    if (map[i].pix_fmt == AV_PIX_FMT_NONE) {
        av_log(ctx, AV_LOG_ERROR,
               "Unsupported target pixel format: %s\n",
               av_get_pix_fmt_name(dst->format));
        return AVERROR(EINVAL);
    }

    /* Swap the chroma plane pointers for the formats where VDPAU's plane
     * order does not match the FFmpeg pixel format layout. */
    if ((vdpau_format == VDP_YCBCR_FORMAT_YV12)
#ifdef VDP_YCBCR_FORMAT_Y_U_V_444
        || (vdpau_format == VDP_YCBCR_FORMAT_Y_U_V_444)
#endif
#ifdef VDP_YCBCR_FORMAT_P016
        || (vdpau_format == VDP_YCBCR_FORMAT_Y_U_V_444_16)
#endif
        )
        FFSWAP(void*, data[1], data[2]);

    err = priv->get_data(surf, vdpau_format, data, linesize);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "Error retrieving the data from a VDPAU surface\n");
        return AVERROR_UNKNOWN;
    }

    return 0;
}

static int vdpau_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst,
                                  const AVFrame *src)
{
    VDPAUFramesContext *priv = ctx->internal->priv;
    VdpVideoSurface     surf = (VdpVideoSurface)(uintptr_t)dst->data[3];

    const void *data[3];
    uint32_t linesize[3];

    const VDPAUPixFmtMap *map;
    VdpYCbCrFormat vdpau_format;
    VdpStatus err;
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(data) && src->data[i]; i++) {
        data[i] = src->data[i];
        if (src->linesize[i] < 0 || src->linesize[i] > UINT32_MAX) {
            av_log(ctx, AV_LOG_ERROR,
                   "The linesize %d cannot be represented as uint32\n",
                   src->linesize[i]);
            return AVERROR(ERANGE);
        }
        linesize[i] = src->linesize[i];
    }

    map = vdpau_pix_fmts[priv->chroma_idx].map;
    for (i = 0; map[i].pix_fmt != AV_PIX_FMT_NONE; i++) {
        if (map[i].pix_fmt == src->format) {
            vdpau_format = map[i].vdpau_fmt;
            break;
        }
    }
    if (map[i].pix_fmt == AV_PIX_FMT_NONE) {
        av_log(ctx, AV_LOG_ERROR,
               "Unsupported source pixel format: %s\n",
               av_get_pix_fmt_name(src->format));
        return AVERROR(EINVAL);
    }

    if ((vdpau_format == VDP_YCBCR_FORMAT_YV12)
#ifdef VDP_YCBCR_FORMAT_Y_U_V_444
        || (vdpau_format == VDP_YCBCR_FORMAT_Y_U_V_444)
#endif
        )
        FFSWAP(const void*, data[1], data[2]);

    err = priv->put_data(surf, vdpau_format, data, linesize);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "Error uploading the data to a VDPAU surface\n");
        return AVERROR_UNKNOWN;
    }

    return 0;
}

#if HAVE_VDPAU_X11
#include <vdpau/vdpau_x11.h>
#include <X11/Xlib.h>

typedef struct VDPAUDevicePriv {
    VdpDeviceDestroy *device_destroy;
    Display *dpy;
} VDPAUDevicePriv;

static void vdpau_device_free(AVHWDeviceContext *ctx)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;
    VDPAUDevicePriv      *priv  = ctx->user_opaque;

    if (priv->device_destroy)
        priv->device_destroy(hwctx->device);
    if (priv->dpy)
        XCloseDisplay(priv->dpy);
    av_freep(&priv);
}

static int vdpau_device_create(AVHWDeviceContext *ctx, const char *device,
                               AVDictionary *opts, int flags)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;

    VDPAUDevicePriv *priv;
    VdpStatus err;
    VdpGetInformationString *get_information_string;
    const char *display, *vendor;

    priv = av_mallocz(sizeof(*priv));
    if (!priv)
        return AVERROR(ENOMEM);

    ctx->user_opaque = priv;
    ctx->free        = vdpau_device_free;

    priv->dpy = XOpenDisplay(device);
    if (!priv->dpy) {
        av_log(ctx, AV_LOG_ERROR, "Cannot open the X11 display %s.\n",
               XDisplayName(device));
        return AVERROR_UNKNOWN;
    }
    display = XDisplayString(priv->dpy);

    err = vdp_device_create_x11(priv->dpy, XDefaultScreen(priv->dpy),
                                &hwctx->device, &hwctx->get_proc_address);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "VDPAU device creation on X11 display %s failed.\n",
               display);
        return AVERROR_UNKNOWN;
    }

    GET_CALLBACK(VDP_FUNC_ID_GET_INFORMATION_STRING, get_information_string);
    GET_CALLBACK(VDP_FUNC_ID_DEVICE_DESTROY,         priv->device_destroy);

    get_information_string(&vendor);
    av_log(ctx, AV_LOG_VERBOSE, "Successfully created a VDPAU device (%s) on "
           "X11 display %s\n", vendor, display);

    return 0;
}
#endif

const HWContextType ff_hwcontext_type_vdpau = {
    .type                   = AV_HWDEVICE_TYPE_VDPAU,
    .name                   = "VDPAU",

    .device_hwctx_size      = sizeof(AVVDPAUDeviceContext),
    .device_priv_size       = sizeof(VDPAUDeviceContext),
    .frames_priv_size       = sizeof(VDPAUFramesContext),

#if HAVE_VDPAU_X11
    .device_create          = vdpau_device_create,
#endif
    .device_init            = vdpau_device_init,
    .device_uninit          = vdpau_device_uninit,
    .frames_get_constraints = vdpau_frames_get_constraints,
    .frames_init            = vdpau_frames_init,
    .frames_get_buffer      = vdpau_get_buffer,
    .transfer_get_formats   = vdpau_transfer_get_formats,
    .transfer_data_to       = vdpau_transfer_data_to,
    .transfer_data_from     = vdpau_transfer_data_from,

    .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_VDPAU, AV_PIX_FMT_NONE },
};
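
/*
 * A minimal usage sketch (not part of the original file), showing how a
 * client reaches the callbacks above through the public libavutil API:
 * create a VDPAU device, set up a frames context, allocate a surface-backed
 * frame and download it to system memory. The 1920x1080/NV12 parameters and
 * the function name are illustrative only; the headers already included at
 * the top of this file provide every declaration used here. Guarded out so
 * it does not affect the build.
 */
#if 0
static int vdpau_roundtrip_example(void)
{
    AVBufferRef *device_ref = NULL, *frames_ref = NULL;
    AVFrame *hw = av_frame_alloc();
    AVFrame *sw = av_frame_alloc();
    int ret;

    if (!hw || !sw) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }

    /* Opens the default X11 display and ends up in vdpau_device_create() /
     * vdpau_device_init() above. */
    ret = av_hwdevice_ctx_create(&device_ref, AV_HWDEVICE_TYPE_VDPAU, NULL, NULL, 0);
    if (ret < 0)
        goto fail;

    /* Describe a pool of 4:2:0 surfaces; vdpau_frames_init() maps the
     * sw_format to VDP_CHROMA_TYPE_420. */
    frames_ref = av_hwframe_ctx_alloc(device_ref);
    if (!frames_ref) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }
    {
        AVHWFramesContext *fc = (AVHWFramesContext*)frames_ref->data;
        fc->format    = AV_PIX_FMT_VDPAU;
        fc->sw_format = AV_PIX_FMT_NV12;
        fc->width     = 1920;
        fc->height    = 1080;
    }
    ret = av_hwframe_ctx_init(frames_ref);
    if (ret < 0)
        goto fail;

    /* Allocates a VdpVideoSurface from the pool (vdpau_get_buffer()) and
     * reads it back as NV12 (vdpau_transfer_data_from()). */
    ret = av_hwframe_get_buffer(frames_ref, hw, 0);
    if (ret < 0)
        goto fail;
    sw->format = AV_PIX_FMT_NV12;
    ret = av_hwframe_transfer_data(sw, hw, 0);

fail:
    av_frame_free(&hw);
    av_frame_free(&sw);
    av_buffer_unref(&frames_ref);
    av_buffer_unref(&device_ref);
    return ret;
}
#endif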