FFmpeg  4.4
mediacodecdec_common.c
Go to the documentation of this file.
1 /*
2  * Android MediaCodec decoder
3  *
4  * Copyright (c) 2015-2016 Matthieu Bouron <matthieu.bouron stupeflix.com>
5  *
6  * This file is part of FFmpeg.
7  *
8  * FFmpeg is free software; you can redistribute it and/or
9  * modify it under the terms of the GNU Lesser General Public
10  * License as published by the Free Software Foundation; either
11  * version 2.1 of the License, or (at your option) any later version.
12  *
13  * FFmpeg is distributed in the hope that it will be useful,
14  * but WITHOUT ANY WARRANTY; without even the implied warranty of
15  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16  * Lesser General Public License for more details.
17  *
18  * You should have received a copy of the GNU Lesser General Public
19  * License along with FFmpeg; if not, write to the Free Software
20  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21  */
22 
#include <string.h>
#include <sys/types.h>

#include "libavutil/common.h"
#include "libavutil/hwcontext_mediacodec.h"
#include "libavutil/log.h"
#include "libavutil/mem.h"
#include "libavutil/pixfmt.h"
#include "libavutil/time.h"
#include "libavutil/timestamp.h"

#include "avcodec.h"
#include "internal.h"

#include "mediacodec.h"
#include "mediacodec_surface.h"
#include "mediacodec_sw_buffer.h"
#include "mediacodec_wrapper.h"
#include "mediacodecdec_common.h"
42 
43 /**
44  * OMX.k3.video.decoder.avc, OMX.NVIDIA.* OMX.SEC.avc.dec and OMX.google
45  * codec workarounds used in various place are taken from the Gstreamer
46  * project.
47  *
48  * Gstreamer references:
49  * https://cgit.freedesktop.org/gstreamer/gst-plugins-bad/tree/sys/androidmedia/
50  *
51  * Gstreamer copyright notice:
52  *
53  * Copyright (C) 2012, Collabora Ltd.
54  * Author: Sebastian Dröge <sebastian.droege@collabora.co.uk>
55  *
 * Copyright (C) 2012, Rafaël Carré <funman@videolan.org>
57  *
58  * Copyright (C) 2015, Sebastian Dröge <sebastian@centricular.com>
59  *
60  * Copyright (C) 2014-2015, Collabora Ltd.
 * Author: Matthieu Bouron <matthieu.bouron@collabora.com>
62  *
63  * Copyright (C) 2015, Edward Hervey
64  * Author: Edward Hervey <bilboed@gmail.com>
65  *
66  * Copyright (C) 2015, Matthew Waters <matthew@centricular.com>
67  *
68  * This library is free software; you can redistribute it and/or
69  * modify it under the terms of the GNU Lesser General Public
70  * License as published by the Free Software Foundation
71  * version 2.1 of the License.
72  *
73  * This library is distributed in the hope that it will be useful,
74  * but WITHOUT ANY WARRANTY; without even the implied warranty of
75  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
76  * Lesser General Public License for more details.
77  *
78  * You should have received a copy of the GNU Lesser General Public
79  * License along with this library; if not, write to the Free Software
80  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
81  *
82  */
83 
84 #define INPUT_DEQUEUE_TIMEOUT_US 8000
85 #define OUTPUT_DEQUEUE_TIMEOUT_US 8000
86 #define OUTPUT_DEQUEUE_BLOCK_TIMEOUT_US 1000000
87 
88 enum {
91 };
92 
94 {
95  switch (color_range) {
96  case COLOR_RANGE_FULL:
97  return AVCOL_RANGE_JPEG;
99  return AVCOL_RANGE_MPEG;
100  default:
102  }
103 }
104 
105 enum {
110 };
111 
112 static enum AVColorSpace mcdec_get_color_space(int color_standard)
113 {
114  switch (color_standard) {
116  return AVCOL_SPC_BT709;
118  return AVCOL_SPC_BT470BG;
120  return AVCOL_SPC_SMPTE170M;
122  return AVCOL_SPC_BT2020_NCL;
123  default:
124  return AVCOL_SPC_UNSPECIFIED;
125  }
126 }
127 
128 static enum AVColorPrimaries mcdec_get_color_pri(int color_standard)
129 {
130  switch (color_standard) {
132  return AVCOL_PRI_BT709;
134  return AVCOL_PRI_BT470BG;
136  return AVCOL_PRI_SMPTE170M;
138  return AVCOL_PRI_BT2020;
139  default:
140  return AVCOL_PRI_UNSPECIFIED;
141  }
142 }
143 
144 enum {
149 };
150 
151 static enum AVColorTransferCharacteristic mcdec_get_color_trc(int color_transfer)
152 {
153  switch (color_transfer) {
155  return AVCOL_TRC_LINEAR;
157  return AVCOL_TRC_SMPTE170M;
159  return AVCOL_TRC_SMPTEST2084;
160  case COLOR_TRANSFER_HLG:
161  return AVCOL_TRC_ARIB_STD_B67;
162  default:
163  return AVCOL_TRC_UNSPECIFIED;
164  }
165 }
166 
167 enum {
177 };
178 
179 static const struct {
180 
182  enum AVPixelFormat pix_fmt;
183 
184 } color_formats[] = {
185 
193  { 0 }
194 };
195 
198  int color_format)
199 {
200  int i;
201  enum AVPixelFormat ret = AV_PIX_FMT_NONE;
202 
203  if (s->surface) {
204  return AV_PIX_FMT_MEDIACODEC;
205  }
206 
207  if (!strcmp(s->codec_name, "OMX.k3.video.decoder.avc") && color_format == COLOR_FormatYCbYCr) {
209  }
210 
211  for (i = 0; i < FF_ARRAY_ELEMS(color_formats); i++) {
213  return color_formats[i].pix_fmt;
214  }
215  }
216 
217  av_log(avctx, AV_LOG_ERROR, "Output color format 0x%x (value=%d) is not supported\n",
219 
220  return ret;
221 }
222 
224 {
225  atomic_fetch_add(&s->refcount, 1);
226 }
227 
229 {
230  if (!s)
231  return;
232 
233  if (atomic_fetch_sub(&s->refcount, 1) == 1) {
234  if (s->codec) {
235  ff_AMediaCodec_delete(s->codec);
236  s->codec = NULL;
237  }
238 
239  if (s->format) {
240  ff_AMediaFormat_delete(s->format);
241  s->format = NULL;
242  }
243 
244  if (s->surface) {
246  s->surface = NULL;
247  }
248 
249  av_freep(&s->codec_name);
250  av_freep(&s);
251  }
252 }
253 
254 static void mediacodec_buffer_release(void *opaque, uint8_t *data)
255 {
256  AVMediaCodecBuffer *buffer = opaque;
258  int released = atomic_load(&buffer->released);
259 
260  if (!released && (ctx->delay_flush || buffer->serial == atomic_load(&ctx->serial))) {
261  atomic_fetch_sub(&ctx->hw_buffer_count, 1);
262  av_log(ctx->avctx, AV_LOG_DEBUG,
263  "Releasing output buffer %zd (%p) ts=%"PRId64" on free() [%d pending]\n",
264  buffer->index, buffer, buffer->pts, atomic_load(&ctx->hw_buffer_count));
265  ff_AMediaCodec_releaseOutputBuffer(ctx->codec, buffer->index, 0);
266  }
267 
268  if (ctx->delay_flush)
270  av_freep(&buffer);
271 }
272 
275  ssize_t index,
277  AVFrame *frame)
278 {
279  int ret = 0;
280  int status = 0;
281  AVMediaCodecBuffer *buffer = NULL;
282 
283  frame->buf[0] = NULL;
284  frame->width = avctx->width;
285  frame->height = avctx->height;
286  frame->format = avctx->pix_fmt;
288 
289  if (avctx->pkt_timebase.num && avctx->pkt_timebase.den) {
292  avctx->pkt_timebase);
293  } else {
294  frame->pts = info->presentationTimeUs;
295  }
296 #if FF_API_PKT_PTS
298  frame->pkt_pts = frame->pts;
300 #endif
302  frame->color_range = avctx->color_range;
304  frame->color_trc = avctx->color_trc;
305  frame->colorspace = avctx->colorspace;
306 
307  buffer = av_mallocz(sizeof(AVMediaCodecBuffer));
308  if (!buffer) {
309  ret = AVERROR(ENOMEM);
310  goto fail;
311  }
312 
313  atomic_init(&buffer->released, 0);
314 
316  0,
318  buffer,
320 
321  if (!frame->buf[0]) {
322  ret = AVERROR(ENOMEM);
323  goto fail;
324 
325  }
326 
327  buffer->ctx = s;
328  buffer->serial = atomic_load(&s->serial);
329  if (s->delay_flush)
331 
332  buffer->index = index;
333  buffer->pts = info->presentationTimeUs;
334 
335  frame->data[3] = (uint8_t *)buffer;
336 
337  atomic_fetch_add(&s->hw_buffer_count, 1);
338  av_log(avctx, AV_LOG_DEBUG,
339  "Wrapping output buffer %zd (%p) ts=%"PRId64" [%d pending]\n",
340  buffer->index, buffer, buffer->pts, atomic_load(&s->hw_buffer_count));
341 
342  return 0;
343 fail:
344  av_freep(buffer);
345  av_buffer_unref(&frame->buf[0]);
346  status = ff_AMediaCodec_releaseOutputBuffer(s->codec, index, 0);
347  if (status < 0) {
348  av_log(avctx, AV_LOG_ERROR, "Failed to release output buffer\n");
349  ret = AVERROR_EXTERNAL;
350  }
351 
352  return ret;
353 }
354 
357  uint8_t *data,
358  size_t size,
359  ssize_t index,
361  AVFrame *frame)
362 {
363  int ret = 0;
364  int status = 0;
365 
366  frame->width = avctx->width;
367  frame->height = avctx->height;
368  frame->format = avctx->pix_fmt;
369 
370  /* MediaCodec buffers needs to be copied to our own refcounted buffers
371  * because the flush command invalidates all input and output buffers.
372  */
373  if ((ret = ff_get_buffer(avctx, frame, 0)) < 0) {
374  av_log(avctx, AV_LOG_ERROR, "Could not allocate buffer\n");
375  goto done;
376  }
377 
378  /* Override frame->pkt_pts as ff_get_buffer will override its value based
379  * on the last avpacket received which is not in sync with the frame:
380  * * N avpackets can be pushed before 1 frame is actually returned
381  * * 0-sized avpackets are pushed to flush remaining frames at EOS */
382  if (avctx->pkt_timebase.num && avctx->pkt_timebase.den) {
385  avctx->pkt_timebase);
386  } else {
387  frame->pts = info->presentationTimeUs;
388  }
389 #if FF_API_PKT_PTS
391  frame->pkt_pts = frame->pts;
393 #endif
395 
396  av_log(avctx, AV_LOG_TRACE,
397  "Frame: width=%d stride=%d height=%d slice-height=%d "
398  "crop-top=%d crop-bottom=%d crop-left=%d crop-right=%d encoder=%s "
399  "destination linesizes=%d,%d,%d\n" ,
400  avctx->width, s->stride, avctx->height, s->slice_height,
401  s->crop_top, s->crop_bottom, s->crop_left, s->crop_right, s->codec_name,
402  frame->linesize[0], frame->linesize[1], frame->linesize[2]);
403 
404  switch (s->color_format) {
407  break;
412  break;
416  break;
419  break;
420  default:
421  av_log(avctx, AV_LOG_ERROR, "Unsupported color format 0x%x (value=%d)\n",
422  s->color_format, s->color_format);
423  ret = AVERROR(EINVAL);
424  goto done;
425  }
426 
427  ret = 0;
428 done:
429  status = ff_AMediaCodec_releaseOutputBuffer(s->codec, index, 0);
430  if (status < 0) {
431  av_log(avctx, AV_LOG_ERROR, "Failed to release output buffer\n");
432  ret = AVERROR_EXTERNAL;
433  }
434 
435  return ret;
436 }
437 
438 #define AMEDIAFORMAT_GET_INT32(name, key, mandatory) do { \
439  int32_t value = 0; \
440  if (ff_AMediaFormat_getInt32(s->format, key, &value)) { \
441  (name) = value; \
442  } else if (mandatory) { \
443  av_log(avctx, AV_LOG_ERROR, "Could not get %s from format %s\n", key, format); \
444  ret = AVERROR_EXTERNAL; \
445  goto fail; \
446  } \
447 } while (0) \
448 
450 {
451  int ret = 0;
452  int width = 0;
453  int height = 0;
454  int color_range = 0;
455  int color_standard = 0;
456  int color_transfer = 0;
457  char *format = NULL;
458 
459  if (!s->format) {
460  av_log(avctx, AV_LOG_ERROR, "Output MediaFormat is not set\n");
461  return AVERROR(EINVAL);
462  }
463 
464  format = ff_AMediaFormat_toString(s->format);
465  if (!format) {
466  return AVERROR_EXTERNAL;
467  }
468  av_log(avctx, AV_LOG_DEBUG, "Parsing MediaFormat %s\n", format);
469 
470  /* Mandatory fields */
471  AMEDIAFORMAT_GET_INT32(s->width, "width", 1);
472  AMEDIAFORMAT_GET_INT32(s->height, "height", 1);
473 
474  AMEDIAFORMAT_GET_INT32(s->stride, "stride", 0);
475  s->stride = s->stride > 0 ? s->stride : s->width;
476 
477  AMEDIAFORMAT_GET_INT32(s->slice_height, "slice-height", 0);
478 
479  if (strstr(s->codec_name, "OMX.Nvidia.") && s->slice_height == 0) {
480  s->slice_height = FFALIGN(s->height, 16);
481  } else if (strstr(s->codec_name, "OMX.SEC.avc.dec")) {
482  s->slice_height = avctx->height;
483  s->stride = avctx->width;
484  } else if (s->slice_height == 0) {
485  s->slice_height = s->height;
486  }
487 
488  AMEDIAFORMAT_GET_INT32(s->color_format, "color-format", 1);
489  avctx->pix_fmt = mcdec_map_color_format(avctx, s, s->color_format);
490  if (avctx->pix_fmt == AV_PIX_FMT_NONE) {
491  av_log(avctx, AV_LOG_ERROR, "Output color format is not supported\n");
492  ret = AVERROR(EINVAL);
493  goto fail;
494  }
495 
496  /* Optional fields */
497  AMEDIAFORMAT_GET_INT32(s->crop_top, "crop-top", 0);
498  AMEDIAFORMAT_GET_INT32(s->crop_bottom, "crop-bottom", 0);
499  AMEDIAFORMAT_GET_INT32(s->crop_left, "crop-left", 0);
500  AMEDIAFORMAT_GET_INT32(s->crop_right, "crop-right", 0);
501 
502  width = s->crop_right + 1 - s->crop_left;
503  height = s->crop_bottom + 1 - s->crop_top;
504 
505  AMEDIAFORMAT_GET_INT32(s->display_width, "display-width", 0);
506  AMEDIAFORMAT_GET_INT32(s->display_height, "display-height", 0);
507 
508  if (s->display_width && s->display_height) {
509  AVRational sar = av_div_q(
510  (AVRational){ s->display_width, s->display_height },
511  (AVRational){ width, height });
512  ff_set_sar(avctx, sar);
513  }
514 
515  AMEDIAFORMAT_GET_INT32(color_range, "color-range", 0);
516  if (color_range)
518 
519  AMEDIAFORMAT_GET_INT32(color_standard, "color-standard", 0);
520  if (color_standard) {
521  avctx->colorspace = mcdec_get_color_space(color_standard);
522  avctx->color_primaries = mcdec_get_color_pri(color_standard);
523  }
524 
525  AMEDIAFORMAT_GET_INT32(color_transfer, "color-transfer", 0);
526  if (color_transfer)
527  avctx->color_trc = mcdec_get_color_trc(color_transfer);
528 
529  av_log(avctx, AV_LOG_INFO,
530  "Output crop parameters top=%d bottom=%d left=%d right=%d, "
531  "resulting dimensions width=%d height=%d\n",
532  s->crop_top, s->crop_bottom, s->crop_left, s->crop_right,
533  width, height);
534 
535  av_freep(&format);
536  return ff_set_dimensions(avctx, width, height);
537 fail:
538  av_freep(&format);
539  return ret;
540 }
541 
543 {
544  FFAMediaCodec *codec = s->codec;
545  int status;
546 
547  s->output_buffer_count = 0;
548 
549  s->draining = 0;
550  s->flushing = 0;
551  s->eos = 0;
552  atomic_fetch_add(&s->serial, 1);
553  atomic_init(&s->hw_buffer_count, 0);
554  s->current_input_buffer = -1;
555 
556  status = ff_AMediaCodec_flush(codec);
557  if (status < 0) {
558  av_log(avctx, AV_LOG_ERROR, "Failed to flush codec\n");
559  return AVERROR_EXTERNAL;
560  }
561 
562  return 0;
563 }
564 
566  const char *mime, FFAMediaFormat *format)
567 {
568  int ret = 0;
569  int status;
570  int profile;
571 
572  enum AVPixelFormat pix_fmt;
573  static const enum AVPixelFormat pix_fmts[] = {
576  };
577 
578  s->avctx = avctx;
579  atomic_init(&s->refcount, 1);
580  atomic_init(&s->hw_buffer_count, 0);
581  atomic_init(&s->serial, 1);
582  s->current_input_buffer = -1;
583 
584  pix_fmt = ff_get_format(avctx, pix_fmts);
586  AVMediaCodecContext *user_ctx = avctx->hwaccel_context;
587 
588  if (avctx->hw_device_ctx) {
589  AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)(avctx->hw_device_ctx->data);
590  if (device_ctx->type == AV_HWDEVICE_TYPE_MEDIACODEC) {
591  if (device_ctx->hwctx) {
592  AVMediaCodecDeviceContext *mediacodec_ctx = (AVMediaCodecDeviceContext *)device_ctx->hwctx;
593  s->surface = ff_mediacodec_surface_ref(mediacodec_ctx->surface, avctx);
594  av_log(avctx, AV_LOG_INFO, "Using surface %p\n", s->surface);
595  }
596  }
597  }
598 
599  if (!s->surface && user_ctx && user_ctx->surface) {
600  s->surface = ff_mediacodec_surface_ref(user_ctx->surface, avctx);
601  av_log(avctx, AV_LOG_INFO, "Using surface %p\n", s->surface);
602  }
603  }
604 
606  if (profile < 0) {
607  av_log(avctx, AV_LOG_WARNING, "Unsupported or unknown profile\n");
608  }
609 
610  s->codec_name = ff_AMediaCodecList_getCodecNameByType(mime, profile, 0, avctx);
611  if (!s->codec_name) {
612  ret = AVERROR_EXTERNAL;
613  goto fail;
614  }
615 
616  av_log(avctx, AV_LOG_DEBUG, "Found decoder %s\n", s->codec_name);
617  s->codec = ff_AMediaCodec_createCodecByName(s->codec_name);
618  if (!s->codec) {
619  av_log(avctx, AV_LOG_ERROR, "Failed to create media decoder for type %s and name %s\n", mime, s->codec_name);
620  ret = AVERROR_EXTERNAL;
621  goto fail;
622  }
623 
624  status = ff_AMediaCodec_configure(s->codec, format, s->surface, NULL, 0);
625  if (status < 0) {
627  av_log(avctx, AV_LOG_ERROR,
628  "Failed to configure codec %s (status = %d) with format %s\n",
629  s->codec_name, status, desc);
630  av_freep(&desc);
631 
632  ret = AVERROR_EXTERNAL;
633  goto fail;
634  }
635 
636  status = ff_AMediaCodec_start(s->codec);
637  if (status < 0) {
639  av_log(avctx, AV_LOG_ERROR,
640  "Failed to start codec %s (status = %d) with format %s\n",
641  s->codec_name, status, desc);
642  av_freep(&desc);
643  ret = AVERROR_EXTERNAL;
644  goto fail;
645  }
646 
647  s->format = ff_AMediaCodec_getOutputFormat(s->codec);
648  if (s->format) {
649  if ((ret = mediacodec_dec_parse_format(avctx, s)) < 0) {
650  av_log(avctx, AV_LOG_ERROR,
651  "Failed to configure context\n");
652  goto fail;
653  }
654  }
655 
656  av_log(avctx, AV_LOG_DEBUG, "MediaCodec %p started successfully\n", s->codec);
657 
658  return 0;
659 
660 fail:
661  av_log(avctx, AV_LOG_ERROR, "MediaCodec %p failed to start\n", s->codec);
662  ff_mediacodec_dec_close(avctx, s);
663  return ret;
664 }
665 
667  AVPacket *pkt, bool wait)
668 {
669  int offset = 0;
670  int need_draining = 0;
671  uint8_t *data;
672  size_t size;
673  FFAMediaCodec *codec = s->codec;
674  int status;
675  int64_t input_dequeue_timeout_us = wait ? INPUT_DEQUEUE_TIMEOUT_US : 0;
676  int64_t pts;
677 
678  if (s->flushing) {
679  av_log(avctx, AV_LOG_ERROR, "Decoder is flushing and cannot accept new buffer "
680  "until all output buffers have been released\n");
681  return AVERROR_EXTERNAL;
682  }
683 
684  if (pkt->size == 0) {
685  need_draining = 1;
686  }
687 
688  if (s->draining && s->eos) {
689  return AVERROR_EOF;
690  }
691 
692  while (offset < pkt->size || (need_draining && !s->draining)) {
693  ssize_t index = s->current_input_buffer;
694  if (index < 0) {
695  index = ff_AMediaCodec_dequeueInputBuffer(codec, input_dequeue_timeout_us);
697  av_log(avctx, AV_LOG_TRACE, "No input buffer available, try again later\n");
698  break;
699  }
700 
701  if (index < 0) {
702  av_log(avctx, AV_LOG_ERROR, "Failed to dequeue input buffer (status=%zd)\n", index);
703  return AVERROR_EXTERNAL;
704  }
705  }
706  s->current_input_buffer = -1;
707 
709  if (!data) {
710  av_log(avctx, AV_LOG_ERROR, "Failed to get input buffer\n");
711  return AVERROR_EXTERNAL;
712  }
713 
714  pts = pkt->pts;
715  if (pts == AV_NOPTS_VALUE) {
716  av_log(avctx, AV_LOG_WARNING, "Input packet is missing PTS\n");
717  pts = 0;
718  }
719  if (pts && avctx->pkt_timebase.num && avctx->pkt_timebase.den) {
721  }
722 
723  if (need_draining) {
725 
726  av_log(avctx, AV_LOG_DEBUG, "Sending End Of Stream signal\n");
727 
728  status = ff_AMediaCodec_queueInputBuffer(codec, index, 0, 0, pts, flags);
729  if (status < 0) {
730  av_log(avctx, AV_LOG_ERROR, "Failed to queue input empty buffer (status = %d)\n", status);
731  return AVERROR_EXTERNAL;
732  }
733 
734  av_log(avctx, AV_LOG_TRACE,
735  "Queued empty EOS input buffer %zd with flags=%d\n", index, flags);
736 
737  s->draining = 1;
738  return 0;
739  }
740 
741  size = FFMIN(pkt->size - offset, size);
742  memcpy(data, pkt->data + offset, size);
743  offset += size;
744 
745  status = ff_AMediaCodec_queueInputBuffer(codec, index, 0, size, pts, 0);
746  if (status < 0) {
747  av_log(avctx, AV_LOG_ERROR, "Failed to queue input buffer (status = %d)\n", status);
748  return AVERROR_EXTERNAL;
749  }
750 
751  av_log(avctx, AV_LOG_TRACE,
752  "Queued input buffer %zd size=%zd ts=%"PRIi64"\n", index, size, pts);
753  }
754 
755  if (offset == 0)
756  return AVERROR(EAGAIN);
757  return offset;
758 }
759 
761  AVFrame *frame, bool wait)
762 {
763  int ret;
764  uint8_t *data;
765  ssize_t index;
766  size_t size;
767  FFAMediaCodec *codec = s->codec;
768  FFAMediaCodecBufferInfo info = { 0 };
769  int status;
770  int64_t output_dequeue_timeout_us = OUTPUT_DEQUEUE_TIMEOUT_US;
771 
772  if (s->draining && s->eos) {
773  return AVERROR_EOF;
774  }
775 
776  if (s->draining) {
777  /* If the codec is flushing or need to be flushed, block for a fair
778  * amount of time to ensure we got a frame */
779  output_dequeue_timeout_us = OUTPUT_DEQUEUE_BLOCK_TIMEOUT_US;
780  } else if (s->output_buffer_count == 0 || !wait) {
781  /* If the codec hasn't produced any frames, do not block so we
782  * can push data to it as fast as possible, and get the first
783  * frame */
784  output_dequeue_timeout_us = 0;
785  }
786 
787  index = ff_AMediaCodec_dequeueOutputBuffer(codec, &info, output_dequeue_timeout_us);
788  if (index >= 0) {
789  av_log(avctx, AV_LOG_TRACE, "Got output buffer %zd"
790  " offset=%" PRIi32 " size=%" PRIi32 " ts=%" PRIi64
791  " flags=%" PRIu32 "\n", index, info.offset, info.size,
792  info.presentationTimeUs, info.flags);
793 
795  s->eos = 1;
796  }
797 
798  if (info.size) {
799  if (s->surface) {
800  if ((ret = mediacodec_wrap_hw_buffer(avctx, s, index, &info, frame)) < 0) {
801  av_log(avctx, AV_LOG_ERROR, "Failed to wrap MediaCodec buffer\n");
802  return ret;
803  }
804  } else {
806  if (!data) {
807  av_log(avctx, AV_LOG_ERROR, "Failed to get output buffer\n");
808  return AVERROR_EXTERNAL;
809  }
810 
811  if ((ret = mediacodec_wrap_sw_buffer(avctx, s, data, size, index, &info, frame)) < 0) {
812  av_log(avctx, AV_LOG_ERROR, "Failed to wrap MediaCodec buffer\n");
813  return ret;
814  }
815  }
816 
817  s->output_buffer_count++;
818  return 0;
819  } else {
820  status = ff_AMediaCodec_releaseOutputBuffer(codec, index, 0);
821  if (status < 0) {
822  av_log(avctx, AV_LOG_ERROR, "Failed to release output buffer\n");
823  }
824  }
825 
826  } else if (ff_AMediaCodec_infoOutputFormatChanged(codec, index)) {
827  char *format = NULL;
828 
829  if (s->format) {
830  status = ff_AMediaFormat_delete(s->format);
831  if (status < 0) {
832  av_log(avctx, AV_LOG_ERROR, "Failed to delete MediaFormat %p\n", s->format);
833  }
834  }
835 
836  s->format = ff_AMediaCodec_getOutputFormat(codec);
837  if (!s->format) {
838  av_log(avctx, AV_LOG_ERROR, "Failed to get output format\n");
839  return AVERROR_EXTERNAL;
840  }
841 
842  format = ff_AMediaFormat_toString(s->format);
843  if (!format) {
844  return AVERROR_EXTERNAL;
845  }
846  av_log(avctx, AV_LOG_INFO, "Output MediaFormat changed to %s\n", format);
847  av_freep(&format);
848 
849  if ((ret = mediacodec_dec_parse_format(avctx, s)) < 0) {
850  return ret;
851  }
852 
853  } else if (ff_AMediaCodec_infoOutputBuffersChanged(codec, index)) {
855  } else if (ff_AMediaCodec_infoTryAgainLater(codec, index)) {
856  if (s->draining) {
857  av_log(avctx, AV_LOG_ERROR, "Failed to dequeue output buffer within %" PRIi64 "ms "
858  "while draining remaining frames, output will probably lack frames\n",
859  output_dequeue_timeout_us / 1000);
860  } else {
861  av_log(avctx, AV_LOG_TRACE, "No output buffer available, try again later\n");
862  }
863  } else {
864  av_log(avctx, AV_LOG_ERROR, "Failed to dequeue output buffer (status=%zd)\n", index);
865  return AVERROR_EXTERNAL;
866  }
867 
868  return AVERROR(EAGAIN);
869 }
870 
871 /*
872 * ff_mediacodec_dec_flush returns 0 if the flush cannot be performed on
873 * the codec (because the user retains frames). The codec stays in the
874 * flushing state.
875 *
876 * ff_mediacodec_dec_flush returns 1 if the flush can actually be
877 * performed on the codec. The codec leaves the flushing state and can
878 * process again packets.
879 *
880 * ff_mediacodec_dec_flush returns a negative value if an error has
881 * occurred.
882 */
884 {
885  if (!s->surface || atomic_load(&s->refcount) == 1) {
886  int ret;
887 
888  /* No frames (holding a reference to the codec) are retained by the
889  * user, thus we can flush the codec and returns accordingly */
890  if ((ret = mediacodec_dec_flush_codec(avctx, s)) < 0) {
891  return ret;
892  }
893 
894  return 1;
895  }
896 
897  s->flushing = 1;
898  return 0;
899 }
900 
902 {
904 
905  return 0;
906 }
907 
909 {
910  return s->flushing;
911 }
static const char *const format[]
Definition: af_aiir.c:456
uint8_t
Libavcodec external API header.
#define flags(name, subs,...)
Definition: cbs_av1.c:561
#define s(width, name)
Definition: cbs_vp9.c:257
#define fail()
Definition: checkasm.h:133
common internal and external API header
#define FFMIN(a, b)
Definition: common.h:105
#define NULL
Definition: coverity.c:32
int ff_get_buffer(AVCodecContext *avctx, AVFrame *frame, int flags)
Get a buffer for a frame.
Definition: decode.c:1893
int ff_get_format(AVCodecContext *avctx, const enum AVPixelFormat *fmt)
Select the (possibly hardware accelerated) pixel format.
Definition: decode.c:1317
static AVFrame * frame
#define atomic_load(object)
Definition: stdatomic.h:93
#define atomic_init(obj, value)
Definition: stdatomic.h:33
#define atomic_fetch_sub(object, operand)
Definition: stdatomic.h:137
#define atomic_fetch_add(object, operand)
Definition: stdatomic.h:131
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Definition: buffer.c:125
AVBufferRef * av_buffer_create(uint8_t *data, buffer_size_t size, void(*free)(void *opaque, uint8_t *data), void *opaque, int flags)
Create an AVBuffer from an existing array.
Definition: buffer.c:29
#define AV_BUFFER_FLAG_READONLY
Always treat the buffer as read-only, even when it has only one reference.
Definition: buffer.h:128
#define AVERROR_EXTERNAL
Generic error in an external library.
Definition: error.h:57
#define AVERROR_EOF
End of file.
Definition: error.h:55
#define AVERROR(e)
Definition: error.h:43
#define AV_LOG_TRACE
Extremely verbose debugging, useful for libav* development.
Definition: log.h:220
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:215
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:200
#define AV_LOG_INFO
Standard information.
Definition: log.h:205
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:194
AVRational av_div_q(AVRational b, AVRational c)
Divide one rational by another.
Definition: rational.c:88
int64_t av_rescale_q(int64_t a, AVRational bq, AVRational cq)
Rescale a 64-bit integer by 2 rational numbers.
Definition: mathematics.c:142
void * av_mallocz(size_t size)
Allocate a memory block with alignment suitable for all memory accesses (including vectors if availab...
Definition: mem.c:237
#define AV_NOPTS_VALUE
Undefined timestamp value.
Definition: avutil.h:248
#define AV_TIME_BASE_Q
Internal time base represented as fractional value.
Definition: avutil.h:260
int index
Definition: gxfenc.c:89
@ AV_HWDEVICE_TYPE_MEDIACODEC
Definition: hwcontext.h:38
int i
Definition: input.c:407
int ff_set_sar(AVCodecContext *avctx, AVRational sar)
Check that the provided sample aspect ratio is valid and set it on the codec context.
Definition: utils.c:99
int ff_set_dimensions(AVCodecContext *s, int width, int height)
Check that the provided frame dimensions are valid and set them on the codec context.
Definition: utils.c:84
common internal API header
#define FF_DISABLE_DEPRECATION_WARNINGS
Definition: internal.h:83
#define FF_ENABLE_DEPRECATION_WARNINGS
Definition: internal.h:84
static enum AVPixelFormat pix_fmts[]
Definition: libkvazaar.c:303
const char * desc
Definition: libsvtav1.c:79
#define FFALIGN(x, a)
Definition: macros.h:48
int ff_mediacodec_surface_unref(FFANativeWindow *window, void *log_ctx)
FFANativeWindow * ff_mediacodec_surface_ref(void *surface, void *log_ctx)
void ff_mediacodec_sw_buffer_copy_yuv420_packed_semi_planar_64x32Tile2m8ka(AVCodecContext *avctx, MediaCodecDecContext *s, uint8_t *data, size_t size, FFAMediaCodecBufferInfo *info, AVFrame *frame)
void ff_mediacodec_sw_buffer_copy_yuv420_semi_planar(AVCodecContext *avctx, MediaCodecDecContext *s, uint8_t *data, size_t size, FFAMediaCodecBufferInfo *info, AVFrame *frame)
void ff_mediacodec_sw_buffer_copy_yuv420_planar(AVCodecContext *avctx, MediaCodecDecContext *s, uint8_t *data, size_t size, FFAMediaCodecBufferInfo *info, AVFrame *frame)
The code handling the various YUV color formats is taken from the GStreamer project.
void ff_mediacodec_sw_buffer_copy_yuv420_packed_semi_planar(AVCodecContext *avctx, MediaCodecDecContext *s, uint8_t *data, size_t size, FFAMediaCodecBufferInfo *info, AVFrame *frame)
uint8_t * ff_AMediaCodec_getInputBuffer(FFAMediaCodec *codec, size_t idx, size_t *out_size)
int ff_AMediaCodec_delete(FFAMediaCodec *codec)
int ff_AMediaCodec_configure(FFAMediaCodec *codec, const FFAMediaFormat *format, void *surface, void *crypto, uint32_t flags)
char * ff_AMediaFormat_toString(FFAMediaFormat *format)
int ff_AMediaCodec_start(FFAMediaCodec *codec)
int ff_AMediaCodec_cleanOutputBuffers(FFAMediaCodec *codec)
int ff_AMediaCodec_infoOutputBuffersChanged(FFAMediaCodec *codec, ssize_t idx)
int ff_AMediaFormat_delete(FFAMediaFormat *format)
uint8_t * ff_AMediaCodec_getOutputBuffer(FFAMediaCodec *codec, size_t idx, size_t *out_size)
ssize_t ff_AMediaCodec_dequeueInputBuffer(FFAMediaCodec *codec, int64_t timeoutUs)
char * ff_AMediaCodecList_getCodecNameByType(const char *mime, int profile, int encoder, void *log_ctx)
int ff_AMediaCodec_queueInputBuffer(FFAMediaCodec *codec, size_t idx, off_t offset, size_t size, uint64_t time, uint32_t flags)
int ff_AMediaCodec_releaseOutputBuffer(FFAMediaCodec *codec, size_t idx, int render)
int ff_AMediaCodec_infoTryAgainLater(FFAMediaCodec *codec, ssize_t idx)
int ff_AMediaCodec_flush(FFAMediaCodec *codec)
FFAMediaFormat * ff_AMediaCodec_getOutputFormat(FFAMediaCodec *codec)
int ff_AMediaCodecProfile_getProfileFromAVCodecContext(AVCodecContext *avctx)
The following API around MediaCodec and MediaFormat is based on the NDK one provided by Google since ...
ssize_t ff_AMediaCodec_dequeueOutputBuffer(FFAMediaCodec *codec, FFAMediaCodecBufferInfo *info, int64_t timeoutUs)
int ff_AMediaCodec_infoOutputFormatChanged(FFAMediaCodec *codec, ssize_t idx)
int ff_AMediaCodec_getBufferFlagEndOfStream(FFAMediaCodec *codec)
FFAMediaCodec * ff_AMediaCodec_createCodecByName(const char *name)
@ COLOR_STANDARD_BT601_PAL
@ COLOR_STANDARD_BT709
@ COLOR_STANDARD_BT2020
@ COLOR_STANDARD_BT601_NTSC
static enum AVColorSpace mcdec_get_color_space(int color_standard)
int ff_mediacodec_dec_send(AVCodecContext *avctx, MediaCodecDecContext *s, AVPacket *pkt, bool wait)
static int mediacodec_wrap_hw_buffer(AVCodecContext *avctx, MediaCodecDecContext *s, ssize_t index, FFAMediaCodecBufferInfo *info, AVFrame *frame)
static enum AVColorTransferCharacteristic mcdec_get_color_trc(int color_transfer)
int ff_mediacodec_dec_flush(AVCodecContext *avctx, MediaCodecDecContext *s)
static enum AVColorRange mcdec_get_color_range(int color_range)
int ff_mediacodec_dec_receive(AVCodecContext *avctx, MediaCodecDecContext *s, AVFrame *frame, bool wait)
#define OUTPUT_DEQUEUE_TIMEOUT_US
static int mediacodec_dec_flush_codec(AVCodecContext *avctx, MediaCodecDecContext *s)
enum AVPixelFormat pix_fmt
@ COLOR_TI_FormatYUV420PackedSemiPlanar
@ COLOR_QCOM_FormatYUV420SemiPlanar32m
@ COLOR_TI_FormatYUV420PackedSemiPlanarInterlaced
@ COLOR_FormatYCbYCr
@ COLOR_FormatYUV420Planar
@ COLOR_QCOM_FormatYUV420PackedSemiPlanar64x32Tile2m8ka
@ COLOR_FormatAndroidOpaque
@ COLOR_QCOM_FormatYUV420SemiPlanar
@ COLOR_FormatYUV420SemiPlanar
static int mediacodec_wrap_sw_buffer(AVCodecContext *avctx, MediaCodecDecContext *s, uint8_t *data, size_t size, ssize_t index, FFAMediaCodecBufferInfo *info, AVFrame *frame)
static int mediacodec_dec_parse_format(AVCodecContext *avctx, MediaCodecDecContext *s)
int ff_mediacodec_dec_close(AVCodecContext *avctx, MediaCodecDecContext *s)
@ COLOR_TRANSFER_HLG
@ COLOR_TRANSFER_ST2084
@ COLOR_TRANSFER_SDR_VIDEO
@ COLOR_TRANSFER_LINEAR
#define INPUT_DEQUEUE_TIMEOUT_US
OMX.k3.video.decoder.avc, OMX.NVIDIA.
#define OUTPUT_DEQUEUE_BLOCK_TIMEOUT_US
int ff_mediacodec_dec_init(AVCodecContext *avctx, MediaCodecDecContext *s, const char *mime, FFAMediaFormat *format)
static void ff_mediacodec_dec_ref(MediaCodecDecContext *s)
int ff_mediacodec_dec_is_flushing(AVCodecContext *avctx, MediaCodecDecContext *s)
static enum AVColorPrimaries mcdec_get_color_pri(int color_standard)
int color_format
static void ff_mediacodec_dec_unref(MediaCodecDecContext *s)
#define AMEDIAFORMAT_GET_INT32(name, key, mandatory)
static enum AVPixelFormat mcdec_map_color_format(AVCodecContext *avctx, MediaCodecDecContext *s, int color_format)
@ COLOR_RANGE_FULL
@ COLOR_RANGE_LIMITED
static void mediacodec_buffer_release(void *opaque, uint8_t *data)
static const struct @99 color_formats[]
Memory handling functions.
const char data[16]
Definition: mxf.c:142
pixel format definitions
AVColorRange
Visual content value range.
Definition: pixfmt.h:551
@ AVCOL_RANGE_MPEG
Narrow or limited range content.
Definition: pixfmt.h:569
@ AVCOL_RANGE_UNSPECIFIED
Definition: pixfmt.h:552
@ AVCOL_RANGE_JPEG
Full range content.
Definition: pixfmt.h:586
AVPixelFormat
Pixel format.
Definition: pixfmt.h:64
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
Definition: pixfmt.h:89
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:65
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:66
@ AV_PIX_FMT_MEDIACODEC
hardware decoding through MediaCodec
Definition: pixfmt.h:293
AVColorPrimaries
Chromaticity coordinates of the source primaries.
Definition: pixfmt.h:458
@ AVCOL_PRI_BT470BG
also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM
Definition: pixfmt.h:465
@ AVCOL_PRI_BT709
also ITU-R BT1361 / IEC 61966-2-4 / SMPTE RP177 Annex B
Definition: pixfmt.h:460
@ AVCOL_PRI_UNSPECIFIED
Definition: pixfmt.h:461
@ AVCOL_PRI_SMPTE170M
also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC
Definition: pixfmt.h:466
@ AVCOL_PRI_BT2020
ITU-R BT2020.
Definition: pixfmt.h:469
AVColorTransferCharacteristic
Color Transfer Characteristic.
Definition: pixfmt.h:483
@ AVCOL_TRC_SMPTE170M
also ITU-R BT601-6 525 or 625 / ITU-R BT1358 525 or 625 / ITU-R BT1700 NTSC
Definition: pixfmt.h:490
@ AVCOL_TRC_SMPTEST2084
Definition: pixfmt.h:501
@ AVCOL_TRC_LINEAR
"Linear transfer characteristics"
Definition: pixfmt.h:492
@ AVCOL_TRC_ARIB_STD_B67
ARIB STD-B67, known as "Hybrid log-gamma".
Definition: pixfmt.h:504
@ AVCOL_TRC_UNSPECIFIED
Definition: pixfmt.h:486
AVColorSpace
YUV colorspace type.
Definition: pixfmt.h:512
@ AVCOL_SPC_BT709
also ITU-R BT1361 / IEC 61966-2-4 xvYCC709 / SMPTE RP177 Annex B
Definition: pixfmt.h:514
@ AVCOL_SPC_BT470BG
also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM / IEC 61966-2-4 xvYCC601
Definition: pixfmt.h:518
@ AVCOL_SPC_BT2020_NCL
ITU-R BT2020 non-constant luminance system.
Definition: pixfmt.h:523
@ AVCOL_SPC_UNSPECIFIED
Definition: pixfmt.h:515
@ AVCOL_SPC_SMPTE170M
also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC
Definition: pixfmt.h:519
mfxU16 profile
Definition: qsvenc.c:45
static char buffer[20]
Definition: seek.c:32
#define FF_ARRAY_ELEMS(a)
uint8_t * data
The data buffer.
Definition: buffer.h:92
main external API structure.
Definition: avcodec.h:536
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
Definition: avcodec.h:746
int width
picture width / height.
Definition: avcodec.h:709
enum AVColorRange color_range
MPEG vs JPEG YUV range.
Definition: avcodec.h:1171
AVRational pkt_timebase
Timebase in which pkt_dts/pts and AVPacket.dts/pts are.
Definition: avcodec.h:2085
enum AVColorPrimaries color_primaries
Chromaticity coordinates of the source primaries.
Definition: avcodec.h:1150
AVRational sample_aspect_ratio
sample aspect ratio (0 if unknown) That is the width of a pixel divided by the height of the pixel.
Definition: avcodec.h:915
enum AVColorSpace colorspace
YUV colorspace type.
Definition: avcodec.h:1164
void * hwaccel_context
Hardware accelerator context.
Definition: avcodec.h:1692
enum AVColorTransferCharacteristic color_trc
Color Transfer Characteristic.
Definition: avcodec.h:1157
AVBufferRef * hw_device_ctx
A reference to the AVHWDeviceContext describing the device which will be used by a hardware encoder/d...
Definition: avcodec.h:2270
This structure describes decoded (raw) audio or video data.
Definition: frame.h:318
int64_t pts
Presentation timestamp in time_base units (time when frame should be shown to user).
Definition: frame.h:411
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:332
int width
Definition: frame.h:376
attribute_deprecated int64_t pkt_pts
PTS copied from the AVPacket that was decoded to produce this frame.
Definition: frame.h:419
int height
Definition: frame.h:376
AVBufferRef * buf[AV_NUM_DATA_POINTERS]
AVBuffer references backing the data for this frame.
Definition: frame.h:509
enum AVColorPrimaries color_primaries
Definition: frame.h:564
AVRational sample_aspect_ratio
Sample aspect ratio for the video frame, 0/1 if unknown/unspecified.
Definition: frame.h:406
enum AVColorRange color_range
MPEG vs JPEG YUV range.
Definition: frame.h:562
enum AVColorSpace colorspace
YUV colorspace type.
Definition: frame.h:573
int64_t pkt_dts
DTS copied from the AVPacket that triggered returning this frame.
Definition: frame.h:427
int linesize[AV_NUM_DATA_POINTERS]
For video, size in bytes of each picture line.
Definition: frame.h:349
enum AVColorTransferCharacteristic color_trc
Definition: frame.h:566
int format
format of the frame, -1 if unknown or unset Values correspond to enum AVPixelFormat for video frames,...
Definition: frame.h:391
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:61
void * hwctx
The format-specific data, allocated and freed by libavutil along with this context.
Definition: hwcontext.h:92
enum AVHWDeviceType type
This field identifies the underlying API used for hardware access.
Definition: hwcontext.h:79
This structure holds a reference to a android/view/Surface object that will be used as output by the ...
Definition: mediacodec.h:33
void * surface
android/view/Surface object reference.
Definition: mediacodec.h:38
void * surface
android/view/Surface handle, to be filled by the user.
This structure stores compressed data.
Definition: packet.h:346
int size
Definition: packet.h:370
int64_t pts
Presentation timestamp in AVStream->time_base units; the time at which the decompressed packet will b...
Definition: packet.h:362
uint8_t * data
Definition: packet.h:369
Rational number (pair of numerator and denominator).
Definition: rational.h:58
int num
Numerator.
Definition: rational.h:59
int den
Denominator.
Definition: rational.h:60
#define av_freep(p)
#define av_log(a,...)
AVPacket * pkt
Definition: movenc.c:59
AVFormatContext * ctx
Definition: movenc.c:48
#define height
#define width
timestamp utils, mostly useful for debugging/logging purposes
static int64_t pts
int size
color_range
static const uint8_t offset[127][2]
Definition: vf_spp.c:107