libavcodec/crystalhd.c

/*
 * - CrystalHD decoder module -
 *
 * Copyright(C) 2010,2011 Philip Langdale <ffmpeg.philipl@overt.org>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/*
 * - Principles of Operation -
 *
 * The CrystalHD decoder operates at the bitstream level - which is an even
 * higher level than the decoding hardware you typically see in modern GPUs.
 * This means it has a very simple interface, in principle. You feed demuxed
 * packets in one end and get decoded pictures (fields/frames) out the other.
 *
 * Of course, nothing is ever that simple. Due, at the very least, to b-frame
 * dependencies in the supported formats, the hardware has a delay between
 * when a packet goes in and when a picture comes out. Furthermore, this delay
 * is not just a function of time, but also one of the dependency on additional
 * frames being fed into the decoder to satisfy the b-frame dependencies.
 *
 * As such, a pipeline will build up that is roughly equivalent to the required
 * DPB for the file being played. If that was all it took, things would still
 * be simple - so, of course, it isn't.
 *
 * The hardware has a way of indicating that a picture is ready to be copied out,
 * but this is unreliable - and sometimes the attempt will still fail - so, based
 * on testing, the code will wait until 3 pictures are ready before starting
 * to copy out, and this has the effect of extending the pipeline.
 *
 * Moreover, while it is tempting to say that once the decoder starts outputting
 * frames, the software should never fail to return a frame from a decode(),
 * this is a hard assertion to make, because the stream may switch between
 * differently encoded content (number of b-frames, interlacing, etc) which
 * might require a longer pipeline than before. If that happened, you could
 * deadlock trying to retrieve a frame that can't be decoded without feeding
 * in additional packets.
 *
 * As such, the code will return in the event that a picture cannot be copied
 * out, leading to an increase in the length of the pipeline. This, in turn,
 * means we have to be sensitive to the time it takes to decode a picture;
 * we do not want to give up just because the hardware needed a little more
 * time to prepare the picture! For this reason, there are delays included
 * in the decode() path that ensure that, under normal conditions, the hardware
 * will only fail to return a frame if it really needs additional packets to
 * complete the decoding.
 *
 * Finally, to be explicit, we do not want the pipeline to grow without bound
 * for two reasons: 1) The hardware can only buffer a finite number of packets,
 * and 2) The client application may not be able to cope with arbitrarily long
 * delays in the video path relative to the audio path. For example, MPlayer
 * can only handle a 20 picture delay (although this is arbitrary, and needs
 * to be extended to fully support the CrystalHD where the delay could be up
 * to 32 pictures - consider PAFF H.264 content with 16 b-frames).
 */

/*****************************************************************************
 * Includes
 ****************************************************************************/

#define _XOPEN_SOURCE 600
#include <inttypes.h>
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>

#include <libcrystalhd/bc_dts_types.h>
#include <libcrystalhd/bc_dts_defs.h>
#include <libcrystalhd/libcrystalhd_if.h>

#include "avcodec.h"
#include "h264.h"
#include "libavutil/imgutils.h"
#include "libavutil/intreadwrite.h"
#include "libavutil/opt.h"

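/* Timeout passed to DtsProcOutputNoCopy() when polling the decoder for output. */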
#define OUTPUT_PROC_TIMEOUT 50

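/* Step between the fake timestamps fed to the hardware; the docs describe
 * hardware timestamps as being in units of 100ns. */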
#define TIMESTAMP_UNIT 100000

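/* Initial value (in us) of the adaptive wait used in decode(). */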
#define BASE_WAIT 10000

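/* Amount (in us) by which decode_wait is increased when the hardware falls behind. */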
#define WAIT_UNIT 1000


/*****************************************************************************
 * Module private data
 ****************************************************************************/

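/* Internal return values that tell decode() how to continue after a copy attempt. */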
typedef enum {
    RET_ERROR           = -1,
    RET_OK              = 0,
    RET_COPY_AGAIN      = 1,
    RET_SKIP_NEXT_COPY  = 2,
    RET_COPY_NEXT_FIELD = 3,
} CopyRet;

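/*
 * Maps the fake timestamps handed to the hardware back to the caller's
 * reordered_opaque value and the picture type determined at input time.
 */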
typedef struct OpaqueList {
    struct OpaqueList *next;
    uint64_t fake_timestamp;
    uint64_t reordered_opaque;
    uint8_t pic_type;
} OpaqueList;

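/*
 * Per-instance decoder state. output_ready counts successful status polls
 * before copy-out starts, need_second_field tracks interlaced field pairing,
 * and decode_wait is the adaptive sleep (in us) applied at the end of decode().
 */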
typedef struct {
    AVClass *av_class;
    AVCodecContext *avctx;
    AVFrame pic;
    HANDLE dev;

    AVBitStreamFilterContext *bsfc;
    AVCodecParserContext *parser;

    uint8_t is_70012;
    uint8_t *sps_pps_buf;
    uint32_t sps_pps_size;
    uint8_t is_nal;
    uint8_t output_ready;
    uint8_t need_second_field;
    uint8_t skip_next_output;
    uint64_t decode_wait;

    uint64_t last_picture;

    OpaqueList *head;
    OpaqueList *tail;

    /* Options */
    uint32_t sWidth;
    uint8_t bframe_bug;
} CHDContext;

static const AVOption options[] = {
    { "crystalhd_downscale_width",
      "Turn on downscaling to the specified width",
      offsetof(CHDContext, sWidth),
      FF_OPT_TYPE_INT, 0, 0, UINT32_MAX,
      AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_DECODING_PARAM, },
    { NULL, },
};


/*****************************************************************************
 * Helper functions
 ****************************************************************************/

static inline BC_MEDIA_SUBTYPE id2subtype(CHDContext *priv, enum CodecID id)
{
    switch (id) {
    case CODEC_ID_MPEG4:
        return BC_MSUBTYPE_DIVX;
    case CODEC_ID_MSMPEG4V3:
        return BC_MSUBTYPE_DIVX311;
    case CODEC_ID_MPEG2VIDEO:
        return BC_MSUBTYPE_MPEG2VIDEO;
    case CODEC_ID_VC1:
        return BC_MSUBTYPE_VC1;
    case CODEC_ID_WMV3:
        return BC_MSUBTYPE_WMV3;
    case CODEC_ID_H264:
        return priv->is_nal ? BC_MSUBTYPE_AVC1 : BC_MSUBTYPE_H264;
    default:
        return BC_MSUBTYPE_INVALID;
    }
}

static inline void print_frame_info(CHDContext *priv, BC_DTS_PROC_OUT *output)
{
    av_log(priv->avctx, AV_LOG_VERBOSE, "\tYBuffSz: %u\n", output->YbuffSz);
    av_log(priv->avctx, AV_LOG_VERBOSE, "\tYBuffDoneSz: %u\n",
           output->YBuffDoneSz);
    av_log(priv->avctx, AV_LOG_VERBOSE, "\tUVBuffDoneSz: %u\n",
           output->UVBuffDoneSz);
    av_log(priv->avctx, AV_LOG_VERBOSE, "\tTimestamp: %"PRIu64"\n",
           output->PicInfo.timeStamp);
    av_log(priv->avctx, AV_LOG_VERBOSE, "\tPicture Number: %u\n",
           output->PicInfo.picture_number);
    av_log(priv->avctx, AV_LOG_VERBOSE, "\tWidth: %u\n",
           output->PicInfo.width);
    av_log(priv->avctx, AV_LOG_VERBOSE, "\tHeight: %u\n",
           output->PicInfo.height);
    av_log(priv->avctx, AV_LOG_VERBOSE, "\tChroma: 0x%03x\n",
           output->PicInfo.chroma_format);
    av_log(priv->avctx, AV_LOG_VERBOSE, "\tPulldown: %u\n",
           output->PicInfo.pulldown);
    av_log(priv->avctx, AV_LOG_VERBOSE, "\tFlags: 0x%08x\n",
           output->PicInfo.flags);
    av_log(priv->avctx, AV_LOG_VERBOSE, "\tFrame Rate/Res: %u\n",
           output->PicInfo.frame_rate);
    av_log(priv->avctx, AV_LOG_VERBOSE, "\tAspect Ratio: %u\n",
           output->PicInfo.aspect_ratio);
    av_log(priv->avctx, AV_LOG_VERBOSE, "\tColor Primaries: %u\n",
           output->PicInfo.colour_primaries);
    av_log(priv->avctx, AV_LOG_VERBOSE, "\tMetaData: %u\n",
           output->PicInfo.picture_meta_payload);
    av_log(priv->avctx, AV_LOG_VERBOSE, "\tSession Number: %u\n",
           output->PicInfo.sess_num);
    av_log(priv->avctx, AV_LOG_VERBOSE, "\tycom: %u\n",
           output->PicInfo.ycom);
    av_log(priv->avctx, AV_LOG_VERBOSE, "\tCustom Aspect: %u\n",
           output->PicInfo.custom_aspect_ratio_width_height);
    av_log(priv->avctx, AV_LOG_VERBOSE, "\tFrames to Drop: %u\n",
           output->PicInfo.n_drop);
    av_log(priv->avctx, AV_LOG_VERBOSE, "\tH264 Valid Fields: 0x%08x\n",
           output->PicInfo.other.h264.valid);
}


/*****************************************************************************
 * OpaqueList functions
 ****************************************************************************/

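/*
 * Allocate a new node and assign it the next fake timestamp. Timestamps start
 * at TIMESTAMP_UNIT so that 0 can be used to signal an allocation failure.
 */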
static uint64_t opaque_list_push(CHDContext *priv, uint64_t reordered_opaque,
                                 uint8_t pic_type)
{
    OpaqueList *newNode = av_mallocz(sizeof (OpaqueList));
    if (!newNode) {
        av_log(priv->avctx, AV_LOG_ERROR,
               "Unable to allocate new node in OpaqueList.\n");
        return 0;
    }
    if (!priv->head) {
        newNode->fake_timestamp = TIMESTAMP_UNIT;
        priv->head              = newNode;
    } else {
        newNode->fake_timestamp = priv->tail->fake_timestamp + TIMESTAMP_UNIT;
        priv->tail->next        = newNode;
    }
    priv->tail = newNode;
    newNode->reordered_opaque = reordered_opaque;
    newNode->pic_type = pic_type;

    return newNode->fake_timestamp;
}

/*
 * The OpaqueList is built in decode order, while elements will be removed
 * in presentation order. If frames are reordered, this means we must be
 * able to remove elements that are not the first element.
 *
 * Returned node must be freed by caller.
 */
static OpaqueList *opaque_list_pop(CHDContext *priv, uint64_t fake_timestamp)
{
    OpaqueList *node = priv->head;

    if (!priv->head) {
        av_log(priv->avctx, AV_LOG_ERROR,
               "CrystalHD: Attempted to query non-existent timestamps.\n");
        return NULL;
    }

    /*
     * The first element is special-cased because we have to manipulate
     * the head pointer rather than the previous element in the list.
     */
    if (priv->head->fake_timestamp == fake_timestamp) {
        priv->head = node->next;

        if (!priv->head)
            priv->tail = NULL;

        node->next = NULL;
        return node;
    }

    /*
     * The list is processed at arm's length so that we have the
     * previous element available to rewrite its next pointer.
     */
    while (node->next) {
        OpaqueList *current = node->next;
        if (current->fake_timestamp == fake_timestamp) {
            node->next = current->next;

            if (!node->next)
                priv->tail = node;

            current->next = NULL;
            return current;
        } else {
            node = current;
        }
    }

    av_log(priv->avctx, AV_LOG_VERBOSE,
           "CrystalHD: Couldn't match fake_timestamp.\n");
    return NULL;
}


/*****************************************************************************
 * Video decoder API function definitions
 ****************************************************************************/

static void flush(AVCodecContext *avctx)
{
    CHDContext *priv = avctx->priv_data;

    avctx->has_b_frames     = 0;
    priv->last_picture      = -1;
    priv->output_ready      = 0;
    priv->need_second_field = 0;
    priv->skip_next_output  = 0;
    priv->decode_wait       = BASE_WAIT;

    if (priv->pic.data[0])
        avctx->release_buffer(avctx, &priv->pic);

    /* Flush mode 4 flushes all software and hardware buffers. */
    DtsFlushInput(priv->dev, 4);
}


static av_cold int uninit(AVCodecContext *avctx)
{
    CHDContext *priv = avctx->priv_data;
    HANDLE device;

    device = priv->dev;
    DtsStopDecoder(device);
    DtsCloseDecoder(device);
    DtsDeviceClose(device);

    av_parser_close(priv->parser);
    if (priv->bsfc) {
        av_bitstream_filter_close(priv->bsfc);
    }

    av_free(priv->sps_pps_buf);

    if (priv->pic.data[0])
        avctx->release_buffer(avctx, &priv->pic);

    if (priv->head) {
       OpaqueList *node = priv->head;
       while (node) {
          OpaqueList *next = node->next;
          av_free(node);
          node = next;
       }
    }

    return 0;
}


static av_cold int init(AVCodecContext *avctx)
{
    CHDContext* priv;
    BC_STATUS ret;
    BC_INFO_CRYSTAL version;
    BC_INPUT_FORMAT format = {
        .FGTEnable   = FALSE,
        .Progressive = TRUE,
        .OptFlags    = 0x80000000 | vdecFrameRate59_94 | 0x40,
        .width       = avctx->width,
        .height      = avctx->height,
    };

    BC_MEDIA_SUBTYPE subtype;

    uint32_t mode = DTS_PLAYBACK_MODE |
                    DTS_LOAD_FILE_PLAY_FW |
                    DTS_SKIP_TX_CHK_CPB |
                    DTS_PLAYBACK_DROP_RPT_MODE |
                    DTS_SINGLE_THREADED_MODE |
                    DTS_DFLT_RESOLUTION(vdecRESOLUTION_1080p23_976);

    av_log(avctx, AV_LOG_VERBOSE, "CrystalHD Init for %s\n",
           avctx->codec->name);

    avctx->pix_fmt = PIX_FMT_YUYV422;

    /* Initialize the library */
    priv               = avctx->priv_data;
    priv->avctx        = avctx;
    priv->is_nal       = avctx->extradata_size > 0 && *(avctx->extradata) == 1;
    priv->last_picture = -1;
    priv->decode_wait  = BASE_WAIT;

    subtype = id2subtype(priv, avctx->codec->id);
    switch (subtype) {
    case BC_MSUBTYPE_AVC1:
        {
            uint8_t *dummy_p;
            int dummy_int;

            uint32_t orig_data_size = avctx->extradata_size;
            uint8_t *orig_data = av_malloc(orig_data_size);
            if (!orig_data) {
                av_log(avctx, AV_LOG_ERROR,
                       "Failed to allocate copy of extradata\n");
                return AVERROR(ENOMEM);
            }
            memcpy(orig_data, avctx->extradata, orig_data_size);


            priv->bsfc = av_bitstream_filter_init("h264_mp4toannexb");
            if (!priv->bsfc) {
                av_log(avctx, AV_LOG_ERROR,
                       "Cannot open the h264_mp4toannexb BSF!\n");
                av_free(orig_data);
                return AVERROR_BSF_NOT_FOUND;
            }
            av_bitstream_filter_filter(priv->bsfc, avctx, NULL, &dummy_p,
                                       &dummy_int, NULL, 0, 0);

            priv->sps_pps_buf     = avctx->extradata;
            priv->sps_pps_size    = avctx->extradata_size;
            avctx->extradata      = orig_data;
            avctx->extradata_size = orig_data_size;

            format.pMetaData   = priv->sps_pps_buf;
            format.metaDataSz  = priv->sps_pps_size;
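            /* The length of the NAL size field is stored in the low two bits
             * of the fifth byte of the avcC extradata. */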
            format.startCodeSz = (avctx->extradata[4] & 0x03) + 1;
        }
        break;
    case BC_MSUBTYPE_H264:
        format.startCodeSz = 4;
        // Fall-through
    case BC_MSUBTYPE_VC1:
    case BC_MSUBTYPE_WVC1:
    case BC_MSUBTYPE_WMV3:
    case BC_MSUBTYPE_WMVA:
    case BC_MSUBTYPE_MPEG2VIDEO:
    case BC_MSUBTYPE_DIVX:
    case BC_MSUBTYPE_DIVX311:
        format.pMetaData  = avctx->extradata;
        format.metaDataSz = avctx->extradata_size;
        break;
    default:
        av_log(avctx, AV_LOG_ERROR, "CrystalHD: Unknown codec name\n");
        return AVERROR(EINVAL);
    }
    format.mSubtype = subtype;

    if (priv->sWidth) {
        format.bEnableScaling = 1;
        format.ScalingParams.sWidth = priv->sWidth;
    }

    /* Get a decoder instance */
    av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: starting up\n");
    // Initialize the Link and Decoder devices
    ret = DtsDeviceOpen(&priv->dev, mode);
    if (ret != BC_STS_SUCCESS) {
        av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: DtsDeviceOpen failed\n");
        goto fail;
    }

    ret = DtsCrystalHDVersion(priv->dev, &version);
    if (ret != BC_STS_SUCCESS) {
        av_log(avctx, AV_LOG_VERBOSE,
               "CrystalHD: DtsCrystalHDVersion failed\n");
        goto fail;
    }
    priv->is_70012 = version.device == 0;

    if (priv->is_70012 &&
        (subtype == BC_MSUBTYPE_DIVX || subtype == BC_MSUBTYPE_DIVX311)) {
        av_log(avctx, AV_LOG_VERBOSE,
               "CrystalHD: BCM70012 doesn't support MPEG4-ASP/DivX/Xvid\n");
        goto fail;
    }

    ret = DtsSetInputFormat(priv->dev, &format);
    if (ret != BC_STS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "CrystalHD: SetInputFormat failed\n");
        goto fail;
    }

    ret = DtsOpenDecoder(priv->dev, BC_STREAM_TYPE_ES);
    if (ret != BC_STS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "CrystalHD: DtsOpenDecoder failed\n");
        goto fail;
    }

    ret = DtsSetColorSpace(priv->dev, OUTPUT_MODE422_YUY2);
    if (ret != BC_STS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "CrystalHD: DtsSetColorSpace failed\n");
        goto fail;
    }
    ret = DtsStartDecoder(priv->dev);
    if (ret != BC_STS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "CrystalHD: DtsStartDecoder failed\n");
        goto fail;
    }
    ret = DtsStartCapture(priv->dev);
    if (ret != BC_STS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "CrystalHD: DtsStartCapture failed\n");
        goto fail;
    }

    if (avctx->codec->id == CODEC_ID_H264) {
        priv->parser = av_parser_init(avctx->codec->id);
        if (!priv->parser)
            av_log(avctx, AV_LOG_WARNING,
                   "Cannot open the h.264 parser! Interlaced h.264 content "
                   "will not be detected reliably.\n");
        else
            priv->parser->flags = PARSER_FLAG_COMPLETE_FRAMES;
    }
    av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: Init complete.\n");

    return 0;

 fail:
    uninit(avctx);
    return -1;
}


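/*
 * Copy one decoded picture (or field) out of the hardware buffer into
 * priv->pic and tell the caller, via a CopyRet value, how to proceed.
 */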
static inline CopyRet copy_frame(AVCodecContext *avctx,
                                 BC_DTS_PROC_OUT *output,
                                 void *data, int *data_size)
{
    BC_STATUS ret;
    BC_DTS_STATUS decoder_status;
    uint8_t trust_interlaced;
    uint8_t interlaced;

    CHDContext *priv = avctx->priv_data;
    int64_t pkt_pts  = AV_NOPTS_VALUE;
    uint8_t pic_type = 0;

    uint8_t bottom_field = (output->PicInfo.flags & VDEC_FLAG_BOTTOMFIELD) ==
                           VDEC_FLAG_BOTTOMFIELD;
    uint8_t bottom_first = !!(output->PicInfo.flags & VDEC_FLAG_BOTTOM_FIRST);

    int width    = output->PicInfo.width;
    int height   = output->PicInfo.height;
    int bwidth;
    uint8_t *src = output->Ybuff;
    int sStride;
    uint8_t *dst;
    int dStride;

    if (output->PicInfo.timeStamp != 0) {
        OpaqueList *node = opaque_list_pop(priv, output->PicInfo.timeStamp);
        if (node) {
            pkt_pts = node->reordered_opaque;
            pic_type = node->pic_type;
            av_free(node);
        } else {
            /*
             * We will encounter a situation where a timestamp cannot be
             * popped if a second field is being returned. In this case,
             * each field has the same timestamp and the first one will
             * cause it to be popped. To keep subsequent calculations
             * simple, pic_type should be set to a FIELD value - it doesn't
             * matter which, but I chose BOTTOM.
             */
            pic_type = PICT_BOTTOM_FIELD;
        }
        av_log(avctx, AV_LOG_VERBOSE, "output \"pts\": %"PRIu64"\n",
               output->PicInfo.timeStamp);
        av_log(avctx, AV_LOG_VERBOSE, "output picture type %d\n",
               pic_type);
    }

    ret = DtsGetDriverStatus(priv->dev, &decoder_status);
    if (ret != BC_STS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR,
               "CrystalHD: GetDriverStatus failed: %u\n", ret);
        return RET_ERROR;
    }

    /*
     * For most content, we can trust the interlaced flag returned
     * by the hardware, but sometimes we can't. These are the
     * conditions under which we can trust the flag:
     *
     * 1) It's not h.264 content
     * 2) The UNKNOWN_SRC flag is not set
     * 3) We know we're expecting a second field
     * 4) The hardware reports this picture and the next picture
     *    have the same picture number.
     *
     * Note that there can still be interlaced content that will
     * fail this check, if the hardware hasn't decoded the next
     * picture or if there is a corruption in the stream. (In either
     * case a 0 will be returned for the next picture number)
     */
    trust_interlaced = avctx->codec->id != CODEC_ID_H264 ||
                       !(output->PicInfo.flags & VDEC_FLAG_UNKNOWN_SRC) ||
                       priv->need_second_field ||
                       (decoder_status.picNumFlags & ~0x40000000) ==
                       output->PicInfo.picture_number;

    /*
     * If we got a false negative for trust_interlaced on the first field,
     * we will realise our mistake here when we see that the picture number is that
     * of the previous picture. We cannot recover the frame and should discard the
     * second field to keep the correct number of output frames.
     */
    if (output->PicInfo.picture_number == priv->last_picture && !priv->need_second_field) {
        av_log(avctx, AV_LOG_WARNING,
               "Incorrectly guessed progressive frame. Discarding second field\n");
        /* Returning without providing a picture. */
        return RET_OK;
    }

    interlaced = (output->PicInfo.flags & VDEC_FLAG_INTERLACED_SRC) &&
                 trust_interlaced;

    if (!trust_interlaced && (decoder_status.picNumFlags & ~0x40000000) == 0) {
        av_log(avctx, AV_LOG_VERBOSE,
               "Next picture number unknown. Assuming progressive frame.\n");
    }

    av_log(avctx, AV_LOG_VERBOSE, "Interlaced state: %d | trust_interlaced %d\n",
           interlaced, trust_interlaced);

    if (priv->pic.data[0] && !priv->need_second_field)
        avctx->release_buffer(avctx, &priv->pic);

    priv->need_second_field = interlaced && !priv->need_second_field;

    priv->pic.buffer_hints = FF_BUFFER_HINTS_VALID | FF_BUFFER_HINTS_PRESERVE |
                             FF_BUFFER_HINTS_REUSABLE;
    if (!priv->pic.data[0]) {
        if (avctx->get_buffer(avctx, &priv->pic) < 0) {
            av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
            return RET_ERROR;
        }
    }

    bwidth = av_image_get_linesize(avctx->pix_fmt, width, 0);
    if (priv->is_70012) {
        int pStride;

        if (width <= 720)
            pStride = 720;
        else if (width <= 1280)
            pStride = 1280;
        else
            pStride = 1920;
        sStride = av_image_get_linesize(avctx->pix_fmt, pStride, 0);
    } else {
        sStride = bwidth;
    }

    dStride = priv->pic.linesize[0];
    dst     = priv->pic.data[0];

    av_log(priv->avctx, AV_LOG_VERBOSE, "CrystalHD: Copying out frame\n");

    if (interlaced) {
        int dY = 0;
        int sY = 0;

        height /= 2;
        if (bottom_field) {
            av_log(priv->avctx, AV_LOG_VERBOSE, "Interlaced: bottom field\n");
            dY = 1;
        } else {
            av_log(priv->avctx, AV_LOG_VERBOSE, "Interlaced: top field\n");
            dY = 0;
        }

        for (sY = 0; sY < height; dY += 2, sY++)
            memcpy(&(dst[dY * dStride]), &(src[sY * sStride]), bwidth);
    } else {
        av_image_copy_plane(dst, dStride, src, sStride, bwidth, height);
    }

    priv->pic.interlaced_frame = interlaced;
    if (interlaced)
        priv->pic.top_field_first = !bottom_first;

    priv->pic.pkt_pts = pkt_pts;

    if (!priv->need_second_field) {
        *data_size       = sizeof(AVFrame);
        *(AVFrame *)data = priv->pic;
    }

    /*
     * Two types of PAFF content have been observed. One form causes the
     * hardware to return a field pair and the other individual fields,
     * even though the input is always individual fields. We must skip
     * copying on the next decode() call to maintain pipeline length in
     * the first case.
     */
    if (!interlaced && (output->PicInfo.flags & VDEC_FLAG_UNKNOWN_SRC) &&
        (pic_type == PICT_TOP_FIELD || pic_type == PICT_BOTTOM_FIELD)) {
        av_log(priv->avctx, AV_LOG_VERBOSE, "Fieldpair from two packets.\n");
        return RET_SKIP_NEXT_COPY;
    }

    /*
     * Testing has shown that in all cases where we don't want to return the
     * full frame immediately, VDEC_FLAG_UNKNOWN_SRC is set.
     */
    return priv->need_second_field &&
           !(output->PicInfo.flags & VDEC_FLAG_UNKNOWN_SRC) ?
           RET_COPY_NEXT_FIELD : RET_OK;
}


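/*
 * Poll the hardware for output. On success the picture is handed to
 * copy_frame(); picture-number continuity and the pipeline length are
 * also tracked here.
 */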
static inline CopyRet receive_frame(AVCodecContext *avctx,
                                    void *data, int *data_size)
{
    BC_STATUS ret;
    BC_DTS_PROC_OUT output = {
        .PicInfo.width  = avctx->width,
        .PicInfo.height = avctx->height,
    };
    CHDContext *priv = avctx->priv_data;
    HANDLE dev       = priv->dev;

    *data_size = 0;

    // Request decoded data from the driver
    ret = DtsProcOutputNoCopy(dev, OUTPUT_PROC_TIMEOUT, &output);
    if (ret == BC_STS_FMT_CHANGE) {
        av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: Initial format change\n");
        avctx->width  = output.PicInfo.width;
        avctx->height = output.PicInfo.height;
        return RET_COPY_AGAIN;
    } else if (ret == BC_STS_SUCCESS) {
        int copy_ret = -1;
        if (output.PoutFlags & BC_POUT_FLAGS_PIB_VALID) {
            if (priv->last_picture == -1) {
                /*
                 * Init to one less, so that the incrementing code doesn't
                 * need to be special-cased.
                 */
                priv->last_picture = output.PicInfo.picture_number - 1;
            }

            if (avctx->codec->id == CODEC_ID_MPEG4 &&
                output.PicInfo.timeStamp == 0 && priv->bframe_bug) {
                av_log(avctx, AV_LOG_VERBOSE,
                       "CrystalHD: Not returning packed frame twice.\n");
                priv->last_picture++;
                DtsReleaseOutputBuffs(dev, NULL, FALSE);
                return RET_COPY_AGAIN;
            }

            print_frame_info(priv, &output);

            if (priv->last_picture + 1 < output.PicInfo.picture_number) {
                av_log(avctx, AV_LOG_WARNING,
                       "CrystalHD: Picture Number discontinuity\n");
                /*
                 * Have we lost frames? If so, we need to shrink the
                 * pipeline length appropriately.
                 *
                 * XXX: I have no idea what the semantics of this situation
                 * are so I don't even know if we've lost frames or which
                 * ones.
                 *
                 * In any case, only warn the first time.
                 */
                priv->last_picture = output.PicInfo.picture_number - 1;
            }

            copy_ret = copy_frame(avctx, &output, data, data_size);
            if (*data_size > 0) {
                avctx->has_b_frames--;
                priv->last_picture++;
                av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: Pipeline length: %u\n",
                       avctx->has_b_frames);
            }
        } else {
            /*
             * An invalid frame has been consumed.
             */
            av_log(avctx, AV_LOG_ERROR, "CrystalHD: ProcOutput succeeded with "
                                        "invalid PIB\n");
            avctx->has_b_frames--;
            copy_ret = RET_OK;
        }
        DtsReleaseOutputBuffs(dev, NULL, FALSE);

        return copy_ret;
    } else if (ret == BC_STS_BUSY) {
        return RET_COPY_AGAIN;
    } else {
        av_log(avctx, AV_LOG_ERROR, "CrystalHD: ProcOutput failed %d\n", ret);
        return RET_ERROR;
    }
}


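/*
 * Feed one demuxed packet to the hardware and, once the pipeline has been
 * primed, try to retrieve one decoded frame. Returns the number of bytes
 * consumed from the packet.
 */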
static int decode(AVCodecContext *avctx, void *data, int *data_size, AVPacket *avpkt)
{
    BC_STATUS ret;
    BC_DTS_STATUS decoder_status;
    CopyRet rec_ret;
    CHDContext *priv   = avctx->priv_data;
    HANDLE dev         = priv->dev;
    int len            = avpkt->size;
    uint8_t pic_type   = 0;

    av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: decode_frame\n");

    if (avpkt->size == 7 && !priv->bframe_bug) {
        /*
         * The use of a drop frame triggers the bug
         */
        av_log(avctx, AV_LOG_INFO,
               "CrystalHD: Enabling work-around for packed b-frame bug\n");
        priv->bframe_bug = 1;
    } else if (avpkt->size == 8 && priv->bframe_bug) {
        /*
         * Delay frames don't trigger the bug
         */
        av_log(avctx, AV_LOG_INFO,
               "CrystalHD: Disabling work-around for packed b-frame bug\n");
        priv->bframe_bug = 0;
    }

    if (len) {
        int32_t tx_free = (int32_t)DtsTxFreeSize(dev);

        if (priv->parser) {
            uint8_t *in_data = avpkt->data;
            int in_len = len;
            int ret = 0;

            if (priv->bsfc) {
                ret = av_bitstream_filter_filter(priv->bsfc, avctx, NULL,
                                                 &in_data, &in_len,
                                                 avpkt->data, len, 0);
            }

            if (ret >= 0) {
                uint8_t *pout;
                int psize;
                int index;
                H264Context *h = priv->parser->priv_data;

                index = av_parser_parse2(priv->parser, avctx, &pout, &psize,
                                         in_data, in_len, avctx->pkt->pts,
                                         avctx->pkt->dts, 0);
                if (index < 0) {
                    av_log(avctx, AV_LOG_WARNING,
                           "CrystalHD: Failed to parse h.264 packet to "
                           "detect interlacing.\n");
                } else if (index != in_len) {
                    av_log(avctx, AV_LOG_WARNING,
                           "CrystalHD: Failed to parse h.264 packet "
                           "completely. Interlaced frames may be "
                           "incorrectly detected.\n");
                } else {
                    av_log(avctx, AV_LOG_VERBOSE,
                           "CrystalHD: parser picture type %d\n",
                           h->s.picture_structure);
                    pic_type = h->s.picture_structure;
                }
            } else {
                av_log(avctx, AV_LOG_WARNING,
                       "CrystalHD: mp4toannexb filter failed to filter "
                       "packet. Interlaced frames may be incorrectly "
                       "detected.\n");
            }
            if (ret > 0) {
                av_freep(&in_data);
            }
        }

        if (len < tx_free - 1024) {
            /*
             * Despite being notionally opaque, either libcrystalhd or
             * the hardware itself will mangle pts values that are too
             * small or too large. The docs claim it should be in units
             * of 100ns. Given that we're nominally dealing with a black
             * box on both sides, any transform we do has no guarantee of
             * avoiding mangling so we need to build a mapping to values
             * we know will not be mangled.
             */
            uint64_t pts = opaque_list_push(priv, avctx->pkt->pts, pic_type);
            if (!pts) {
                return AVERROR(ENOMEM);
            }
            av_log(priv->avctx, AV_LOG_VERBOSE,
                   "input \"pts\": %"PRIu64"\n", pts);
            ret = DtsProcInput(dev, avpkt->data, len, pts, 0);
            if (ret == BC_STS_BUSY) {
                av_log(avctx, AV_LOG_WARNING,
                       "CrystalHD: ProcInput returned busy\n");
                usleep(BASE_WAIT);
                return AVERROR(EBUSY);
            } else if (ret != BC_STS_SUCCESS) {
                av_log(avctx, AV_LOG_ERROR,
                       "CrystalHD: ProcInput failed: %u\n", ret);
                return -1;
            }
            avctx->has_b_frames++;
        } else {
            av_log(avctx, AV_LOG_WARNING, "CrystalHD: Input buffer full\n");
            len = 0; // We didn't consume any bytes.
        }
    } else {
        av_log(avctx, AV_LOG_INFO, "CrystalHD: No more input data\n");
    }

    if (priv->skip_next_output) {
        av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: Skipping next output.\n");
        priv->skip_next_output = 0;
        avctx->has_b_frames--;
        return len;
    }

    ret = DtsGetDriverStatus(dev, &decoder_status);
    if (ret != BC_STS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "CrystalHD: GetDriverStatus failed\n");
        return -1;
    }

    /*
     * No frames ready. Don't try to extract.
     *
     * Empirical testing shows that ReadyListCount can be a damn lie,
     * and ProcOut still fails when count > 0. The same testing showed
     * that two more iterations were needed before ProcOutput would
     * succeed.
     */
    if (priv->output_ready < 2) {
        if (decoder_status.ReadyListCount != 0)
            priv->output_ready++;
        usleep(BASE_WAIT);
        av_log(avctx, AV_LOG_INFO, "CrystalHD: Filling pipeline.\n");
        return len;
    } else if (decoder_status.ReadyListCount == 0) {
        /*
         * After the pipeline is established, if we encounter a lack of frames
         * that probably means we're not giving the hardware enough time to
         * decode them, so start increasing the wait time at the end of a
         * decode call.
         */
        usleep(BASE_WAIT);
        priv->decode_wait += WAIT_UNIT;
        av_log(avctx, AV_LOG_INFO, "CrystalHD: No frames ready. Returning\n");
        return len;
    }

    do {
        rec_ret = receive_frame(avctx, data, data_size);
        if (rec_ret == RET_OK && *data_size == 0) {
            /*
             * This case is for when the encoded fields are stored
             * separately and we get a separate avpkt for each one. To keep
             * the pipeline stable, we should return nothing and wait for
             * the next time round to grab the second field.
             * H.264 PAFF is an example of this.
             */
            av_log(avctx, AV_LOG_VERBOSE, "Returning after first field.\n");
            avctx->has_b_frames--;
        } else if (rec_ret == RET_COPY_NEXT_FIELD) {
            /*
             * This case is for when the encoded fields are stored in a
             * single avpkt but the hardware returns them separately. Unless
             * we grab the second field before returning, we'll slip another
             * frame in the pipeline and if that happens a lot, we're sunk.
             * So we have to get that second field now.
             * Interlaced mpeg2 and vc1 are examples of this.
             */
            av_log(avctx, AV_LOG_VERBOSE, "Trying to get second field.\n");
            while (1) {
                usleep(priv->decode_wait);
                ret = DtsGetDriverStatus(dev, &decoder_status);
                if (ret == BC_STS_SUCCESS &&
                    decoder_status.ReadyListCount > 0) {
                    rec_ret = receive_frame(avctx, data, data_size);
                    if ((rec_ret == RET_OK && *data_size > 0) ||
                        rec_ret == RET_ERROR)
                        break;
                }
            }
            av_log(avctx, AV_LOG_VERBOSE, "CrystalHD: Got second field.\n");
        } else if (rec_ret == RET_SKIP_NEXT_COPY) {
            /*
             * Two input packets got turned into a field pair. Gawd.
             */
            av_log(avctx, AV_LOG_VERBOSE,
                   "Don't output on next decode call.\n");
            priv->skip_next_output = 1;
        }
        /*
         * If rec_ret == RET_COPY_AGAIN, that means that either we just handled
         * a FMT_CHANGE event and need to go around again for the actual frame,
         * we got a busy status and need to try again, or we're dealing with
         * packed b-frames, where the hardware strangely returns the packed
         * p-frame twice. We choose to keep the second copy as it carries the
         * valid pts.
         */
    } while (rec_ret == RET_COPY_AGAIN);
    usleep(priv->decode_wait);
    return len;
}


#if CONFIG_H264_CRYSTALHD_DECODER
static AVClass h264_class = {
    "h264_crystalhd",
    av_default_item_name,
    options,
    LIBAVUTIL_VERSION_INT,
};

AVCodec ff_h264_crystalhd_decoder = {
    .name           = "h264_crystalhd",
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = CODEC_ID_H264,
    .priv_data_size = sizeof(CHDContext),
    .init           = init,
    .close          = uninit,
    .decode         = decode,
    .capabilities   = CODEC_CAP_DR1 | CODEC_CAP_DELAY | CODEC_CAP_EXPERIMENTAL,
    .flush          = flush,
    .long_name      = NULL_IF_CONFIG_SMALL("H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10 (CrystalHD acceleration)"),
    .pix_fmts       = (const enum PixelFormat[]){PIX_FMT_YUYV422, PIX_FMT_NONE},
    .priv_class     = &h264_class,
};
#endif

#if CONFIG_MPEG2_CRYSTALHD_DECODER
static AVClass mpeg2_class = {
    "mpeg2_crystalhd",
    av_default_item_name,
    options,
    LIBAVUTIL_VERSION_INT,
};

AVCodec ff_mpeg2_crystalhd_decoder = {
    .name           = "mpeg2_crystalhd",
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = CODEC_ID_MPEG2VIDEO,
    .priv_data_size = sizeof(CHDContext),
    .init           = init,
    .close          = uninit,
    .decode         = decode,
    .capabilities   = CODEC_CAP_DR1 | CODEC_CAP_DELAY | CODEC_CAP_EXPERIMENTAL,
    .flush          = flush,
    .long_name      = NULL_IF_CONFIG_SMALL("MPEG-2 Video (CrystalHD acceleration)"),
    .pix_fmts       = (const enum PixelFormat[]){PIX_FMT_YUYV422, PIX_FMT_NONE},
    .priv_class     = &mpeg2_class,
};
#endif

#if CONFIG_MPEG4_CRYSTALHD_DECODER
static AVClass mpeg4_class = {
    "mpeg4_crystalhd",
    av_default_item_name,
    options,
    LIBAVUTIL_VERSION_INT,
};

AVCodec ff_mpeg4_crystalhd_decoder = {
    .name           = "mpeg4_crystalhd",
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = CODEC_ID_MPEG4,
    .priv_data_size = sizeof(CHDContext),
    .init           = init,
    .close          = uninit,
    .decode         = decode,
    .capabilities   = CODEC_CAP_DR1 | CODEC_CAP_DELAY | CODEC_CAP_EXPERIMENTAL,
    .flush          = flush,
    .long_name      = NULL_IF_CONFIG_SMALL("MPEG-4 Part 2 (CrystalHD acceleration)"),
    .pix_fmts       = (const enum PixelFormat[]){PIX_FMT_YUYV422, PIX_FMT_NONE},
    .priv_class     = &mpeg4_class,
};
#endif

#if CONFIG_MSMPEG4_CRYSTALHD_DECODER
static AVClass msmpeg4_class = {
    "msmpeg4_crystalhd",
    av_default_item_name,
    options,
    LIBAVUTIL_VERSION_INT,
};

AVCodec ff_msmpeg4_crystalhd_decoder = {
    .name           = "msmpeg4_crystalhd",
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = CODEC_ID_MSMPEG4V3,
    .priv_data_size = sizeof(CHDContext),
    .init           = init,
    .close          = uninit,
    .decode         = decode,
    .capabilities   = CODEC_CAP_DR1 | CODEC_CAP_DELAY | CODEC_CAP_EXPERIMENTAL,
    .flush          = flush,
    .long_name      = NULL_IF_CONFIG_SMALL("MPEG-4 Part 2 Microsoft variant version 3 (CrystalHD acceleration)"),
    .pix_fmts       = (const enum PixelFormat[]){PIX_FMT_YUYV422, PIX_FMT_NONE},
    .priv_class     = &msmpeg4_class,
};
#endif

#if CONFIG_VC1_CRYSTALHD_DECODER
static AVClass vc1_class = {
    "vc1_crystalhd",
    av_default_item_name,
    options,
    LIBAVUTIL_VERSION_INT,
};

AVCodec ff_vc1_crystalhd_decoder = {
    .name           = "vc1_crystalhd",
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = CODEC_ID_VC1,
    .priv_data_size = sizeof(CHDContext),
    .init           = init,
    .close          = uninit,
    .decode         = decode,
    .capabilities   = CODEC_CAP_DR1 | CODEC_CAP_DELAY | CODEC_CAP_EXPERIMENTAL,
    .flush          = flush,
    .long_name      = NULL_IF_CONFIG_SMALL("SMPTE VC-1 (CrystalHD acceleration)"),
    .pix_fmts       = (const enum PixelFormat[]){PIX_FMT_YUYV422, PIX_FMT_NONE},
    .priv_class     = &vc1_class,
};
#endif

#if CONFIG_WMV3_CRYSTALHD_DECODER
static AVClass wmv3_class = {
    "wmv3_crystalhd",
    av_default_item_name,
    options,
    LIBAVUTIL_VERSION_INT,
};

AVCodec ff_wmv3_crystalhd_decoder = {
    .name           = "wmv3_crystalhd",
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = CODEC_ID_WMV3,
    .priv_data_size = sizeof(CHDContext),
    .init           = init,
    .close          = uninit,
    .decode         = decode,
    .capabilities   = CODEC_CAP_DR1 | CODEC_CAP_DELAY | CODEC_CAP_EXPERIMENTAL,
    .flush          = flush,
    .long_name      = NULL_IF_CONFIG_SMALL("Windows Media Video 9 (CrystalHD acceleration)"),
    .pix_fmts       = (const enum PixelFormat[]){PIX_FMT_YUYV422, PIX_FMT_NONE},
    .priv_class     = &wmv3_class,
};
#endif
