FFmpeg AVPacket Analysis and Usage

Background
AVPacket stores compressed data (for video, an H.264 or similar bitstream; for audio, an AAC/MP3 or similar bitstream). Simply put, it carries one video NAL unit, or several audio NAL units. An AVPacket holds the pre-decode data of a NAL unit; the structure does not contain the data itself but keeps a pointer to the data area. The data in the data field of the AVPacket passed to avcodec_send_packet starts with 00 00 00 01, which marks it as NALU (Annex-B) formatted data.
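As a minimal illustration of that start code, a buffer could be checked with a small helper (a hypothetical function for this article, not part of FFmpeg):

#include <stdint.h>
#include <string.h>

/* Sketch: returns 1 if the buffer begins with the 4-byte Annex-B start code
 * 00 00 00 01 (hypothetical helper, not an FFmpeg API). */
static int has_annexb_start_code(const uint8_t *data, int size)
{
    static const uint8_t start_code[4] = { 0x00, 0x00, 0x00, 0x01 };
    return size >= 4 && memcmp(data, start_code, sizeof(start_code)) == 0;
}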


Key structure members
AVBufferRef *buf;   // reference count for the packet's compressed data, plus the pointer to the buffer that actually holds it (the memory for the compressed data is allocated here)
uint8_t *data;      // pointer to the compressed data (points to the same memory as buf->data)
int size;           // length of the compressed data
int stream_index;   // index identifying whether the packet belongs to the video or the audio stream

Hands-on: building an AVPacket that contains one NAL unit of length nLen
AVPacket pkt1, *packet = &pkt1;
av_new_packet(packet, nLen);
memcpy(packet->data, data, nLen);
packet->size = nLen;
packet->stream_index = 0;
The packet can then be appended to a queue to wait for a frame to be decoded, or passed to avcodec_decode_video2 for decoding. After decoding, call av_free_packet or av_packet_unref to release its resources.
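For context, a minimal decoding sketch using the send/receive API mentioned above (assuming ctx is an already opened H.264 AVCodecContext and frame was allocated with av_frame_alloc(); error handling trimmed):

/* Sketch: feed the packet built above to a decoder and drain any frames. */
int ret = avcodec_send_packet(ctx, packet);
if (ret >= 0) {
    while (avcodec_receive_frame(ctx, frame) >= 0) {
        /* a decoded picture is now available in frame->data[] */
        av_frame_unref(frame);
    }
}
av_packet_unref(packet);  /* releases the buffer allocated by av_new_packet() */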

Questions and answers
1) Why not allocate memory for packet->data directly and then copy the data into it? According to the structure definition, AVBufferRef is only the reference count for the data and may be NULL, meaning the data is not reference-counted. The code above could therefore be modified as follows (not recommended):
av_init_packet(packet);  // initialize the structure, in particular AVBufferRef *buf
packet->data = (uint8_t *)malloc(sizeof(uint8_t) * nLen);
memcpy(packet->data, data, nLen);
packet->size = nLen;
packet->stream_index = 0;
Note: av_init_packet(packet) initializes the structure, in particular AVBufferRef *buf, so that an invalid pointer is not dereferenced during decoding.
Why it is not recommended: av_free_packet and av_packet_unref can no longer be used to release the resources, and the memory allocated for packet->data must be freed manually, because both functions only release the buf member of the AVPacket, not data (a cleanup sketch follows the two listings below):
void av_free_packet(AVPacket *pkt)
{
    if (pkt) {
        if (pkt->buf)
            av_buffer_unref(&pkt->buf);
        pkt->data            = NULL;
        pkt->size            = 0;
        av_packet_free_side_data(pkt);
    }
}

void av_packet_unref(AVPacket *pkt)
{
    av_packet_free_side_data(pkt);
    av_buffer_unref(&pkt->buf);
    av_init_packet(pkt);
    pkt->data = NULL;
    pkt->size = 0;
}
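To make that burden concrete, a minimal sketch of the cleanup the not-recommended variant requires (continuing the malloc-based example above):

/* Sketch: manual cleanup for the malloc-based packet. av_packet_unref()
 * would only touch packet->buf (NULL here), so data must be freed by hand. */
free(packet->data);
packet->data = NULL;
packet->size = 0;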

2) Why does the data pointer inside AVBufferRef point to the same address as the outer data pointer, while the two size fields differ? From the source of av_new_packet:
int av_new_packet(AVPacket *pkt, int size)
{
    AVBufferRef *buf = NULL;
    int ret = packet_alloc(&buf, size);
    if (ret < 0)
        return ret;
    av_init_packet(pkt);
    pkt->buf  = buf;
    pkt->data = buf->data;
    pkt->size = size;
    return 0;
}
we can see that both data pointers refer to the same block of memory; the difference is that when the memory is allocated the buffer is byte-aligned, with an extra AV_INPUT_BUFFER_PADDING_SIZE bytes allocated as trailing padding.
void av_init_packet(AVPacket *pkt)
{
    pkt->pts                  = AV_NOPTS_VALUE;
    pkt->dts                  = AV_NOPTS_VALUE;
    pkt->pos                  = -1;
    pkt->duration             = 0;
#if FF_API_CONVERGENCE_DURATION
FF_DISABLE_DEPRECATION_WARNINGS
    pkt->convergence_duration = 0;
FF_ENABLE_DEPRECATION_WARNINGS
#endif
    pkt->flags                = 0;
    pkt->stream_index         = 0;
    pkt->buf                  = NULL;
    pkt->side_data            = NULL;
    pkt->side_data_elems      = 0;
}
// AV_INPUT_BUFFER_PADDING_SIZE provides alignment padding so the data can be accessed conveniently and safely
static int packet_alloc(AVBufferRef **buf, int size)
{
    int ret;
    if (size < 0 || size >= INT_MAX - AV_INPUT_BUFFER_PADDING_SIZE)
        return AVERROR(EINVAL);
    ret = av_buffer_realloc(buf, size + AV_INPUT_BUFFER_PADDING_SIZE);
    if (ret < 0)
        return ret;
    memset((*buf)->data + size, 0, AV_INPUT_BUFFER_PADDING_SIZE);
    return 0;
}
av_buffer_realloc creates an AVBufferRef instance and allocates size bytes of memory for that instance's data field:
int av_buffer_realloc(AVBufferRef **pbuf, int size)
{
    AVBufferRef *buf = *pbuf;
    uint8_t *tmp;
    if (!buf) {
        /* allocate a new buffer with av_realloc(), so it will be reallocatable
         * later */
        uint8_t *data = av_realloc(NULL, size);
        if (!data)
            return AVERROR(ENOMEM);
        buf = av_buffer_create(data, size, av_buffer_default_free, NULL, 0);
        if (!buf) {
            av_freep(&data);
            return AVERROR(ENOMEM);
        }
        buf->buffer->flags |= BUFFER_FLAG_REALLOCATABLE;
        *pbuf = buf;
        return 0;
    } else if (buf->size == size)
        return 0;
    if (!(buf->buffer->flags & BUFFER_FLAG_REALLOCATABLE) ||
        !av_buffer_is_writable(buf) || buf->data != buf->buffer->data) {
        /* cannot realloc, allocate a new reallocable buffer and copy data */
        AVBufferRef *new = NULL;
        av_buffer_realloc(&new, size);
        if (!new)
            return AVERROR(ENOMEM);
        memcpy(new->data, buf->data, FFMIN(size, buf->size));
        buffer_replace(pbuf, &new);
        return 0;
    }
    tmp = av_realloc(buf->buffer->data, size);
    if (!tmp)
        return AVERROR(ENOMEM);
    buf->buffer->data = buf->data = tmp;
    buf->buffer->size = buf->size = size;
    return 0;
}
// releases the memory held by an AVBufferRef
void av_buffer_unref(AVBufferRef **buf)
{
    if (!buf || !*buf)
        return;
    buffer_replace(buf, NULL);
}

static void buffer_replace(AVBufferRef **dst, AVBufferRef **src)
{
    AVBuffer *b;
    b = (*dst)->buffer;
    if (src) {
        **dst = **src;
        av_freep(src);
    } else
        av_freep(dst);
    if (atomic_fetch_add_explicit(&b->refcount, -1, memory_order_acq_rel) == 1) {
        b->free(b->opaque, b->data);
        av_freep(&b);
    }
}
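A minimal sketch verifying the size difference described above (assuming AV_INPUT_BUFFER_PADDING_SIZE as defined by the libavcodec headers; its exact value varies between FFmpeg versions):

/* Sketch: after av_new_packet(), pkt.size is the payload length while
 * pkt.buf->size also includes the trailing padding. */
AVPacket pkt;
if (av_new_packet(&pkt, 1024) == 0) {
    /* pkt.data      == pkt.buf->data                        (same memory)  */
    /* pkt.size      == 1024                                 (payload only) */
    /* pkt.buf->size == 1024 + AV_INPUT_BUFFER_PADDING_SIZE  (with padding) */
    av_packet_unref(&pkt);
}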

3) With packet data like the following:
00 00 00 01 61 e1 40 01 58 2b fb 22 ff 29 7b 3f 6f 67 2f 29 fa 25 53 68 78 46 b1

calling avcodec_send_packet logs the following errors:
I:2018-01-06 15:06:05 ms:887:nal_unit_type: 1, nal_ref_idc: 3
I:2018-01-06 15:06:05 ms:888:non-existing PPS 0 referenced
I:2018-01-06 15:06:05 ms:888:decode_slice_header error
I:2018-01-06 15:06:05 ms:888:no frame!


With data like the following, however, a frame is decoded correctly:

00 00 00 01 67 42 00 2a 96 35 40 f0 04 4f cb 37 01 01 01 40 00 01 c2 00 00 57 e4
01 00 00 00 01 68 ce 3c 80 00 00 00 01 06 e5 01 ef 80 00 00 03 00 00 00 01 65 b8
00 00 52 58 00 00 27 f5 d4 48 7e b4 41 07 24 60 95 2c 92 37 68 75 63 4c ad 3f b1

Clearly, 67 is the SPS, 68 is the PPS, and 65 is a keyframe (IDR slice), after which pictures start to come out. The first data block is a plain non-IDR slice (61) with no SPS/PPS before it, which is why the decoder reports a non-existing PPS and produces no frame.
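The mapping from those header bytes to NAL unit types follows from the H.264 spec: the type is the low 5 bits of the byte after the start code. A small standalone sketch (illustrative only, not FFmpeg code):

#include <stdint.h>
#include <stdio.h>

/* Sketch: the H.264 NAL unit type is (header_byte & 0x1F). */
static const char *nal_name(uint8_t header)
{
    switch (header & 0x1F) {
    case 1:  return "non-IDR slice";  /* 0x61 -> type 1 */
    case 5:  return "IDR slice";      /* 0x65 -> type 5 */
    case 6:  return "SEI";            /* 0x06 -> type 6 */
    case 7:  return "SPS";            /* 0x67 -> type 7 */
    case 8:  return "PPS";            /* 0x68 -> type 8 */
    default: return "other";
    }
}

int main(void)
{
    const uint8_t headers[] = { 0x67, 0x68, 0x06, 0x65, 0x61 };
    for (size_t i = 0; i < sizeof(headers); i++)
        printf("0x%02x -> %s\n", headers[i], nal_name(headers[i]));
    return 0;
}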


Structure definitions
/**
 * A reference to a data buffer.
 *
 * The size of this struct is not a part of the public ABI and it is not meant
 * to be allocated directly.
 */
typedef struct AVBufferRef {
    AVBuffer *buffer;
    /**
     * The data buffer. It is considered writable if and only if
     * this is the only reference to the buffer, in which case
     * av_buffer_is_writable() returns 1.
     */
    uint8_t *data;
    /**
     * Size of data in bytes.
     */
    int      size;
} AVBufferRef;
/**
 * This structure stores compressed data. It is typically exported by demuxers
 * and then passed as input to decoders, or received as output from encoders and
 * then passed to muxers.
 *
 * For video, it should typically contain one compressed frame. For audio it may
 * contain several compressed frames.
 *
 * AVPacket is one of the few structs in FFmpeg, whose size is a part of public
 * ABI. Thus it may be allocated on stack and no new fields can be added to it
 * without libavcodec and libavformat major bump.
 *
 * The semantics of data ownership depends on the buf or destruct (deprecated)
 * fields. If either is set, the packet data is dynamically allocated and is
 * valid indefinitely until av_free_packet() is called (which in turn calls
 * av_buffer_unref()/the destruct callback to free the data). If neither is set,
 * the packet data is typically backed by some static buffer somewhere and is
 * only valid for a limited time (e.g. until the next read call when demuxing).
 *
 * The side data is always allocated with av_malloc() and is freed in
 * av_free_packet().
 */
typedef struct AVPacket {
    /**
     * A reference to the reference-counted buffer where the packet data is
     * stored.
     * May be NULL, then the packet data is not reference-counted.
     */
    AVBufferRef *buf;
    /**
     * Presentation timestamp in AVStream->time_base units; the time at which
     * the decompressed packet will be presented to the user.
     * Can be AV_NOPTS_VALUE if it is not stored in the file.
     * pts MUST be larger or equal to dts as presentation cannot happen before
     * decompression, unless one wants to view hex dumps. Some formats misuse
     * the terms dts and pts/cts to mean something different. Such timestamps
     * must be converted to true pts/dts before they are stored in AVPacket.
     */
    int64_t pts;
    /**
     * Decompression timestamp in AVStream->time_base units; the time at which
     * the packet is decompressed.
     * Can be AV_NOPTS_VALUE if it is not stored in the file.
     */
    int64_t dts;
    uint8_t *data;
    int   size;
    int   stream_index;
    /**
     * A combination of AV_PKT_FLAG values
     */
    int   flags;
    /**
     * Additional packet data that can be provided by the container.
     * Packet can contain several types of side information.
     */
    struct {
        uint8_t *data;
        int      size;
        enum AVPacketSideDataType type;
    } *side_data;
    int side_data_elems;
    /**
     * Duration of this packet in AVStream->time_base units, 0 if unknown.
     * Equals next_pts - this_pts in presentation order.
     */
    int   duration;
#if FF_API_DESTRUCT_PACKET
    attribute_deprecated
    void  (*destruct)(struct AVPacket *);
    attribute_deprecated
    void  *priv;
#endif
    int64_t pos;                            ///< byte position in stream, -1 if unknown
    /**
     * Time difference in AVStream->time_base units from the pts of this
     * packet to the point at which the output from the decoder has converged
     * independent from the availability of previous frames. That is, the
     * frames are virtually identical no matter if decoding started from
     * the very first frame or from this keyframe.
     * Is AV_NOPTS_VALUE if unknown.
     * This field is not the display duration of the current packet.
     * This field has no meaning if the packet does not have AV_PKT_FLAG_KEY
     * set.
     *
     * The purpose of this field is to allow seeking in streams that have no
     * keyframes in the conventional sense. It corresponds to the
     * recovery point SEI in H.264 and match_time_delta in NUT. It is also
     * essential for some types of subtitle streams to ensure that all
     * subtitles are correctly displayed after seeking.
     */
    int64_t convergence_duration;
} AVPacket;
