Initial community commit

Jef
2024-09-24 14:54:57 +02:00
parent 537bcbc862
commit 20d28e80a5
16810 changed files with 4640254 additions and 2 deletions

Src/h264/MFTDecoder.cpp Normal file
@@ -0,0 +1,605 @@
#include "MFTDecoder.h"
#include <Mfapi.h>
#include <wmcodecdsp.h>
#include <Mferror.h>
//-----------------------------------------------------------------------------
// GetDefaultStride
//
// Gets the default stride for a video frame, assuming no extra padding bytes.
//
//-----------------------------------------------------------------------------
HRESULT GetDefaultStride(IMFMediaType *pType, LONG *plStride)
{
LONG lStride = 0;
// Try to get the default stride from the media type.
HRESULT hr = pType->GetUINT32(MF_MT_DEFAULT_STRIDE, (UINT32*)&lStride);
if (FAILED(hr))
{
// Attribute not set. Try to calculate the default stride.
GUID subtype = GUID_NULL;
UINT32 width = 0;
UINT32 height = 0;
// Get the subtype and the image size.
hr = pType->GetGUID(MF_MT_SUBTYPE, &subtype);
if (SUCCEEDED(hr))
{
hr = MFGetAttributeSize(pType, MF_MT_FRAME_SIZE, &width, &height);
}
if (SUCCEEDED(hr))
{
hr = MFGetStrideForBitmapInfoHeader(subtype.Data1, width, &lStride);
}
// Set the attribute for later reference.
if (SUCCEEDED(hr))
{
(void)pType->SetUINT32(MF_MT_DEFAULT_STRIDE, UINT32(lStride));
}
}
if (SUCCEEDED(hr))
{
*plStride = lStride;
}
return hr;
}
MFTDecoder::MFTDecoder()
{
decoder = 0;
stride = 0;
width = 0;
height = 0;
}
MFTDecoder::~MFTDecoder()
{
if (decoder) {
decoder->Release();
}
}
static HRESULT CreateInputMediaType(IMFMediaType **_media_type)
{
HRESULT hr=E_FAIL;
IMFMediaType *media_type=0;
do {
hr = MFCreateMediaType(&media_type);
if (FAILED(hr)) {
break;
}
hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
if (FAILED(hr)) {
break;
}
hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
if (FAILED(hr)) {
break;
}
*_media_type = media_type;
return S_OK;
} while(0);
if (media_type) {
media_type->Release();
}
return hr;
}
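// MFTDecoder::Open
//
// Creates the Microsoft H.264 decoder MFT, declares H.264 video as the input
// type, accepts the first output type the transform offers, and notifies it
// that streaming is about to begin.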
HRESULT MFTDecoder::Open()
{
HRESULT hr=E_FAIL;
hr = CoCreateInstance(CLSID_CMSH264DecoderMFT, NULL, CLSCTX_INPROC_SERVER, __uuidof(IMFTransform), (void**)&decoder);
if (FAILED(hr)) {
return hr;
}
/* set input */
IMFMediaType *media_type=0;
hr = CreateInputMediaType(&media_type);
if (FAILED(hr)) {
return hr;
}
hr = decoder->SetInputType(0, media_type, 0);
media_type->Release();
if (FAILED(hr)) {
return hr;
}
/* set output */
hr = decoder->GetOutputAvailableType(0, 0, &media_type);
if (FAILED(hr)) {
return hr;
}
hr = decoder->SetOutputType(0, media_type, 0);
media_type->Release();
if (FAILED(hr)) {
return hr;
}
decoder->ProcessMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0);
decoder->ProcessMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0);
return S_OK;
}
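// MakeOffset / MakeArea
//
// MFOffset is a 16.16 fixed-point value: 'value' carries the integer part and
// 'fract' the fractional part in 1/65536 units. MakeArea builds an MFVideoArea
// from a floating-point origin and integer dimensions.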
MFOffset MFTDecoder::MakeOffset(float v)
{
MFOffset offset{};
offset.value = short(v);
offset.fract = WORD(65536 * (v - offset.value));
return offset;
}
MFVideoArea MFTDecoder::MakeArea(float x, float y, DWORD width, DWORD height)
{
MFVideoArea area{};
area.OffsetX = MakeOffset(x);
area.OffsetY = MakeOffset(y);
area.Area.cx = width;
area.Area.cy = height;
return area;
}
HRESULT MFTDecoder::GetVideoDisplayArea(IMFMediaType* pType, MFVideoArea* pArea)
{
HRESULT hr = S_OK;
BOOL bPanScan = FALSE;
UINT32 width = 0, height = 0;
bPanScan = MFGetAttributeUINT32(pType, MF_MT_PAN_SCAN_ENABLED, FALSE);
// In pan-and-scan mode, try to get the pan-and-scan region.
if (bPanScan)
{
hr = pType->GetBlob(MF_MT_PAN_SCAN_APERTURE, (UINT8*)pArea,
sizeof(MFVideoArea), NULL);
}
// If not in pan-and-scan mode, or the pan-and-scan region is not set,
// get the minimum display aperture.
if (!bPanScan || hr == MF_E_ATTRIBUTENOTFOUND)
{
hr = pType->GetBlob(MF_MT_MINIMUM_DISPLAY_APERTURE, (UINT8*)pArea,
sizeof(MFVideoArea), NULL);
if (hr == MF_E_ATTRIBUTENOTFOUND)
{
// Minimum display aperture is not set.
// For backward compatibility with some components,
// check for a geometric aperture.
hr = pType->GetBlob(MF_MT_GEOMETRIC_APERTURE, (UINT8*)pArea,
sizeof(MFVideoArea), NULL);
}
// Default: Use the entire video area.
if (hr == MF_E_ATTRIBUTENOTFOUND)
{
hr = MFGetAttributeSize(pType, MF_MT_FRAME_SIZE, &width, &height);
if (SUCCEEDED(hr))
{
*pArea = MakeArea(0.0, 0.0, width, height);
}
}
}
return hr;
}
HRESULT MFTDecoder::GetOutputFormat(UINT *width, UINT *height, bool *flip, double *aspect)
{
HRESULT hr=E_FAIL;
IMFMediaType *media_type = 0;
MFVideoArea pArea;
do {
hr = decoder->GetOutputCurrentType(0, &media_type);
if (FAILED(hr)) {
break;
}
//if (width && height) {
// hr = MFGetAttributeSize(media_type, MF_MT_FRAME_SIZE, width, height);
// if (FAILED(hr)) {
// break;
// }
//}
if (width && height) {
hr = GetVideoDisplayArea(media_type, &pArea);
if (FAILED(hr)) {
break;
}
*width = pArea.Area.cx;
*height = pArea.Area.cy;
}
if (flip) {
LONG stride;
hr = GetDefaultStride(media_type, &stride);
if (FAILED(hr)) {
break;
}
*flip = stride<0;
}
if (aspect) {
MFRatio PAR = {0};
hr = MFGetAttributeRatio(media_type, MF_MT_PIXEL_ASPECT_RATIO,
(UINT32*)&PAR.Numerator,
(UINT32*)&PAR.Denominator);
if (FAILED(hr)) {
*aspect = 1.0;
} else {
*aspect = (double)PAR.Numerator / (double)PAR.Denominator;
}
}
} while(0);
if (media_type) {
media_type->Release();
}
return hr;
}
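// ConfigureOutput
//
// Enumerates the decoder's available output types until it finds the YV12
// surface format ('21VY' FourCC), records its default stride for the caller,
// and selects it as the active output type.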
static HRESULT ConfigureOutput(IMFTransform *decoder, LONG *stride)
{
HRESULT hr = S_OK;
IMFMediaType *media_type = 0;
AM_MEDIA_TYPE *format = NULL;
int index=0;
while(SUCCEEDED(hr)) {
hr = decoder->GetOutputAvailableType(0, index++, &media_type);
if (FAILED(hr)) {
break;
}
media_type->GetRepresentation(FORMAT_MFVideoFormat, (LPVOID*)&format);
MFVIDEOFORMAT* z = (MFVIDEOFORMAT*)format->pbFormat;
unsigned int surface_format = z->surfaceInfo.Format;
media_type->FreeRepresentation(FORMAT_MFVideoFormat, (LPVOID)format);
if (surface_format == '21VY') { // MFVideoFormat_YV12
hr = GetDefaultStride(media_type, stride);
hr = decoder->SetOutputType(0, media_type, 0);
break;
}
}
if(media_type) {
media_type->Release();
}
return hr;
}
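// Feed
//
// Wraps a single NAL unit in an IMFSample and submits it to the decoder,
// prepending a 00 00 00 01 Annex B start code to the payload. FeedRaw below
// submits the caller's data unmodified.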
HRESULT MFTDecoder::Feed(const void *data, size_t data_size, uint64_t timestamp_hundred_nanos)
{
HRESULT hr=E_FAIL;
const BYTE start_code[] = {0, 0, 0, 1};
IMFMediaBuffer *buffer = 0;
BYTE *buffer_pointer = 0;
IMFSample *sample = 0;
do {
hr = MFCreateMemoryBuffer((DWORD)data_size+4, &buffer);
if (FAILED(hr)) {
break;
}
hr = buffer->Lock(&buffer_pointer, NULL, NULL);
if (FAILED(hr)) {
break;
}
memcpy(buffer_pointer, start_code, 4);
memcpy(buffer_pointer+4, data, data_size);
hr = buffer->Unlock();
if (FAILED(hr)) {
break;
}
hr = buffer->SetCurrentLength((DWORD)data_size+4);
if (FAILED(hr)) {
break;
}
hr = MFCreateSample(&sample);
if (FAILED(hr)) {
break;
}
hr = sample->AddBuffer(buffer);
if (FAILED(hr)) {
break;
}
hr = sample->SetSampleTime(timestamp_hundred_nanos);
if (FAILED(hr)) {
break;
}
hr = decoder->ProcessInput(0, sample, 0);
if (FAILED(hr)) {
break;
}
} while(0);
if (buffer) {
buffer->Release();
}
if (sample) {
sample->Release();
}
return hr;
}
HRESULT MFTDecoder::FeedRaw(const void *data, size_t data_size, uint64_t timestamp_hundred_nanos)
{
HRESULT hr=E_FAIL;
IMFMediaBuffer *buffer = 0;
BYTE *buffer_pointer = 0;
IMFSample *sample = 0;
do {
hr = MFCreateMemoryBuffer((DWORD)data_size, &buffer);
if (FAILED(hr)) {
break;
}
hr = buffer->Lock(&buffer_pointer, NULL, NULL);
if (FAILED(hr)) {
break;
}
memcpy(buffer_pointer, data, data_size);
hr = buffer->Unlock();
if (FAILED(hr)) {
break;
}
hr = buffer->SetCurrentLength((DWORD)data_size);
if (FAILED(hr)) {
break;
}
hr = MFCreateSample(&sample);
if (FAILED(hr)) {
break;
}
hr = sample->AddBuffer(buffer);
if (FAILED(hr)) {
break;
}
hr = sample->SetSampleTime(timestamp_hundred_nanos);
if (FAILED(hr)) {
break;
}
hr = decoder->ProcessInput(0, sample, 0);
if (FAILED(hr)) {
break;
}
} while(0);
if (buffer) {
buffer->Release();
}
if (sample) {
sample->Release();
}
return hr;
}
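// CreateOutputSample
//
// Allocates an IMFSample backed by a 16-byte-aligned buffer sized according
// to the decoder's output stream info, ready to receive ProcessOutput data.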
static HRESULT CreateOutputSample(IMFTransform *decoder, IMFSample **_output_sample)
{
HRESULT hr=E_FAIL;
MFT_OUTPUT_STREAM_INFO stream_info;
IMFMediaBuffer *media_buffer = 0;
IMFSample *sample = 0;
do {
hr = MFCreateSample(&sample);
if (FAILED(hr)) {
break;
}
hr = decoder->GetOutputStreamInfo(0, &stream_info);
if (FAILED(hr)) {
break;
}
hr = MFCreateAlignedMemoryBuffer(stream_info.cbSize, MF_16_BYTE_ALIGNMENT, &media_buffer);
if (FAILED(hr)) {
break;
}
hr = sample->AddBuffer(media_buffer);
if (FAILED(hr)) {
break;
}
if (media_buffer) {
media_buffer->Release();
}
*_output_sample = sample;
return S_OK;
} while(0);
if (sample) {
sample->Release();
}
if (media_buffer) {
media_buffer->Release();
}
return hr;
}
// Release the events that an MFT might allocate in IMFTransform::ProcessOutput().
static void ReleaseEventCollection(MFT_OUTPUT_DATA_BUFFER &pBuffers)
{
if (pBuffers.pEvents) {
pBuffers.pEvents->Release();
pBuffers.pEvents = NULL;
}
}
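// GetFrame (IMFMediaBuffer)
//
// Pulls one decoded frame from the transform. MF_E_TRANSFORM_NEED_MORE_INPUT
// means the caller must feed more data first; MF_E_TRANSFORM_STREAM_CHANGE
// reconfigures the output type, resets the cached frame geometry, and returns
// the stream-change code to the caller.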
HRESULT MFTDecoder::GetFrame(IMFMediaBuffer **out_buffer, uint64_t *hundrednanos)
{
HRESULT hr=E_FAIL;
IMFSample *output_sample=0;
DWORD mftStatus;
do {
hr = CreateOutputSample(decoder, &output_sample);
if (FAILED(hr)) {
break;
}
MFT_OUTPUT_DATA_BUFFER mftDataBuffer = {0, };
mftDataBuffer.pSample = output_sample;
mftStatus = 0;
hr = decoder->ProcessOutput(0, 1, &mftDataBuffer, &mftStatus);
if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
break;
}
if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
ConfigureOutput(decoder, &stride);
width=0;
height=0;
} else if (FAILED(hr)) {
break;
} else {
if (mftDataBuffer.pSample) {
IMFMediaBuffer *mediaBuffer;
hr = mftDataBuffer.pSample->GetBufferByIndex(0, &mediaBuffer);
if (FAILED(hr)) {
break;
}
LONGLONG sample_time;
output_sample->GetSampleTime(&sample_time);
if (hundrednanos) {
*hundrednanos = sample_time;
}
*out_buffer = mediaBuffer;
}
ReleaseEventCollection(mftDataBuffer);
}
} while (0);
if (output_sample) {
output_sample->Release();
}
return hr;
}
HRESULT MFTDecoder::Flush()
{
return decoder->ProcessMessage(MFT_MESSAGE_COMMAND_FLUSH, 0);
}
HRESULT MFTDecoder::Drain()
{
return decoder->ProcessMessage(MFT_MESSAGE_COMMAND_DRAIN, 0);
}
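// GetFrame (YV12_PLANES)
//
// Lazily queries the frame size and default stride (on first use or after a
// stream change), fetches a decoded buffer, and fills a YV12 plane structure
// pointing into the locked buffer. The buffer is handed back via decoder_data
// and stays locked until FreeFrame() releases it.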
HRESULT MFTDecoder::GetFrame(YV12_PLANES **data, void **decoder_data, uint64_t *mft_timestamp)
{
HRESULT hr=E_FAIL;
IMFMediaBuffer *media_buffer = 0;
IMFMediaType *media_type = 0;
do {
if (!height || !stride) {
hr = decoder->GetOutputCurrentType(0, &media_type);
if (FAILED(hr)) {
break;
}
hr = MFGetAttributeSize(media_type, MF_MT_FRAME_SIZE, &width, &height);
if (FAILED(hr)) {
break;
}
hr = GetDefaultStride(media_type, &stride);
if (FAILED(hr)) {
break;
}
}
hr = this->GetFrame(&media_buffer, mft_timestamp);
if (FAILED(hr)) {
break;
}
YV12_PLANES *planes = (YV12_PLANES *)malloc(sizeof(YV12_PLANES));
IMF2DBuffer *buffer2d=0;
if (SUCCEEDED(media_buffer->QueryInterface(&buffer2d))) {
BYTE *pbScanline0;
LONG pitch;
buffer2d->Lock2D(&pbScanline0, &pitch);
planes->y.baseAddr = pbScanline0;
planes->y.rowBytes = pitch;
pbScanline0 += pitch * height;
planes->v.baseAddr = pbScanline0;
planes->v.rowBytes = pitch/2;
pbScanline0 += pitch * height/4;
planes->u.baseAddr = pbScanline0;
planes->u.rowBytes = pitch/2;
buffer2d->Release();
} else {
DWORD length, max_length;
BYTE *video_data;
media_buffer->Lock(&video_data, &length, &max_length);
planes->y.baseAddr = video_data;
planes->y.rowBytes = stride;
video_data += stride * height;
planes->v.baseAddr = video_data;
planes->v.rowBytes = stride/2;
video_data += (stride/2) * (height/2);
planes->u.baseAddr = video_data;
planes->u.rowBytes = stride/2;
}
*data = planes;
*decoder_data = media_buffer;
} while(0);
if (media_type) {
media_type->Release();
}
return hr;
}
HRESULT MFTDecoder::FreeFrame(YV12_PLANES *data, void *decoder_data)
{
IMFMediaBuffer *buffer= (IMFMediaBuffer *)decoder_data;
if (buffer) {
IMF2DBuffer *buffer2d=0;
if (SUCCEEDED(buffer->QueryInterface(&buffer2d))) {
buffer2d->Unlock2D();
buffer2d->Release();
} else {
buffer->Unlock();
}
buffer->Release();
}
free(data);
return S_OK;
}

Src/h264/MFTDecoder.h Normal file
@@ -0,0 +1,38 @@
#pragma once
#include <Mftransform.h>
#include <bfc/platform/types.h>
#include "../Winamp/wa_ipc.h"
struct nullsoft_h264_frame_data
{
void *data;
void *decoder_data;
uint64_t local_timestamp;
};
class MFTDecoder
{
public:
MFTDecoder();
~MFTDecoder();
HRESULT Open();
HRESULT Feed(const void *data, size_t data_size, uint64_t timestamp_hundred_nanos);
HRESULT FeedRaw(const void *data, size_t data_size, uint64_t timestamp_hundred_nanos);
HRESULT GetFrame(IMFMediaBuffer **output_buffer, uint64_t *hundrednanos);
HRESULT GetFrame(YV12_PLANES **data, void **decoder_data, uint64_t *mft_timestamp);
HRESULT FreeFrame(YV12_PLANES *data, void *decoder_data);
HRESULT GetOutputFormat(UINT *width, UINT *height, bool *flip, double *aspect);
HRESULT Flush();
HRESULT Drain();
HRESULT GetVideoDisplayArea(IMFMediaType* pType, MFVideoArea* pArea);
IMFTransform *decoder;
LONG stride;
UINT32 width, height;
private:
MFVideoArea MakeArea(float x, float y, DWORD width, DWORD height);
MFOffset MakeOffset(float v);
};

Src/h264/NSVFactory.cpp Normal file
@@ -0,0 +1,22 @@
#include "NSVFactory.h"
#include "nsv_h264_decoder.h"
#include "api__h264.h"
#include "../nsv/nsvlib.h"
IVideoDecoder *NSVFactory::CreateVideoDecoder(int w, int h, double framerate, unsigned int fmt, int *flip)
{
if (fmt == NSV_MAKETYPE('H','2','6','4'))
{
*flip=0;
void *mem = WASABI_API_MEMMGR->sysMalloc(sizeof(H264_Decoder));
H264_Decoder *dec = new (mem) H264_Decoder();
return dec;
}
return NULL;
}
#define CBCLASS NSVFactory
START_DISPATCH;
CB(SVC_NSVFACTORY_CREATEVIDEODECODER, CreateVideoDecoder)
END_DISPATCH;
#undef CBCLASS

Src/h264/NSVFactory.h Normal file
@@ -0,0 +1,20 @@
#pragma once
#include "../nsv/svc_nsvFactory.h"
// {262CCE92-78DC-47a9-AFFB-2471799CA799}
static const GUID h264_nsv_guid =
{ 0x262cce92, 0x78dc, 0x47a9, { 0xaf, 0xfb, 0x24, 0x71, 0x79, 0x9c, 0xa7, 0x99 } };
class NSVFactory : public svc_nsvFactory
{
public:
static const char *getServiceName() { return "H.264 NSV Decoder"; }
static GUID getServiceGuid() { return h264_nsv_guid; }
IVideoDecoder *CreateVideoDecoder(int w, int h, double framerate, unsigned int fmt, int *flip);
protected:
RECVS_DISPATCH;
};

Src/h264/annexb.c Normal file
@@ -0,0 +1,224 @@
#include "annexb.h"
#include <bfc/platform/types.h>
enum
{
InitialUnit = 0,
NewUnit = 1, // start finding start code during AddData
MidUnit = 2, // need to find the next start code from next AddData call to form a complete unit
UnitReady = 3, // a new unit is ready and we are waiting for a GetUnit call
};
typedef struct annex_b_demuxer
{
size_t buffer_position;
size_t number_of_zero_words; // number of zero words as identified from the first unit
size_t current_zero_words; // current zero word count, saved in case NALU crosses two AddData calls
int end_of_stream; // set to 1 when there's no more data (so we know not to look for the next start code)
int state;
size_t buffer_size;
uint8_t buffer[1]; // make sure this is last
} AnnexBDemuxer;
int AddData(const uint8_t **data, size_t *data_len); // data and length remaining are updated on exit. if data_len>0 on exit, call again after calling GetUnit
void EndOfStream();
h264_annexb_demuxer_t AnnexB_Create(int size)
{
AnnexBDemuxer *demuxer = (AnnexBDemuxer *)malloc(sizeof(AnnexBDemuxer) + size);
demuxer->buffer_size = size; // MAX_CODED_FRAME_SIZE;
demuxer->state = InitialUnit;
demuxer->buffer_position = 0;
demuxer->number_of_zero_words = 0;
demuxer->current_zero_words = 0;
demuxer->end_of_stream = 0;
return (h264_annexb_demuxer_t)demuxer;
}
static int AnnexB_GetByte(const uint8_t **data, size_t *data_len, uint8_t *data_byte)
{
if (*data_len)
{
*data_byte = **data;
*data = *data + 1;
*data_len = *data_len - 1;
return 1;
}
else
return 0;
}
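/*
  AnnexB_AddData

  Consumes bytes from *data, scanning for Annex B start codes (a run of zero
  bytes followed by 0x01). Bytes between start codes are accumulated in the
  internal buffer with the start codes stripped; once the next start code (or
  end-of-stream) is reached, AnnexB_UnitAvailable is returned and the complete
  unit can be fetched with AnnexB_GetUnit(). *data and *data_len are advanced
  past whatever was consumed.
*/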
int AnnexB_AddData(h264_annexb_demuxer_t d, const void **_data, size_t *data_len)
{
AnnexBDemuxer *demuxer = (AnnexBDemuxer *)d;
if (demuxer)
{
const uint8_t **data = (const uint8_t **)_data; // cast to something easier to do pointer math with
if (demuxer->state == InitialUnit)
{
// find start code with unknown number of initial zero bytes
while(demuxer->number_of_zero_words == 0)
{
uint8_t data_byte;
if (AnnexB_GetByte(data, data_len, &data_byte))
{
if (data_byte == 0)
{
demuxer->current_zero_words++;
}
else if (data_byte == 1 && demuxer->current_zero_words >= 2)
{
demuxer->number_of_zero_words = demuxer->current_zero_words;
demuxer->current_zero_words = 0;
demuxer->state = MidUnit;
}
else
{
// re-sync
demuxer->current_zero_words = 0;
}
}
else
{
return AnnexB_NeedMoreData;
}
}
}
else if (demuxer->state == NewUnit)
{
// find start code with known number of initial zero bytes
while (demuxer->state == NewUnit)
{
uint8_t data_byte;
if (AnnexB_GetByte(data, data_len, &data_byte))
{
if (data_byte == 0)
{
demuxer->current_zero_words++;
}
else if (data_byte == 1 && demuxer->current_zero_words >= 2) // we might get more start words than required
{
demuxer->current_zero_words = 0;
demuxer->state = MidUnit;
}
else
{
// re-sync
demuxer->current_zero_words = 0;
}
}
else
{
return AnnexB_NeedMoreData;
}
}
}
if (demuxer->state == MidUnit) // no else, because we fall through during the start code scanning
{
uint8_t data_byte;
while (AnnexB_GetByte(data, data_len, &data_byte))
{
if (data_byte == 0)
{
demuxer->current_zero_words++; // might be the next start word
/* if (demuxer->current_zero_words == 3) // 00 00 00 is also a valid sequence for end-of-nal detection.
{
demuxer->state = UnitReady;
return AnnexB_UnitAvailable;
}*/
}
else if (data_byte == 1 && demuxer->current_zero_words >= 2)
{
while (demuxer->current_zero_words > demuxer->number_of_zero_words)
{
// write trailing zero bytes to stream
if (demuxer->buffer_position >= demuxer->buffer_size)
return AnnexB_BufferFull;
demuxer->buffer[demuxer->buffer_position++] = 0;
demuxer->current_zero_words--;
}
demuxer->current_zero_words = 0;
demuxer->state = UnitReady;
return AnnexB_UnitAvailable;
}
else
{
while (demuxer->current_zero_words)
{
// write any zero bytes that we read to the stream
if (demuxer->buffer_position >= demuxer->buffer_size)
return AnnexB_BufferFull;
demuxer->buffer[demuxer->buffer_position++] = 0;
demuxer->current_zero_words--;
}
if (demuxer->buffer_position >= demuxer->buffer_size)
return AnnexB_BufferFull;
demuxer->buffer[demuxer->buffer_position++] = data_byte;
}
}
if (demuxer->end_of_stream)
{
demuxer->state = UnitReady;
}
else
{
return AnnexB_NeedMoreData;
}
}
if (demuxer->state == UnitReady)
return AnnexB_UnitAvailable;
return AnnexB_NeedMoreData; // not expected to be reachable
}
else
return AnnexB_Error;
}
void AnnexB_EndOfStream(h264_annexb_demuxer_t d)
{
AnnexBDemuxer *demuxer = (AnnexBDemuxer *)d;
if (demuxer)
demuxer->end_of_stream = 1;
}
int AnnexB_GetUnit(h264_annexb_demuxer_t d, const void **data, size_t *data_len)
{
AnnexBDemuxer *demuxer = (AnnexBDemuxer *)d;
if (demuxer)
{
if (demuxer->state == UnitReady)
{
*data = demuxer->buffer;
*data_len = demuxer->buffer_position;
demuxer->buffer_position = 0;
// if we've found the next start code, go to MidUnit state
if (demuxer->current_zero_words == 0)
{
demuxer->state = MidUnit;
}
else // no start code, need to find it
{
demuxer->state = NewUnit;
}
return AnnexB_UnitAvailable;
}
else
{
return AnnexB_NeedMoreData;
}
}
return AnnexB_Error;
}
void AnnexB_Destroy(h264_annexb_demuxer_t d)
{
AnnexBDemuxer *demuxer = (AnnexBDemuxer *)d;
if (demuxer)
free(demuxer);
}

Src/h264/annexb.h Normal file
@@ -0,0 +1,24 @@
#pragma once
#include <bfc/platform/types.h>
#ifdef __cplusplus
extern "C" {
#endif
enum
{
AnnexB_UnitAvailable = 0, // data was added successfully and a new unit is available via GetUnit().
AnnexB_BufferFull = 1, // no start code found within the maximum unit length
AnnexB_NeedMoreData = 2, // no unit ready yet, pass in the next data chunk
AnnexB_Error = 3, // general error (out of memory, null pointer, etc)
};
typedef void *h264_annexb_demuxer_t;
h264_annexb_demuxer_t AnnexB_Create(int size);
void AnnexB_Destroy(h264_annexb_demuxer_t demuxer);
int AnnexB_AddData(h264_annexb_demuxer_t demuxer, const void **data, size_t *data_len);
void AnnexB_EndOfStream(h264_annexb_demuxer_t demuxer);
int AnnexB_GetUnit(h264_annexb_demuxer_t demuxer, const void **data, size_t *data_len);
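/*
  Minimal usage sketch (illustrative only; read_chunk(), consume_unit() and
  MAX_UNIT are hypothetical placeholders, not part of this API):

    const void *chunk; size_t chunk_len;
    h264_annexb_demuxer_t d = AnnexB_Create(MAX_UNIT);
    while (read_chunk(&chunk, &chunk_len)) {
      int ret;
      do {
        ret = AnnexB_AddData(d, &chunk, &chunk_len);
        if (ret == AnnexB_UnitAvailable) {
          const void *unit; size_t unit_len;
          AnnexB_GetUnit(d, &unit, &unit_len);
          consume_unit(unit, unit_len);
        }
      } while (chunk_len && ret != AnnexB_Error && ret != AnnexB_BufferFull);
    }
    AnnexB_EndOfStream(d);
    AnnexB_Destroy(d);
*/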
#ifdef __cplusplus
}
#endif

Src/h264/api__h264.h Normal file
@@ -0,0 +1,13 @@
#ifndef NULLSOFT_APIH
#define NULLSOFT_APIH
#include <api/service/api_service.h>
#include <api/memmgr/api_memmgr.h>
extern api_memmgr *memmgrApi;
#define WASABI_API_MEMMGR memmgrApi
#include "../winamp/api_winamp.h"
extern api_winamp *winampApi;
#define AGAVE_API_WINAMP winampApi
#endif

Src/h264/avi_h264_decoder.cpp Normal file
@@ -0,0 +1,144 @@
#include "avi_h264_decoder.h"
#include "../Winamp/wa_ipc.h"
#include <mmsystem.h>
#include <assert.h>
#include <Mferror.h>
int AVIDecoderCreator::CreateVideoDecoder(const nsavi::AVIH *avi_header, const nsavi::STRH *stream_header, const nsavi::STRF *stream_format, const nsavi::STRD *stream_data, ifc_avivideodecoder **decoder)
{
nsavi::video_format *format = (nsavi::video_format *)stream_format;
if (format)
{
if (format->compression == '462H')
{
MFTDecoder *ctx = new MFTDecoder();
if (!ctx)
return CREATEDECODER_FAILURE;
if (FAILED(ctx->Open())) {
delete ctx;
return CREATEDECODER_FAILURE;
}
*decoder = new AVIH264(ctx, stream_header);
return CREATEDECODER_SUCCESS;
}
}
return CREATEDECODER_NOT_MINE;
}
#define CBCLASS AVIDecoderCreator
START_DISPATCH;
CB(CREATE_VIDEO_DECODER, CreateVideoDecoder)
END_DISPATCH;
#undef CBCLASS
AVIH264::AVIH264(MFTDecoder *ctx, const nsavi::STRH *stream_header) : decoder(ctx), stream_header(stream_header)
{
}
AVIH264::~AVIH264()
{
for ( size_t i = 0; i < buffered_frames.size(); i++ )
{
nullsoft_h264_frame_data frame_data = buffered_frames[ i ];
decoder->FreeFrame( (YV12_PLANES *)frame_data.data, frame_data.decoder_data );
}
delete decoder;
}
int AVIH264::GetOutputProperties(int *x, int *y, int *color_format, double *aspect_ratio, int *flip)
{
UINT width, height;
bool local_flip=false;
if (SUCCEEDED(decoder->GetOutputFormat(&width, &height, &local_flip, aspect_ratio))) {
*x = width;
*y = height;
*color_format = '21VY';
*flip = local_flip;
return AVI_SUCCESS;
}
return AVI_FAILURE;
}
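// DecodeChunk
//
// Submits one AVI chunk to the transform unmodified via FeedRaw (the chunk is
// expected to already carry Annex B start codes). While the MFT reports
// MF_E_NOTACCEPTING, pending output frames are pulled and buffered so the
// input can eventually be accepted.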
int AVIH264::DecodeChunk(uint16_t type, const void *inputBuffer, size_t inputBufferBytes)
{
for (;;) {
HRESULT hr = decoder->FeedRaw(inputBuffer, inputBufferBytes, 0);
if (hr == MF_E_NOTACCEPTING) {
nullsoft_h264_frame_data frame_data;
if (FAILED(decoder->GetFrame((YV12_PLANES **)&frame_data.data, &frame_data.decoder_data, &frame_data.local_timestamp))) {
continue;
}
buffered_frames.push_back(frame_data);
} else if (FAILED(hr)) {
return AVI_FAILURE;
} else {
break;
}
}
return AVI_SUCCESS;
}
void AVIH264::Flush()
{
for (size_t i=0;i<buffered_frames.size();i++) {
nullsoft_h264_frame_data frame_data = buffered_frames[i];
decoder->FreeFrame((YV12_PLANES *)frame_data.data, frame_data.decoder_data);
}
decoder->Flush();
}
int AVIH264::GetPicture(void **data, void **decoder_data)
{
if (!buffered_frames.empty()) {
nullsoft_h264_frame_data frame_data = buffered_frames[0];
buffered_frames.erase(buffered_frames.begin());
*data = frame_data.data;
*decoder_data = frame_data.decoder_data;
return AVI_SUCCESS;
}
if (SUCCEEDED(decoder->GetFrame((YV12_PLANES **)data, decoder_data, 0))) {
return AVI_SUCCESS;
} else {
return AVI_FAILURE;
}
}
void AVIH264::FreePicture(void *data, void *decoder_data)
{
decoder->FreeFrame((YV12_PLANES *)data, decoder_data);
}
void AVIH264::EndOfStream()
{
decoder->Drain();
}
void AVIH264::HurryUp(int state)
{
// TODO(benski)
//if (decoder)
// H264_HurryUp(decoder, state);
}
void AVIH264::Close()
{
delete this;
}
#define CBCLASS AVIH264
START_DISPATCH;
CB(GET_OUTPUT_PROPERTIES, GetOutputProperties)
CB(DECODE_CHUNK, DecodeChunk)
VCB(FLUSH, Flush)
VCB(CLOSE, Close)
CB(GET_PICTURE, GetPicture)
VCB(FREE_PICTURE, FreePicture)
VCB(END_OF_STREAM, EndOfStream)
VCB(HURRY_UP, HurryUp)
END_DISPATCH;
#undef CBCLASS

Src/h264/avi_h264_decoder.h Normal file
@@ -0,0 +1,42 @@
#pragma once
#include "../Plugins/Input/in_avi/ifc_avivideodecoder.h"
#include "../Plugins/Input/in_avi/svc_avidecoder.h"
#include "MFTDecoder.h"
#include <vector>
// {AFA1BB51-F41B-4522-9251-25A8DF923DBE}
static const GUID avi_h264_guid =
{ 0xafa1bb51, 0xf41b, 0x4522, { 0x92, 0x51, 0x25, 0xa8, 0xdf, 0x92, 0x3d, 0xbe } };
class AVIDecoderCreator : public svc_avidecoder
{
public:
static const char *getServiceName() { return "H.264 AVI Decoder"; }
static GUID getServiceGuid() { return avi_h264_guid; }
int CreateVideoDecoder(const nsavi::AVIH *avi_header, const nsavi::STRH *stream_header, const nsavi::STRF *stream_format, const nsavi::STRD *stream_data, ifc_avivideodecoder **decoder);
protected:
RECVS_DISPATCH;
};
class AVIH264 : public ifc_avivideodecoder
{
public:
AVIH264(MFTDecoder *ctx, const nsavi::STRH *stream_header);
~AVIH264();
int GetOutputProperties(int *x, int *y, int *color_format, double *aspect_ratio, int *flip);
int DecodeChunk(uint16_t type, const void *inputBuffer, size_t inputBufferBytes);
void Flush();
void Close();
int GetPicture(void **data, void **decoder_data);
void FreePicture(void *data, void *decoder_data);
void EndOfStream();
void HurryUp(int state);
private:
MFTDecoder *decoder;
const nsavi::STRH *stream_header;
std::vector<nullsoft_h264_frame_data> buffered_frames;
protected:
RECVS_DISPATCH;
};

Src/h264/h264.rc Normal file
@@ -0,0 +1,76 @@
// Microsoft Visual C++ generated resource script.
//
#include "resource.h"
#define APSTUDIO_READONLY_SYMBOLS
/////////////////////////////////////////////////////////////////////////////
//
// Generated from the TEXTINCLUDE 2 resource.
//
#include "afxres.h"
/////////////////////////////////////////////////////////////////////////////
#undef APSTUDIO_READONLY_SYMBOLS
/////////////////////////////////////////////////////////////////////////////
// English (U.S.) resources
#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_ENU)
#ifdef _WIN32
LANGUAGE LANG_ENGLISH, SUBLANG_ENGLISH_US
#pragma code_page(1252)
#endif //_WIN32
#endif // English (U.S.) resources
/////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////
// English (U.K.) resources
#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_ENG)
#ifdef _WIN32
LANGUAGE LANG_ENGLISH, SUBLANG_ENGLISH_UK
#pragma code_page(1252)
#endif //_WIN32
#ifdef APSTUDIO_INVOKED
/////////////////////////////////////////////////////////////////////////////
//
// TEXTINCLUDE
//
1 TEXTINCLUDE
BEGIN
"resource.h\0"
END
2 TEXTINCLUDE
BEGIN
"#include ""afxres.h""\r\n"
"\0"
END
3 TEXTINCLUDE
BEGIN
"#include ""version.rc2""\r\n"
"\0"
END
#endif // APSTUDIO_INVOKED
#endif // English (U.K.) resources
/////////////////////////////////////////////////////////////////////////////
#ifndef APSTUDIO_INVOKED
/////////////////////////////////////////////////////////////////////////////
//
// Generated from the TEXTINCLUDE 3 resource.
//
#include "version.rc2"
/////////////////////////////////////////////////////////////////////////////
#endif // not APSTUDIO_INVOKED

Src/h264/h264.sln Normal file
@@ -0,0 +1,41 @@

Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio Version 16
VisualStudioVersion = 16.0.29613.14
MinimumVisualStudioVersion = 10.0.40219.1
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "h264", "h264.vcxproj", "{CB4658FB-5E5B-4B1D-8341-6E31E41B49E4}"
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "libmp4v2", "..\libmp4v2\libmp4v2.vcxproj", "{EFB9B882-6A8B-463D-A8E3-A2807AFC5D9F}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Win32 = Debug|Win32
Debug|x64 = Debug|x64
Release|Win32 = Release|Win32
Release|x64 = Release|x64
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{CB4658FB-5E5B-4B1D-8341-6E31E41B49E4}.Debug|Win32.ActiveCfg = Debug|Win32
{CB4658FB-5E5B-4B1D-8341-6E31E41B49E4}.Debug|Win32.Build.0 = Debug|Win32
{CB4658FB-5E5B-4B1D-8341-6E31E41B49E4}.Debug|x64.ActiveCfg = Debug|x64
{CB4658FB-5E5B-4B1D-8341-6E31E41B49E4}.Debug|x64.Build.0 = Debug|x64
{CB4658FB-5E5B-4B1D-8341-6E31E41B49E4}.Release|Win32.ActiveCfg = Release|Win32
{CB4658FB-5E5B-4B1D-8341-6E31E41B49E4}.Release|Win32.Build.0 = Release|Win32
{CB4658FB-5E5B-4B1D-8341-6E31E41B49E4}.Release|x64.ActiveCfg = Release|x64
{CB4658FB-5E5B-4B1D-8341-6E31E41B49E4}.Release|x64.Build.0 = Release|x64
{EFB9B882-6A8B-463D-A8E3-A2807AFC5D9F}.Debug|Win32.ActiveCfg = Debug|Win32
{EFB9B882-6A8B-463D-A8E3-A2807AFC5D9F}.Debug|Win32.Build.0 = Debug|Win32
{EFB9B882-6A8B-463D-A8E3-A2807AFC5D9F}.Debug|x64.ActiveCfg = Debug|x64
{EFB9B882-6A8B-463D-A8E3-A2807AFC5D9F}.Debug|x64.Build.0 = Debug|x64
{EFB9B882-6A8B-463D-A8E3-A2807AFC5D9F}.Release|Win32.ActiveCfg = Release|Win32
{EFB9B882-6A8B-463D-A8E3-A2807AFC5D9F}.Release|Win32.Build.0 = Release|Win32
{EFB9B882-6A8B-463D-A8E3-A2807AFC5D9F}.Release|x64.ActiveCfg = Release|x64
{EFB9B882-6A8B-463D-A8E3-A2807AFC5D9F}.Release|x64.Build.0 = Release|x64
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {65BB345C-17C8-42EF-856A-51A9F75EA0E7}
EndGlobalSection
EndGlobal

Src/h264/h264.vcxproj Normal file
@@ -0,0 +1,271 @@
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" ToolsVersion="15.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup Label="ProjectConfigurations">
<ProjectConfiguration Include="Debug|Win32">
<Configuration>Debug</Configuration>
<Platform>Win32</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Debug|x64">
<Configuration>Debug</Configuration>
<Platform>x64</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Release|Win32">
<Configuration>Release</Configuration>
<Platform>Win32</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Release|x64">
<Configuration>Release</Configuration>
<Platform>x64</Platform>
</ProjectConfiguration>
</ItemGroup>
<PropertyGroup Label="Globals">
<ProjectGuid>{CB4658FB-5E5B-4B1D-8341-6E31E41B49E4}</ProjectGuid>
<RootNamespace>h264</RootNamespace>
<WindowsTargetPlatformVersion>10.0.19041.0</WindowsTargetPlatformVersion>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
<ConfigurationType>DynamicLibrary</ConfigurationType>
<PlatformToolset>v142</PlatformToolset>
<CharacterSet>Unicode</CharacterSet>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
<ConfigurationType>DynamicLibrary</ConfigurationType>
<PlatformToolset>v142</PlatformToolset>
<CharacterSet>Unicode</CharacterSet>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration">
<ConfigurationType>DynamicLibrary</ConfigurationType>
<PlatformToolset>v142</PlatformToolset>
<CharacterSet>Unicode</CharacterSet>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration">
<ConfigurationType>DynamicLibrary</ConfigurationType>
<PlatformToolset>v142</PlatformToolset>
<CharacterSet>Unicode</CharacterSet>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
<ImportGroup Label="ExtensionSettings">
</ImportGroup>
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="PropertySheets">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
</ImportGroup>
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="PropertySheets">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
</ImportGroup>
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="PropertySheets">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
</ImportGroup>
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="PropertySheets">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
</ImportGroup>
<PropertyGroup Label="UserMacros" />
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<LinkIncremental>false</LinkIncremental>
<OutDir>$(PlatformShortName)_$(Configuration)\</OutDir>
<IntDir>$(PlatformShortName)_$(Configuration)\</IntDir>
<TargetExt>.w5s</TargetExt>
<IncludePath>$(IncludePath)</IncludePath>
<LibraryPath>$(LibraryPath)</LibraryPath>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
<LinkIncremental>false</LinkIncremental>
<OutDir>$(PlatformShortName)_$(Configuration)\</OutDir>
<IntDir>$(PlatformShortName)_$(Configuration)\</IntDir>
<TargetExt>.w5s</TargetExt>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<LinkIncremental>false</LinkIncremental>
<OutDir>$(PlatformShortName)_$(Configuration)\</OutDir>
<IntDir>$(PlatformShortName)_$(Configuration)\</IntDir>
<TargetExt>.w5s</TargetExt>
<IncludePath>$(IncludePath)</IncludePath>
<LibraryPath>$(LibraryPath)</LibraryPath>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
<LinkIncremental>false</LinkIncremental>
<OutDir>$(PlatformShortName)_$(Configuration)\</OutDir>
<IntDir>$(PlatformShortName)_$(Configuration)\</IntDir>
<TargetExt>.w5s</TargetExt>
</PropertyGroup>
<PropertyGroup Label="Vcpkg" Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<VcpkgConfiguration>Debug</VcpkgConfiguration>
<VcpkgTriplet>x86-windows-static-md</VcpkgTriplet>
</PropertyGroup>
<PropertyGroup Label="Vcpkg" Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<VcpkgTriplet>x86-windows-static-md</VcpkgTriplet>
</PropertyGroup>
<PropertyGroup Label="Vcpkg" Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
<VcpkgTriplet>x86-windows-static-md</VcpkgTriplet>
</PropertyGroup>
<PropertyGroup Label="Vcpkg" Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
<VcpkgTriplet>x86-windows-static-md</VcpkgTriplet>
<VcpkgConfiguration>Debug</VcpkgConfiguration>
</PropertyGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<ClCompile>
<Optimization>Disabled</Optimization>
<AdditionalIncludeDirectories>$(ProjectDir)..\external_dependencies\intel_ipp_6.1.1.035\ia32\include;../Wasabi;../libmp4v2;../libmp4v2/include;../h264dec/lcommon/inc;../h264dec/ldecod/inc</AdditionalIncludeDirectories>
<PreprocessorDefinitions>WIN32;_DEBUG;_WINDOWS;_USRDLL;H264_EXPORTS;_WIN32_WINNT=0x601;H264_IPP;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<MinimalRebuild>false</MinimalRebuild>
<MultiProcessorCompilation>true</MultiProcessorCompilation>
<BasicRuntimeChecks>EnableFastChecks</BasicRuntimeChecks>
<RuntimeLibrary>MultiThreadedDebugDLL</RuntimeLibrary>
<WarningLevel>Level3</WarningLevel>
<DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
<ProgramDataBaseFileName>$(IntDir)$(TargetName).pdb</ProgramDataBaseFileName>
</ClCompile>
<Link>
<AdditionalDependencies>ws2_32.lib;Mfplat.lib;wmcodecdspuuid.lib;mfuuid.lib;%(AdditionalDependencies)</AdditionalDependencies>
<OutputFile>$(OutDir)$(TargetName)$(TargetExt)</OutputFile>
<AdditionalLibraryDirectories>..\intel_ipp_6.1.1.035\ia32\lib;%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
<GenerateDebugInformation>true</GenerateDebugInformation>
<ProgramDatabaseFile>$(IntDir)$(TargetName).pdb</ProgramDatabaseFile>
<SubSystem>Windows</SubSystem>
<TargetMachine>MachineX86</TargetMachine>
<ImageHasSafeExceptionHandlers>false</ImageHasSafeExceptionHandlers>
</Link>
<PostBuildEvent>
<Command>xcopy /Y /D $(OutDir)$(TargetName)$(TargetExt) ..\..\Build\Winamp_$(PlatformShortName)_$(Configuration)\System\
xcopy /Y /D $(IntDir)$(TargetName).pdb ..\..\Build\Winamp_$(PlatformShortName)_$(Configuration)\System\ </Command>
<Message>Post build event: 'xcopy /Y /D $(OutDir)$(TargetName)$(TargetExt) ..\..\Build\Winamp_$(PlatformShortName)_$(Configuration)\System\'</Message>
</PostBuildEvent>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
<ClCompile>
<Optimization>Disabled</Optimization>
<AdditionalIncludeDirectories>$(ProjectDir)..\external_dependencies\intel_ipp_6.1.1.035\ia32\include;../Wasabi;../libmp4v2;../libmp4v2/include;../h264dec/lcommon/inc;../h264dec/ldecod/inc</AdditionalIncludeDirectories>
<PreprocessorDefinitions>WIN64;_DEBUG;_WINDOWS;_USRDLL;H264_EXPORTS;_WIN32_WINNT=0x601;H264_IPP;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<MinimalRebuild>false</MinimalRebuild>
<MultiProcessorCompilation>true</MultiProcessorCompilation>
<BasicRuntimeChecks>EnableFastChecks</BasicRuntimeChecks>
<RuntimeLibrary>MultiThreadedDebugDLL</RuntimeLibrary>
<WarningLevel>Level3</WarningLevel>
<DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
<ProgramDataBaseFileName>$(IntDir)$(TargetName).pdb</ProgramDataBaseFileName>
</ClCompile>
<Link>
<AdditionalDependencies>ws2_32.lib;Mfplat.lib;wmcodecdspuuid.lib;mfuuid.lib;%(AdditionalDependencies)</AdditionalDependencies>
<OutputFile>$(OutDir)$(TargetName)$(TargetExt)</OutputFile>
<AdditionalLibraryDirectories>..\intel_ipp_6.1.1.035\em64t\lib;%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
<GenerateDebugInformation>true</GenerateDebugInformation>
<ProgramDatabaseFile>$(IntDir)$(TargetName).pdb</ProgramDatabaseFile>
<SubSystem>Windows</SubSystem>
<ImageHasSafeExceptionHandlers>false</ImageHasSafeExceptionHandlers>
</Link>
<PostBuildEvent>
<Command>xcopy /Y /D $(OutDir)$(TargetName)$(TargetExt) ..\..\Build\Winamp_$(PlatformShortName)_$(Configuration)\System\
xcopy /Y /D $(IntDir)$(TargetName).pdb ..\..\Build\Winamp_$(PlatformShortName)_$(Configuration)\System\ </Command>
<Message>Post build event: 'xcopy /Y /D $(OutDir)$(TargetName)$(TargetExt) ..\..\Build\Winamp_$(PlatformShortName)_$(Configuration)\System\'</Message>
</PostBuildEvent>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<ClCompile>
<Optimization>MaxSpeed</Optimization>
<IntrinsicFunctions>true</IntrinsicFunctions>
<FavorSizeOrSpeed>Speed</FavorSizeOrSpeed>
<OmitFramePointers>true</OmitFramePointers>
<AdditionalIncludeDirectories>$(ProjectDir)..\external_dependencies\intel_ipp_6.1.1.035\ia32\include;../Wasabi;../libmp4v2;../libmp4v2/include;../h264dec/lcommon/inc;../h264dec/ldecod/inc</AdditionalIncludeDirectories>
<PreprocessorDefinitions>WIN32;NDEBUG;_WINDOWS;_USRDLL;H264_EXPORTS;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<StringPooling>true</StringPooling>
<MultiProcessorCompilation>true</MultiProcessorCompilation>
<RuntimeLibrary>MultiThreadedDLL</RuntimeLibrary>
<BufferSecurityCheck>true</BufferSecurityCheck>
<FunctionLevelLinking>true</FunctionLevelLinking>
<WarningLevel>Level3</WarningLevel>
<DebugInformationFormat>None</DebugInformationFormat>
<ProgramDataBaseFileName>$(IntDir)$(TargetName).pdb</ProgramDataBaseFileName>
</ClCompile>
<Link>
<AdditionalDependencies>ws2_32.lib;Mfplat.lib;wmcodecdspuuid.lib;mfuuid.lib;%(AdditionalDependencies)</AdditionalDependencies>
<OutputFile>$(OutDir)$(TargetName)$(TargetExt)</OutputFile>
<AdditionalLibraryDirectories>..\intel_ipp_6.1.1.035\ia32\lib;%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
<DelayLoadDLLs>ws2_32.dll;Mfplat.dll;ole32.dll;%(DelayLoadDLLs)</DelayLoadDLLs>
<GenerateDebugInformation>false</GenerateDebugInformation>
<ProgramDatabaseFile>$(IntDir)$(TargetName).pdb</ProgramDatabaseFile>
<SubSystem>Windows</SubSystem>
<OptimizeReferences>true</OptimizeReferences>
<EnableCOMDATFolding>true</EnableCOMDATFolding>
<ImportLibrary>$(ProjectDir)x86_Release\$(ProjectName).lib</ImportLibrary>
<TargetMachine>MachineX86</TargetMachine>
<ImageHasSafeExceptionHandlers>false</ImageHasSafeExceptionHandlers>
</Link>
<PostBuildEvent>
<Command>xcopy /Y /D $(OutDir)$(TargetName)$(TargetExt) ..\..\Build\Winamp_$(PlatformShortName)_$(Configuration)\System\ </Command>
<Message>Post build event: 'xcopy /Y /D $(OutDir)$(TargetName)$(TargetExt) ..\..\Build\Winamp_$(PlatformShortName)_$(Configuration)\System\'</Message>
</PostBuildEvent>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
<ClCompile>
<Optimization>MaxSpeed</Optimization>
<IntrinsicFunctions>true</IntrinsicFunctions>
<FavorSizeOrSpeed>Speed</FavorSizeOrSpeed>
<OmitFramePointers>true</OmitFramePointers>
<AdditionalIncludeDirectories>$(ProjectDir)..\external_dependencies\intel_ipp_6.1.1.035\ia32\include;../Wasabi;../libmp4v2;../libmp4v2/include;../h264dec/lcommon/inc;../h264dec/ldecod/inc</AdditionalIncludeDirectories>
<PreprocessorDefinitions>WIN64;NDEBUG;_WINDOWS;_USRDLL;H264_EXPORTS;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<StringPooling>true</StringPooling>
<MultiProcessorCompilation>true</MultiProcessorCompilation>
<RuntimeLibrary>MultiThreadedDLL</RuntimeLibrary>
<BufferSecurityCheck>true</BufferSecurityCheck>
<FunctionLevelLinking>true</FunctionLevelLinking>
<WarningLevel>Level3</WarningLevel>
<DebugInformationFormat>None</DebugInformationFormat>
<ProgramDataBaseFileName>$(IntDir)$(TargetName).pdb</ProgramDataBaseFileName>
</ClCompile>
<Link>
<AdditionalDependencies>ws2_32.lib;Mfplat.lib;wmcodecdspuuid.lib;mfuuid.lib;%(AdditionalDependencies)</AdditionalDependencies>
<OutputFile>$(OutDir)$(TargetName)$(TargetExt)</OutputFile>
<AdditionalLibraryDirectories>..\intel_ipp_6.1.1.035\em64t\lib;%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
<DelayLoadDLLs>ws2_32.dll;Mfplat.dll;ole32.dll;%(DelayLoadDLLs)</DelayLoadDLLs>
<GenerateDebugInformation>false</GenerateDebugInformation>
<ProgramDatabaseFile>$(IntDir)$(TargetName).pdb</ProgramDatabaseFile>
<SubSystem>Windows</SubSystem>
<OptimizeReferences>true</OptimizeReferences>
<EnableCOMDATFolding>true</EnableCOMDATFolding>
<ImportLibrary>$(ProjectDir)x64_Release\$(ProjectName).lib</ImportLibrary>
<ImageHasSafeExceptionHandlers>false</ImageHasSafeExceptionHandlers>
</Link>
<PostBuildEvent>
<Command>xcopy /Y /D $(OutDir)$(TargetName)$(TargetExt) ..\..\Build\Winamp_$(PlatformShortName)_$(Configuration)\System\ </Command>
<Message>Post build event: 'xcopy /Y /D $(OutDir)$(TargetName)$(TargetExt) ..\..\Build\Winamp_$(PlatformShortName)_$(Configuration)\System\'</Message>
</PostBuildEvent>
</ItemDefinitionGroup>
<ItemGroup>
<ProjectReference Include="..\external_dependencies\libmp4v2\libmp4v2.vcxproj">
<Project>{efb9b882-6a8b-463d-a8e3-a2807afc5d9f}</Project>
</ProjectReference>
<ProjectReference Include="..\Wasabi\Wasabi.vcxproj">
<Project>{3e0bfa8a-b86a-42e9-a33f-ec294f823f7f}</Project>
</ProjectReference>
</ItemGroup>
<ItemGroup>
<ClCompile Include="annexb.c" />
<ClCompile Include="avi_h264_decoder.cpp" />
<ClCompile Include="h264_flv_decoder.cpp" />
<ClCompile Include="h264_mkv_decoder.cpp" />
<ClCompile Include="h264_mp4_decoder.cpp" />
<ClCompile Include="main.cpp" />
<ClCompile Include="MFTDecoder.cpp" />
<ClCompile Include="NSVFactory.cpp" />
<ClCompile Include="nsv_h264_decoder.cpp" />
</ItemGroup>
<ItemGroup>
<ClInclude Include="..\in_mp4\mpeg4video.h" />
<ClInclude Include="annexb.h" />
<ClInclude Include="api__h264.h" />
<ClInclude Include="avi_h264_decoder.h" />
<ClInclude Include="h264_flv_decoder.h" />
<ClInclude Include="h264_mkv_decoder.h" />
<ClInclude Include="h264_mp4_decoder.h" />
<ClInclude Include="MFTDecoder.h" />
<ClInclude Include="NSVFactory.h" />
<ClInclude Include="nsv_h264_decoder.h" />
<ClInclude Include="resource.h" />
</ItemGroup>
<ItemGroup>
<ResourceCompile Include="h264.rc" />
</ItemGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
<ImportGroup Label="ExtensionTargets">
</ImportGroup>
</Project>

Src/h264/h264.vcxproj.filters Normal file
@@ -0,0 +1,83 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup>
<ClCompile Include="annexb.c">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="avi_h264_decoder.cpp">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="h264_flv_decoder.cpp">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="h264_mkv_decoder.cpp">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="h264_mp4_decoder.cpp">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="main.cpp">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="MFTDecoder.cpp">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="nsv_h264_decoder.cpp">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="NSVFactory.cpp">
<Filter>Source Files</Filter>
</ClCompile>
</ItemGroup>
<ItemGroup>
<ClInclude Include="annexb.h">
<Filter>Header Files</Filter>
</ClInclude>
<ClInclude Include="api__h264.h">
<Filter>Header Files</Filter>
</ClInclude>
<ClInclude Include="avi_h264_decoder.h">
<Filter>Header Files</Filter>
</ClInclude>
<ClInclude Include="h264_flv_decoder.h">
<Filter>Header Files</Filter>
</ClInclude>
<ClInclude Include="h264_mkv_decoder.h">
<Filter>Header Files</Filter>
</ClInclude>
<ClInclude Include="h264_mp4_decoder.h">
<Filter>Header Files</Filter>
</ClInclude>
<ClInclude Include="MFTDecoder.h">
<Filter>Header Files</Filter>
</ClInclude>
<ClInclude Include="..\in_mp4\mpeg4video.h">
<Filter>Header Files</Filter>
</ClInclude>
<ClInclude Include="nsv_h264_decoder.h">
<Filter>Header Files</Filter>
</ClInclude>
<ClInclude Include="NSVFactory.h">
<Filter>Header Files</Filter>
</ClInclude>
<ClInclude Include="resource.h">
<Filter>Header Files</Filter>
</ClInclude>
</ItemGroup>
<ItemGroup>
<Filter Include="Header Files">
<UniqueIdentifier>{2b9031b7-6c12-48a4-b823-a82fe9fbd180}</UniqueIdentifier>
</Filter>
<Filter Include="Ressource Files">
<UniqueIdentifier>{56f52a2e-633e-436b-aec7-d2db3331ad81}</UniqueIdentifier>
</Filter>
<Filter Include="Source Files">
<UniqueIdentifier>{6021c9a2-1324-4073-a278-8c8d47db3ae9}</UniqueIdentifier>
</Filter>
</ItemGroup>
<ItemGroup>
<ResourceCompile Include="h264.rc">
<Filter>Resource Files</Filter>
</ResourceCompile>
</ItemGroup>
</Project>

Src/h264/h264_flv_decoder.cpp Normal file
@@ -0,0 +1,220 @@
#include "h264_flv_decoder.h"
#include "../Winamp/wa_ipc.h" // for YV12_PLANES
#include <Mferror.h>
int FLVDecoderCreator::CreateVideoDecoder(int format_type, int width, int height, ifc_flvvideodecoder **decoder)
{
if (format_type == FLV::VIDEO_FORMAT_AVC)
{
MFTDecoder *ctx = new MFTDecoder();
if (!ctx || FAILED(ctx->Open())) {
delete ctx;
return CREATEDECODER_FAILURE;
}
*decoder = new FLVH264(ctx);
return CREATEDECODER_SUCCESS;
}
return CREATEDECODER_NOT_MINE;
}
int FLVDecoderCreator::HandlesVideo(int format_type)
{
if (format_type == FLV::VIDEO_FORMAT_AVC)
{
return CREATEDECODER_SUCCESS;
}
return CREATEDECODER_NOT_MINE;
}
#define CBCLASS FLVDecoderCreator
START_DISPATCH;
CB(CREATE_VIDEO_DECODER, CreateVideoDecoder)
CB(HANDLES_VIDEO, HandlesVideo)
END_DISPATCH;
#undef CBCLASS
/* --- */
uint32_t GetNALUSize(uint64_t nalu_size_bytes, const uint8_t *h264_data, size_t data_len);
uint32_t Read24(const uint8_t *data);
FLVH264::FLVH264(MFTDecoder *decoder) : decoder(decoder)
{
sequence_headers_parsed=0;
nalu_size_bytes=0;
}
FLVH264::~FLVH264()
{
for (size_t i=0;i<buffered_frames.size();i++) {
nullsoft_h264_frame_data frame_data = buffered_frames[i];
decoder->FreeFrame((YV12_PLANES *)frame_data.data, frame_data.decoder_data);
}
delete decoder;
}
int FLVH264::GetOutputFormat(int *x, int *y, int *color_format)
{
UINT width, height;
bool local_flip=false;
double aspect_ratio;
if (SUCCEEDED(decoder->GetOutputFormat(&width, &height, &local_flip, &aspect_ratio))) {
*x = width;
*y = height;
*color_format = '21VY';
return FLV_VIDEO_SUCCESS;
}
return FLV_VIDEO_FAILURE;
}
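// DecodeSample
//
// FLV AVC payloads begin with an AVCPacketType byte and a 24-bit composition
// time offset. Packet type 0 carries an AVCDecoderConfigurationRecord, from
// which the NALU length-prefix size and the SPS/PPS sets are extracted and
// fed to the decoder; packet type 1 carries length-prefixed NAL units, fed
// one by one while draining buffered frames whenever the MFT reports
// MF_E_NOTACCEPTING.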
int FLVH264::DecodeSample(const void *inputBuffer, size_t inputBufferBytes, int32_t timestamp)
{
const uint8_t *h264_data = (const uint8_t *)inputBuffer;
if (*h264_data == 0 && inputBufferBytes >= 10) // sequence headers
{
h264_data++; // skip packet type
uint32_t timestamp_offset = Read24(h264_data);
h264_data+=3;
inputBufferBytes -=4;
h264_data+=4; // don't care about level & profile
inputBufferBytes -=4;
nalu_size_bytes = (*h264_data++ & 0x3)+1;
inputBufferBytes--;
size_t num_sps = *h264_data++ & 0x1F;
inputBufferBytes--;
for (size_t i=0;i!=num_sps;i++)
{
if (inputBufferBytes > 2)
{
uint16_t sps_size = (h264_data[0] << 8) | h264_data[1];
h264_data+=2;
inputBufferBytes-=2;
//H264_ProcessSPS(decoder, h264_data+1, sps_size);
if (inputBufferBytes >= sps_size)
{
decoder->Feed(h264_data, sps_size, timestamp+timestamp_offset);
h264_data+=sps_size;
inputBufferBytes-=sps_size;
}
}
}
if (inputBufferBytes)
{
size_t num_pps = *h264_data++;
inputBufferBytes--;
for (size_t i=0;i!=num_pps;i++)
{
if (inputBufferBytes > 2)
{
uint16_t sps_size = (h264_data[0] << 8) | h264_data[1];
h264_data+=2;
inputBufferBytes-=2;
//H264_ProcessPPS(decoder, h264_data+1, sps_size);
if (inputBufferBytes >= sps_size)
{
decoder->Feed(h264_data, sps_size, timestamp+timestamp_offset);
h264_data+=sps_size;
inputBufferBytes-=sps_size;
}
}
}
}
sequence_headers_parsed=1;
}
else if (*h264_data == 1) // frame
{
h264_data++;
inputBufferBytes--;
if (inputBufferBytes < 3)
return FLV_VIDEO_FAILURE;
uint32_t timestamp_offset = Read24(h264_data);
h264_data+=3;
inputBufferBytes-=3;
while (inputBufferBytes)
{
uint32_t this_size =GetNALUSize(nalu_size_bytes, h264_data, inputBufferBytes);
if (this_size == 0)
return FLV_VIDEO_FAILURE;
inputBufferBytes-=nalu_size_bytes;
h264_data+=nalu_size_bytes;
if (this_size > inputBufferBytes)
return FLV_VIDEO_FAILURE;
for (;;) {
HRESULT hr = decoder->Feed(h264_data, this_size, timestamp+timestamp_offset);
if (hr == MF_E_NOTACCEPTING) {
nullsoft_h264_frame_data frame_data;
if (FAILED(decoder->GetFrame((YV12_PLANES **)&frame_data.data, &frame_data.decoder_data, &frame_data.local_timestamp))) {
continue;
}
buffered_frames.push_back(frame_data);
} else if (FAILED(hr)) {
return FLV_VIDEO_FAILURE;
} else {
break;
}
}
inputBufferBytes-=this_size;
h264_data+=this_size;
}
}
return FLV_VIDEO_SUCCESS;
}
void FLVH264::Flush()
{
for (size_t i=0;i<buffered_frames.size();i++) {
nullsoft_h264_frame_data frame_data = buffered_frames[i];
decoder->FreeFrame((YV12_PLANES *)frame_data.data, frame_data.decoder_data);
}
decoder->Flush();
}
void FLVH264::Close()
{
delete this;
}
int FLVH264::GetPicture(void **data, void **decoder_data, uint64_t *timestamp)
{
if (!buffered_frames.empty()) {
nullsoft_h264_frame_data frame_data = buffered_frames[0];
buffered_frames.erase(buffered_frames.begin());
*data = frame_data.data;
*decoder_data = frame_data.decoder_data;
*timestamp = frame_data.local_timestamp;
return FLV_VIDEO_SUCCESS;
}
if (SUCCEEDED(decoder->GetFrame((YV12_PLANES **)data, decoder_data, timestamp))) {
return FLV_VIDEO_SUCCESS;
} else {
return FLV_VIDEO_FAILURE;
}
}
void FLVH264::FreePicture(void *data, void *decoder_data)
{
decoder->FreeFrame((YV12_PLANES *)data, decoder_data);
}
int FLVH264::Ready()
{
return sequence_headers_parsed;
}
#define CBCLASS FLVH264
START_DISPATCH;
CB(FLV_VIDEO_GETOUTPUTFORMAT, GetOutputFormat)
CB(FLV_VIDEO_DECODE, DecodeSample)
VCB(FLV_VIDEO_FLUSH, Flush)
VCB(FLV_VIDEO_CLOSE, Close)
CB(FLV_VIDEO_GET_PICTURE, GetPicture)
VCB(FLV_VIDEO_FREE_PICTURE, FreePicture)
CB(FLV_VIDEO_READY, Ready)
END_DISPATCH;
#undef CBCLASS

Src/h264/h264_flv_decoder.h Normal file
@@ -0,0 +1,42 @@
#pragma once
#include "../Plugins/Input/in_flv/svc_flvdecoder.h"
#include "../Plugins/Input/in_flv/FLVVideoHeader.h"
#include "../Plugins/Input/in_flv/ifc_flvvideodecoder.h"
#include "MFTDecoder.h"
#include <vector>
// {7BBC5D47-7E96-4e27-85DB-FF4190428CD0}
static const GUID flv_h264_guid =
{ 0x7bbc5d47, 0x7e96, 0x4e27, { 0x85, 0xdb, 0xff, 0x41, 0x90, 0x42, 0x8c, 0xd0 } };
class FLVDecoderCreator : public svc_flvdecoder
{
public:
static const char *getServiceName() { return "H.264 FLV Decoder"; }
static GUID getServiceGuid() { return flv_h264_guid; }
int CreateVideoDecoder(int format_type, int width, int height, ifc_flvvideodecoder **decoder);
int HandlesVideo(int format_type);
protected:
RECVS_DISPATCH;
};
class FLVH264 : public ifc_flvvideodecoder
{
public:
FLVH264(MFTDecoder *decoder);
~FLVH264();
int GetOutputFormat(int *x, int *y, int *color_format);
int DecodeSample(const void *inputBuffer, size_t inputBufferBytes, int32_t timestamp);
void Flush();
void Close();
int GetPicture(void **data, void **decoder_data, uint64_t *timestamp);
void FreePicture(void *data, void *decoder_data);
int Ready();
private:
MFTDecoder *decoder;
int sequence_headers_parsed;
uint32_t nalu_size_bytes;
std::vector<nullsoft_h264_frame_data> buffered_frames;
protected:
RECVS_DISPATCH;
};

Src/h264/h264_mkv_decoder.cpp Normal file
@@ -0,0 +1,237 @@
#include "h264_mkv_decoder.h"
#include "../Winamp/wa_ipc.h" // for YV12_PLANES
#include <winsock.h>
#include <mmsystem.h>
#include <Mferror.h>
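// MKVDecoderCreator::CreateVideoDecoder
//
// For "V_MPEG4/ISO/AVC" tracks the Matroska CodecPrivate blob is an
// AVCDecoderConfigurationRecord: after the 4-byte version/profile/level
// prefix it holds the NALU length-prefix size (lengthSizeMinusOne) followed
// by the SPS and PPS sets, which are fed to the decoder before any frames.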
int MKVDecoderCreator::CreateVideoDecoder(const char *codec_id, const nsmkv::TrackEntryData *track_entry_data, const nsmkv::VideoData *video_data, ifc_mkvvideodecoder **decoder)
{
if (!strcmp(codec_id, "V_MPEG4/ISO/AVC"))
{
const uint8_t *init_data = (const uint8_t *)track_entry_data->codec_private;
size_t init_data_len = track_entry_data->codec_private_len;
if (init_data && init_data_len >= 6)
{
MFTDecoder *ctx = new MFTDecoder;
if (!ctx)
return CREATEDECODER_FAILURE;
if (FAILED(ctx->Open())) {
delete ctx;
return CREATEDECODER_FAILURE;
}
init_data+=4; // don't care about level & profile
init_data_len-=4;
// read NALU header size length
uint8_t nalu_minus_one = *init_data++ & 0x3;
init_data_len--;
// number of SPS NAL units
uint8_t num_sps = *init_data++ & 0x1F;
init_data_len--;
for (uint8_t i=0;i!=num_sps;i++)
{
if (init_data_len < 2)
{
delete ctx;
return CREATEDECODER_FAILURE;
}
uint16_t *s = (uint16_t *)init_data;
uint16_t sps_size = htons(*s);
init_data+=2;
init_data_len-=2;
if (init_data_len < sps_size)
{
delete ctx;
return CREATEDECODER_FAILURE;
}
ctx->Feed(init_data, sps_size, 0);
init_data+=sps_size;
init_data_len-=sps_size;
}
// read PPS NAL units
if (init_data_len)
{
// number of PPS NAL units
uint8_t num_pps = *init_data++ & 0x1F;
init_data_len--;
for (uint8_t i=0;i!=num_pps;i++)
{
if (init_data_len < 2)
{
delete ctx;
return CREATEDECODER_FAILURE;
}
uint16_t *s = (uint16_t *)init_data;
uint16_t pps_size = htons(*s);
init_data+=2;
init_data_len-=2;
if (init_data_len < pps_size)
{
delete ctx;
return CREATEDECODER_FAILURE;
}
ctx->Feed(init_data, pps_size, 0);
init_data+=pps_size;
init_data_len-=pps_size;
}
}
// if we made it here, we should be good
*decoder = new MKVH264(ctx, nalu_minus_one, video_data);
return CREATEDECODER_SUCCESS;
}
else
{
return CREATEDECODER_FAILURE;
}
}
else
{
return CREATEDECODER_NOT_MINE;
}
}
#define CBCLASS MKVDecoderCreator
START_DISPATCH;
CB(CREATE_VIDEO_DECODER, CreateVideoDecoder)
END_DISPATCH;
#undef CBCLASS
MKVH264::MKVH264(MFTDecoder *ctx, uint8_t nalu_minus_one, const nsmkv::VideoData *video_data) : decoder(ctx), video_data(video_data)
{
nalu_size = nalu_minus_one + 1;
width=0;
height=0;
}
MKVH264::~MKVH264()
{
for (size_t i=0;i<buffered_frames.size();i++) {
nullsoft_h264_frame_data frame_data = buffered_frames[i];
decoder->FreeFrame((YV12_PLANES *)frame_data.data, frame_data.decoder_data);
}
delete decoder;
}
int MKVH264::GetOutputProperties(int *x, int *y, int *color_format, double *aspect_ratio)
{
if (decoder) {
bool flip;
if (SUCCEEDED(decoder->GetOutputFormat(&width, &height, &flip, aspect_ratio))) {
*x = width;
*y = height;
*color_format = htonl('YV12');
return MKV_SUCCESS;
}
}
return MKV_FAILURE;
}
uint32_t GetNALUSize(uint64_t nalu_size_bytes, const uint8_t *h264_data, size_t data_len);
int MKVH264::DecodeBlock(const void *inputBuffer, size_t inputBufferBytes, uint64_t timestamp)
{
const uint8_t *h264_data = (const uint8_t *)inputBuffer;
while (inputBufferBytes) {
uint32_t this_size = GetNALUSize(nalu_size, h264_data, inputBufferBytes);
if (this_size == 0)
return MKV_FAILURE;
inputBufferBytes-=nalu_size;
h264_data+=nalu_size;
if (this_size > inputBufferBytes)
return MKV_FAILURE;
for (;;) {
HRESULT hr = decoder->Feed(h264_data, this_size, timestamp);
if (hr == MF_E_NOTACCEPTING) {
nullsoft_h264_frame_data frame_data;
if (FAILED(decoder->GetFrame((YV12_PLANES **)&frame_data.data, &frame_data.decoder_data, &frame_data.local_timestamp))) {
continue;
}
buffered_frames.push_back(frame_data);
} else if (FAILED(hr)) {
return MKV_FAILURE;
} else {
break;
}
}
inputBufferBytes-=this_size;
h264_data+=this_size;
}
return MKV_SUCCESS;
}
void MKVH264::Flush()
{
for (size_t i=0;i<buffered_frames.size();i++) {
nullsoft_h264_frame_data frame_data = buffered_frames[i];
decoder->FreeFrame((YV12_PLANES *)frame_data.data, frame_data.decoder_data);
}
buffered_frames.clear();
if (decoder) {
decoder->Flush();
}
}
int MKVH264::GetPicture(void **data, void **decoder_data, uint64_t *timestamp)
{
if (!buffered_frames.empty()) {
nullsoft_h264_frame_data frame_data = buffered_frames[0];
buffered_frames.erase(buffered_frames.begin());
*data = frame_data.data;
*decoder_data = frame_data.decoder_data;
*timestamp = frame_data.local_timestamp;
return MKV_SUCCESS;
}
if (SUCCEEDED(decoder->GetFrame((YV12_PLANES **)data, decoder_data, timestamp))) {
return MKV_SUCCESS;
} else {
return MKV_FAILURE;
}
}
void MKVH264::FreePicture(void *data, void *decoder_data)
{
decoder->FreeFrame((YV12_PLANES *)data, decoder_data);
}
void MKVH264::EndOfStream()
{
if (decoder) {
decoder->Drain();
}
}
void MKVH264::HurryUp(int state)
{
// TODO(benski)
//if (decoder)
// H264_HurryUp(decoder, state);
}
void MKVH264::Close()
{
delete this;
}
#define CBCLASS MKVH264
START_DISPATCH;
CB(GET_OUTPUT_PROPERTIES, GetOutputProperties)
CB(DECODE_BLOCK, DecodeBlock)
VCB(FLUSH, Flush)
VCB(CLOSE, Close)
CB(GET_PICTURE, GetPicture)
VCB(FREE_PICTURE, FreePicture)
VCB(END_OF_STREAM, EndOfStream)
VCB(HURRY_UP, HurryUp)
END_DISPATCH;
#undef CBCLASS


@ -0,0 +1,44 @@
#pragma once
#include "../Plugins/Input/in_mkv/ifc_mkvvideodecoder.h"
#include "../Plugins/Input/in_mkv/svc_mkvdecoder.h"
#include "MFTDecoder.h"
#include <vector>
// {8CC583E7-46DC-4736-A7CA-1159A70ADCAE}
static const GUID mkv_h264_guid =
{ 0x8cc583e7, 0x46dc, 0x4736, { 0xa7, 0xca, 0x11, 0x59, 0xa7, 0xa, 0xdc, 0xae } };
class MKVDecoderCreator : public svc_mkvdecoder
{
public:
static const char *getServiceName() { return "H.264 MKV Decoder"; }
static GUID getServiceGuid() { return mkv_h264_guid; }
int CreateVideoDecoder(const char *codec_id, const nsmkv::TrackEntryData *track_entry_data, const nsmkv::VideoData *video_data, ifc_mkvvideodecoder **decoder);
protected:
RECVS_DISPATCH;
};
class MKVH264 : public ifc_mkvvideodecoder
{
public:
MKVH264(MFTDecoder *ctx, uint8_t nalu_size_minus_one, const nsmkv::VideoData *video_data);
~MKVH264();
int GetOutputProperties(int *x, int *y, int *color_format, double *aspect_ratio);
int DecodeBlock(const void *inputBuffer, size_t inputBufferBytes, uint64_t timestamp);
void Flush();
void Close();
int GetPicture(void **data, void **decoder_data, uint64_t *timestamp);
void FreePicture(void *data, void *decoder_data);
void EndOfStream();
void HurryUp(int state);
private:
MFTDecoder *decoder;
UINT width, height;
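// byte width of each NALU length prefix, parsed from the track's codec_private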
uint8_t nalu_size;
const nsmkv::VideoData *video_data;
std::vector<nullsoft_h264_frame_data> buffered_frames;
protected:
RECVS_DISPATCH;
};


@ -0,0 +1,212 @@
#include "h264_mp4_decoder.h"
#include "../winamp/wa_ipc.h"
#include <Mferror.h>
uint32_t Read24(const uint8_t *data)
{
// 24-bit big-endian size
return ((uint32_t)data[0] << 16) | ((uint32_t)data[1] << 8) | data[2];
}
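// Reads the big-endian NALU length prefix (nalu_size_bytes wide); returns 0 if not enough data remains.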
uint32_t GetNALUSize(uint64_t nalu_size_bytes, const uint8_t *h264_data, size_t data_len)
{
if ((data_len) < (nalu_size_bytes))
return 0;
switch(nalu_size_bytes)
{
case 1:
return *h264_data;
case 2:
{
return (h264_data[0] << 8) | h264_data[1];
}
case 3:
{
return Read24(h264_data);
}
case 4:
{
// 32-bit big-endian size, read byte-wise to avoid an unaligned access
return ((uint32_t)h264_data[0] << 24) | ((uint32_t)h264_data[1] << 16) | ((uint32_t)h264_data[2] << 8) | h264_data[3];
}
}
return 0;
}
H264MP4Decoder::H264MP4Decoder()
{
nalu_size_bytes=0;
width=0;
height=0;
}
H264MP4Decoder::~H264MP4Decoder()
{
for (size_t i=0;i<buffered_frames.size();i++) {
nullsoft_h264_frame_data frame_data = buffered_frames[i];
decoder.FreeFrame((YV12_PLANES *)frame_data.data, frame_data.decoder_data);
}
}
int H264MP4Decoder::Open(MP4FileHandle mp4_file, MP4TrackId mp4_track)
{
this->mp4_file=mp4_file;
this->mp4_track=mp4_track;
if (FAILED(decoder.Open()))
return MP4_VIDEO_FAILURE;
uint8_t **seqHeaders = 0, **pictHeaders = 0;
uint32_t *seqHeadersSize = 0, *pictHeadersSize = 0;
__try
{
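// prime the decoder with the track's SPS and PPS NAL units before feeding samples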
MP4GetTrackH264SeqPictHeaders(mp4_file, mp4_track,
&seqHeaders, &seqHeadersSize,
&pictHeaders, &pictHeadersSize);
if (seqHeadersSize)
{
for (uint32_t i = 0; seqHeadersSize[i] != 0; i++)
{
decoder.Feed(seqHeaders[i], seqHeadersSize[i], 0);
MP4Free(seqHeaders[i]);
}
}
MP4Free(seqHeaders);
MP4Free(seqHeadersSize);
if (pictHeadersSize)
{
for (uint32_t i = 0; pictHeadersSize[i] != 0; i++)
{
decoder.Feed(pictHeaders[i], pictHeadersSize[i], 0);
MP4Free(pictHeaders[i]);
}
}
MP4Free(pictHeaders);
MP4Free(pictHeadersSize);
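// width of the length prefix on each NALU in a sample (from the avcC lengthSizeMinusOne field)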
MP4GetTrackH264LengthSize(mp4_file, mp4_track, &nalu_size_bytes);
}
__except(EXCEPTION_EXECUTE_HANDLER)
{
return MP4_VIDEO_FAILURE;
}
return MP4_VIDEO_SUCCESS;
}
int H264MP4Decoder::GetOutputFormat(int *x, int *y, int *color_format, double *aspect_ratio)
{
bool flip;
if (SUCCEEDED(decoder.GetOutputFormat(&width, &height, &flip, aspect_ratio))) {
*x = width;
*y = height;
*color_format = htonl('YV12');
return MP4_VIDEO_SUCCESS;
}
return MP4_VIDEO_FAILURE;
}
int H264MP4Decoder::DecodeSample(const void *inputBuffer, size_t inputBufferBytes, MP4Timestamp timestamp)
{
const uint8_t *h264_data = (const uint8_t *)inputBuffer;
while (inputBufferBytes)
{
uint32_t this_size =GetNALUSize(nalu_size_bytes, h264_data, inputBufferBytes);
if (this_size == 0)
return MP4_VIDEO_FAILURE;
inputBufferBytes-=nalu_size_bytes;
h264_data+=nalu_size_bytes;
if (this_size > inputBufferBytes)
return MP4_VIDEO_FAILURE;
for (;;) {
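// Media Foundation expects timestamps in 100-nanosecond units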
uint64_t hundrednanos = MP4ConvertFromTrackTimestamp(mp4_file, mp4_track, timestamp, MP4_NANOSECONDS_TIME_SCALE/100ULL);
HRESULT hr = decoder.Feed(h264_data, this_size, hundrednanos);
if (hr == MF_E_NOTACCEPTING) {
nullsoft_h264_frame_data frame_data;
if (FAILED(decoder.GetFrame((YV12_PLANES **)&frame_data.data, &frame_data.decoder_data, &frame_data.local_timestamp))) {
continue;
}
buffered_frames.push_back(frame_data);
} else if (FAILED(hr)) {
return MP4_VIDEO_FAILURE;
} else {
break;
}
}
inputBufferBytes-=this_size;
h264_data+=this_size;
}
return MP4_VIDEO_SUCCESS;
}
int H264MP4Decoder::CanHandleCodec(const char *codecName)
{
return !strcmp(codecName, "avc1");
}
void H264MP4Decoder::Flush()
{
for (size_t i=0;i<buffered_frames.size();i++) {
nullsoft_h264_frame_data frame_data = buffered_frames[i];
decoder.FreeFrame((YV12_PLANES *)frame_data.data, frame_data.decoder_data);
}
buffered_frames.clear();
decoder.Flush();
}
int H264MP4Decoder::GetPicture(void **data, void **decoder_data, MP4Timestamp *timestamp)
{
if (!buffered_frames.empty()) {
nullsoft_h264_frame_data frame_data = buffered_frames[0];
buffered_frames.erase(buffered_frames.begin());
*data = frame_data.data;
*decoder_data = frame_data.decoder_data;
*timestamp = MP4ConvertToTrackTimestamp(mp4_file, mp4_track, frame_data.local_timestamp, MP4_NANOSECONDS_TIME_SCALE/100ULL);
return MP4_VIDEO_SUCCESS;
}
uint64_t local_timestamp;
if (SUCCEEDED(decoder.GetFrame((YV12_PLANES **)data, decoder_data, &local_timestamp))) {
*timestamp = MP4ConvertToTrackTimestamp(mp4_file, mp4_track, local_timestamp, MP4_NANOSECONDS_TIME_SCALE/100ULL);
return MP4_VIDEO_SUCCESS;
} else {
return MP4_VIDEO_FAILURE;
}
}
void H264MP4Decoder::FreePicture(void *data, void *decoder_data)
{
decoder.FreeFrame((YV12_PLANES *)data, decoder_data);
}
void H264MP4Decoder::HurryUp(int state)
{
// TODO if (decoder)
//H264_HurryUp(decoder, state);
}
#define CBCLASS H264MP4Decoder
START_DISPATCH;
CB(MPEG4_VIDEO_OPEN, Open)
CB(MPEG4_VIDEO_GETOUTPUTFORMAT, GetOutputFormat)
CB(MPEG4_VIDEO_DECODE, DecodeSample)
CB(MPEG4_VIDEO_HANDLES_CODEC, CanHandleCodec)
VCB(MPEG4_VIDEO_FLUSH, Flush)
CB(MPEG4_VIDEO_GET_PICTURE, GetPicture)
VCB(MPEG4_VIDEO_FREE_PICTURE, FreePicture)
VCB(MPEG4_VIDEO_HURRY_UP, HurryUp)
END_DISPATCH;
#undef CBCLASS


@ -0,0 +1,37 @@
#pragma once
#include "../Plugins/Input/in_mp4/mpeg4video.h"
#include "MFTDecoder.h"
#include <vector>
// {F13CB206-E8F2-4353-B8B9-587D02CB701C}
static const GUID mp4_h264_guid =
{ 0xf13cb206, 0xe8f2, 0x4353, { 0xb8, 0xb9, 0x58, 0x7d, 0x2, 0xcb, 0x70, 0x1c } };
class H264MP4Decoder : public MP4VideoDecoder
{
public:
static const char *getServiceName() { return "H.264 MP4 Decoder"; }
static GUID getServiceGuid() { return mp4_h264_guid; }
H264MP4Decoder();
~H264MP4Decoder();
private:
/* mpeg4video interface */
int Open(MP4FileHandle mp4_file, MP4TrackId mp4_track);
int GetOutputFormat(int *x, int *y, int *color_format, double *aspect_ratio);
int DecodeSample(const void *inputBuffer, size_t inputBufferBytes, MP4Timestamp timestamp);
void Flush();
void Close();
int CanHandleCodec(const char *codecName);
int GetPicture(void **data, void **decoder_data, MP4Timestamp *timestamp);
void FreePicture(void *data, void *decoder_data);
void HurryUp(int state);
MFTDecoder decoder;
uint32_t nalu_size_bytes;
UINT width, height;
MP4FileHandle mp4_file;
MP4TrackId mp4_track;
std::vector<nullsoft_h264_frame_data> buffered_frames;
protected:
RECVS_DISPATCH;
};

96
Src/h264/main.cpp Normal file

@ -0,0 +1,96 @@
#define WIN32_LEAN_AND_MEAN
#include "api__h264.h"
#include <bfc/platform/export.h>
#include "../Agave/Component/ifc_wa5component.h"
#include "../nu/Singleton.h"
#include "../nu/factoryt.h"
#include "h264_flv_decoder.h"
#include "h264_mp4_decoder.h"
#include "h264_mkv_decoder.h"
#include "avi_h264_decoder.h"
#include "NSVFactory.h"
api_service *WASABI_API_SVC=0;
api_memmgr *WASABI_API_MEMMGR=0;
api_winamp *AGAVE_API_WINAMP=0;
class H264Component : public ifc_wa5component
{
public:
void RegisterServices(api_service *service);
void DeregisterServices(api_service *service);
protected:
RECVS_DISPATCH;
};
template <class api_T>
void ServiceBuild(api_T *&api_t, GUID factoryGUID_t)
{
if (WASABI_API_SVC)
{
waServiceFactory *factory = WASABI_API_SVC->service_getServiceByGuid(factoryGUID_t);
if (factory)
api_t = reinterpret_cast<api_T *>( factory->getInterface() );
}
}
template <class api_T>
void ServiceRelease(api_T *api_t, GUID factoryGUID_t)
{
if (WASABI_API_SVC)
{
waServiceFactory *factory = WASABI_API_SVC->service_getServiceByGuid(factoryGUID_t);
if (factory)
factory->releaseInterface(api_t);
}
api_t = NULL;
}
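// one decoder factory per supported container: FLV, MKV, AVI, MP4 and NSV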
static FLVDecoderCreator flvCreator;
static SingletonServiceFactory<svc_flvdecoder, FLVDecoderCreator> flvFactory;
static MKVDecoderCreator mkvCreator;
static SingletonServiceFactory<svc_mkvdecoder, MKVDecoderCreator> mkvFactory;
static AVIDecoderCreator aviCreator;
static SingletonServiceFactory<svc_avidecoder, AVIDecoderCreator> aviFactory;
static ServiceFactoryT<MP4VideoDecoder, H264MP4Decoder> mp4Factory;
static NSVFactory nsvCreator;
static SingletonServiceFactory<svc_nsvFactory, NSVFactory> nsvFactory;
void H264Component::RegisterServices(api_service *service)
{
WASABI_API_SVC = service;
ServiceBuild(AGAVE_API_WINAMP, winampApiGuid);
if (!AGAVE_API_WINAMP || AGAVE_API_WINAMP->GetRegVer() >= 1)
{
ServiceBuild(WASABI_API_MEMMGR, memMgrApiServiceGuid);
mp4Factory.Register(WASABI_API_SVC);
flvFactory.Register(WASABI_API_SVC, &flvCreator);
mkvFactory.Register(WASABI_API_SVC, &mkvCreator);
aviFactory.Register(WASABI_API_SVC, &aviCreator);
nsvFactory.Register(WASABI_API_SVC, &nsvCreator);
}
}
void H264Component::DeregisterServices(api_service *service)
{
mp4Factory.Deregister(WASABI_API_SVC);
flvFactory.Deregister(WASABI_API_SVC);
mkvFactory.Deregister(WASABI_API_SVC);
aviFactory.Deregister(WASABI_API_SVC);
nsvFactory.Deregister(WASABI_API_SVC);
ServiceRelease(WASABI_API_MEMMGR, memMgrApiServiceGuid);
ServiceRelease(AGAVE_API_WINAMP, winampApiGuid);
}
static H264Component component;
extern "C" DLLEXPORT ifc_wa5component *GetWinamp5SystemComponent()
{
return &component;
}
#define CBCLASS H264Component
START_DISPATCH;
VCB(API_WA5COMPONENT_REGISTERSERVICES, RegisterServices)
VCB(API_WA5COMPONENT_DEREEGISTERSERVICES, DeregisterServices)
END_DISPATCH;
#undef CBCLASS


@ -0,0 +1,77 @@
#include "nsv_h264_decoder.h"
#include "../nsv/nsvlib.h"
#include "../nsv/dec_if.h"
#include <assert.h>
#include <Mferror.h>
H264_Decoder::H264_Decoder()
{
vidbufdec=0;
last_pic = 0;
decoder.Open();
}
H264_Decoder::~H264_Decoder()
{
for (size_t i=0;i<buffered_frames.size();i++) {
nullsoft_h264_frame_data frame_data = buffered_frames[i];
decoder.FreeFrame((YV12_PLANES *)frame_data.data, frame_data.decoder_data);
}
decoder.FreeFrame(vidbufdec, last_pic);
}
int H264_Decoder::decode(int need_kf,
void *_in, int _in_len,
void **out, // out is set to a pointer to data
unsigned int *out_type, // 'Y','V','1','2' is currently defined
int *is_kf)
{
*out_type=NSV_MAKETYPE('Y','V','1','2');
if (last_pic)
{
decoder.FreeFrame(vidbufdec, last_pic);
vidbufdec=0;
last_pic=0;
}
if (_in_len) {
for (;;) {
HRESULT hr = decoder.FeedRaw(_in, _in_len, 0);
if (hr == MF_E_NOTACCEPTING) {
nullsoft_h264_frame_data frame_data;
if (FAILED(decoder.GetFrame((YV12_PLANES **)&frame_data.data, &frame_data.decoder_data, &frame_data.local_timestamp))) {
continue;
}
buffered_frames.push_back(frame_data);
} else if (FAILED(hr)) {
return -1;
} else {
break;
}
}
} else {
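// no input left: ask the decoder to drain so frames still queued inside the MFT can be retrieved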
decoder.Drain();
}
if (SUCCEEDED(decoder.GetFrame(&vidbufdec, &last_pic, 0))) {
*out = vidbufdec;
*is_kf = 1;
} else {
*out = 0;
}
return 0;
}
void H264_Decoder::flush()
{
for ( size_t i = 0; i < buffered_frames.size(); i++ )
{
nullsoft_h264_frame_data frame_data = buffered_frames[ i ];
decoder.FreeFrame( (YV12_PLANES *)frame_data.data, frame_data.decoder_data );
}
buffered_frames.clear();
decoder.Flush();
}


@ -0,0 +1,24 @@
#pragma once
#include "../nsv/dec_if.h"
#include "annexb.h"
#include "MFTDecoder.h"
#include <vector>
class H264_Decoder : public IVideoDecoder
{
public:
H264_Decoder();
~H264_Decoder();
int decode(int need_kf,
void *in, int in_len,
void **out, // out is set to a pointer to data
unsigned int *out_type, // 'Y','V','1','2' is currently defined
int *is_kf);
void flush();
private:
MFTDecoder decoder;
YV12_PLANES *vidbufdec;
void *last_pic;
std::vector<nullsoft_h264_frame_data> buffered_frames;
};

14
Src/h264/resource.h Normal file

@ -0,0 +1,14 @@
//{{NO_DEPENDENCIES}}
// Microsoft Visual C++ generated include file.
// Used by h264.rc
// Next default values for new objects
//
#ifdef APSTUDIO_INVOKED
#ifndef APSTUDIO_READONLY_SYMBOLS
#define _APS_NEXT_RESOURCE_VALUE 101
#define _APS_NEXT_COMMAND_VALUE 40001
#define _APS_NEXT_CONTROL_VALUE 1001
#define _APS_NEXT_SYMED_VALUE 101
#endif
#endif

39
Src/h264/version.rc2 Normal file

@ -0,0 +1,39 @@
/////////////////////////////////////////////////////////////////////////////
//
// Version
//
#include "../Winamp/buildType.h"
VS_VERSION_INFO VERSIONINFO
FILEVERSION WINAMP_PRODUCTVER
PRODUCTVERSION WINAMP_PRODUCTVER
FILEFLAGSMASK 0x17L
#ifdef _DEBUG
FILEFLAGS 0x1L
#else
FILEFLAGS 0x0L
#endif
FILEOS 0x4L
FILETYPE 0x2L
FILESUBTYPE 0x0L
BEGIN
BLOCK "StringFileInfo"
BEGIN
BLOCK "040904b0"
BEGIN
VALUE "CompanyName", "Winamp SA"
VALUE "FileDescription", "Winamp 5.x System Component"
VALUE "FileVersion", STR_WINAMP_PRODUCTVER
VALUE "InternalName", "h264.w5s"
VALUE "LegalCopyright", "Copyright <20> 2015-2023 Ben Allison"
VALUE "LegalTrademarks", "Nullsoft and Winamp are trademarks of Winamp SA"
VALUE "OriginalFilename", "h264.w5s"
VALUE "ProductName", "Winamp H.264 Decoder Service"
VALUE "ProductVersion", STR_WINAMP_PRODUCTVER
END
END
BLOCK "VarFileInfo"
BEGIN
VALUE "Translation", 0x409, 1200
END
END