I need to convert video frames from RGB32 to IYUV, but the Color Converter MFT refuses to process my samples. For every frame I call IMFTransform::ProcessInput() and IMFTransform::ProcessOutput(), but ProcessOutput() returns MF_E_TRANSFORM_NEED_MORE_INPUT. When I then try to feed the MFT another sample, ProcessInput() fails with MF_E_NOTACCEPTING.
I am pasting the code that demonstrates the problem below; hopefully someone here can help.
First, I create the media types:
//DSP input MediaType
CHECK_HR(hr = MFCreateMediaType(&m_pInputMediaType));
CHECK_HR(hr = m_pInputMediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
CHECK_HR(hr = m_pInputMediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32));
CHECK_HR(hr = m_pInputMediaType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
CHECK_HR(hr = MFSetAttributeSize(m_pInputMediaType, MF_MT_FRAME_SIZE, m_pStreamParams->StreamWidth, m_pStreamParams->StreamHeight));
CHECK_HR(hr = MFSetAttributeRatio(m_pInputMediaType, MF_MT_FRAME_RATE, m_pStreamParams->StreamFramerate, 1));
CHECK_HR(hr = MFSetAttributeRatio(m_pInputMediaType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1));
//DSP output MediaType
CHECK_HR(hr = MFCreateMediaType(&m_pIntermediateMediaType));
CHECK_HR(hr = m_pIntermediateMediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
CHECK_HR(hr = m_pIntermediateMediaType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_IYUV));
CHECK_HR(hr = m_pIntermediateMediaType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
CHECK_HR(hr = MFSetAttributeSize(m_pIntermediateMediaType, MF_MT_FRAME_SIZE, m_pStreamParams->StreamWidth, m_pStreamParams->StreamHeight));
CHECK_HR(hr = MFSetAttributeRatio(m_pIntermediateMediaType, MF_MT_FRAME_RATE, m_pStreamParams->StreamFramerate, 1));
CHECK_HR(hr = MFSetAttributeRatio(m_pIntermediateMediaType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1));
Next, I initialize the DSP and set the media types:
CHECK_HR(hr = CoCreateInstance(CLSID_CColorConvertDMO, NULL, CLSCTX_ALL, IID_PPV_ARGS(&m_pColorDSP)));
CHECK_HR(hr = m_pColorDSP->SetInputType(0, m_pInputMediaType, 0));
CHECK_HR(hr = m_pColorDSP->SetOutputType(0, m_pIntermediateMediaType, 0));
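For reference, here is a minimal sketch of how I understand the stream info can be queried once both types are set (this is an assumption on my part and uses the same member variables as above; in the actual frame code below I only call GetInputStreamInfo):

MFT_INPUT_STREAM_INFO  inputInfo  = {};
MFT_OUTPUT_STREAM_INFO outputInfo = {};
// Ask the converter how large its buffers must be and whether it
// allocates output samples itself (MFT_OUTPUT_STREAM_PROVIDES_SAMPLES).
CHECK_HR(hr = m_pColorDSP->GetInputStreamInfo(0, &inputInfo));
CHECK_HR(hr = m_pColorDSP->GetOutputStreamInfo(0, &outputInfo));
// If MFT_OUTPUT_STREAM_PROVIDES_SAMPLES is not set in outputInfo.dwFlags,
// the caller has to supply a sample of at least outputInfo.cbSize bytes
// in MFT_OUTPUT_DATA_BUFFER::pSample before calling ProcessOutput().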
And here is my frame-processing method:
HRESULT LibStreaming3::WriteFrame(DWORD* videoFrameBuffer)
{
    IMFSample *pRGBSample = NULL;
    IMFMediaBuffer *pBuffer = NULL;

    if (!m_bStreaming)
    {
        LOG("Failed: Not streaming!");
        return E_FAIL;
    }

    assert(m_pStreamParams);
    assert(m_pH264Encoder);
    assert(m_pColorDSP);

    const LONG cbWidth = 4 * m_pStreamParams->StreamWidth;
    const DWORD cbBuffer = cbWidth * m_pStreamParams->StreamHeight;
    BYTE *pData = NULL;

    // Create a new memory buffer.
    HRESULT hr = MFCreateMemoryBuffer(cbBuffer, &pBuffer);

    // Lock the buffer and copy the video frame to the buffer.
    if (SUCCEEDED(hr))
    {
        hr = pBuffer->Lock(&pData, NULL, NULL);
    }
    if (SUCCEEDED(hr))
    {
        hr = MFCopyImage(
            pData,                          // Destination buffer.
            cbWidth,                        // Destination stride.
            (BYTE*)videoFrameBuffer,        // First row in source image.
            cbWidth,                        // Source stride.
            cbWidth,                        // Image width in bytes.
            m_pStreamParams->StreamHeight   // Image height in pixels.
            );
    }
    if (pBuffer)
    {
        pBuffer->Unlock();
    }

    do
    {
        // Set the data length of the buffer.
        CHECK_HR(hr = pBuffer->SetCurrentLength(cbBuffer));

        // Create a media sample and add the buffer to the sample.
        CHECK_HR(hr = MFCreateSample(&pRGBSample));
        CHECK_HR(hr = pRGBSample->AddBuffer(pBuffer));

        // Set the time stamp and the duration.
        CHECK_HR(hr = pRGBSample->SetSampleTime(m_rtStart));
        CHECK_HR(hr = pRGBSample->SetSampleDuration(m_rtDuration));

        /************************************************************************/
        /* CONVERT COLORS                                                       */
        /************************************************************************/
        MFT_OUTPUT_DATA_BUFFER IYUVOutputDataBuffer;
        IYUVOutputDataBuffer.dwStreamID = 0;
        IYUVOutputDataBuffer.dwStatus = 0;
        IYUVOutputDataBuffer.pEvents = NULL;
        IYUVOutputDataBuffer.pSample = NULL;
        DWORD dwDSPStatus = 0;
        //IMFSample* pIYUVSample = NULL;

        MFT_INPUT_STREAM_INFO info;
        hr = m_pColorDSP->GetInputStreamInfo(0, &info);

        hr = m_pColorDSP->ProcessInput(0, pRGBSample, 0);   // Accepts only one sample; every subsequent call returns MF_E_NOTACCEPTING.
        hr = m_pColorDSP->ProcessOutput(0, 1, &IYUVOutputDataBuffer, &dwDSPStatus);   // Always returns MF_E_TRANSFORM_NEED_MORE_INPUT.
    } while (false);

    m_rtStart += m_rtDuration;

    SafeRelease(&pRGBSample);
    SafeRelease(&pBuffer);

    return hr;
}
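In case it matters, here is a minimal sketch of how I would pre-allocate an IYUV output sample if the converter turns out to require caller-provided samples (this is only an assumption on my side; in the code above pSample is left NULL, and the snippet refers to the same locals as in WriteFrame):

// Sketch only: allocate an output sample sized for IYUV (4:2:0, 12 bits per pixel).
IMFSample* pIYUVSample = NULL;
IMFMediaBuffer* pIYUVBuffer = NULL;
const DWORD cbIYUV = m_pStreamParams->StreamWidth * m_pStreamParams->StreamHeight * 3 / 2;
CHECK_HR(hr = MFCreateSample(&pIYUVSample));
CHECK_HR(hr = MFCreateMemoryBuffer(cbIYUV, &pIYUVBuffer));
CHECK_HR(hr = pIYUVSample->AddBuffer(pIYUVBuffer));
IYUVOutputDataBuffer.pSample = pIYUVSample;   // instead of NULL as in the code above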
Regards,
Pavel