diff --git a/MAINTAINERS b/MAINTAINERS index 59873f6804c..c4afb5da88c 100644 --- a/MAINTAINERS +++ b/MAINTAINERS @@ -217,7 +217,6 @@ P: Zebediah Figura F: dlls/mf/tests/transform.c F: dlls/winegstreamer/aac_decoder.c F: dlls/winegstreamer/color_convert.c -F: dlls/winegstreamer/h264_decoder.c F: dlls/winegstreamer/media_source.c F: dlls/winegstreamer/mfplat.c F: dlls/winegstreamer/resampler.c diff --git a/dlls/evr/evr.spec b/dlls/evr/evr.spec index 0a3a3fb6d42..7383589573b 100644 --- a/dlls/evr/evr.spec +++ b/dlls/evr/evr.spec @@ -8,7 +8,7 @@ @ stub MFConvertToFP16Array @ stdcall -import MFCopyImage(ptr long ptr long long long) @ stdcall -import MFCreateDXSurfaceBuffer(ptr ptr long ptr) -@ stub MFCreateVideoMediaType +@ stdcall -import MFCreateVideoMediaType(ptr ptr) @ stub MFCreateVideoMediaTypeFromBitMapInfoHeader @ stdcall -import MFCreateVideoMediaTypeFromSubtype(ptr ptr) @ stub MFCreateVideoMediaTypeFromVideoInfoHeader2 diff --git a/dlls/evr/mixer.c b/dlls/evr/mixer.c index 222fc538fec..deedf031472 100644 --- a/dlls/evr/mixer.c +++ b/dlls/evr/mixer.c @@ -45,7 +45,7 @@ struct input_stream IMFMediaType *media_type; MFVideoNormalizedRect rect; unsigned int zorder; - SIZE frame_size; + MFVideoArea aperture; IMFSample *sample; unsigned int sample_requested : 1; }; @@ -102,7 +102,6 @@ struct video_mixer COLORREF rgba; DXVA2_AYUVSample16 ayuv; } bkgnd_color; - MFVideoArea aperture; LONGLONG lower_bound; LONGLONG upper_bound; CRITICAL_SECTION cs; @@ -763,7 +762,7 @@ static HRESULT video_mixer_collect_output_types(struct video_mixer *mixer, const if (count && !(flags & MFT_SET_TYPE_TEST_ONLY)) { - UINT32 fixed_samples, interlace_mode; + UINT32 fixed_samples, interlace_mode, width = video_desc->SampleWidth, height = video_desc->SampleHeight; MFVideoArea aperture; UINT64 par; @@ -775,12 +774,18 @@ static HRESULT video_mixer_collect_output_types(struct video_mixer *mixer, const memcpy(&subtype, &MFVideoFormat_Base, sizeof(subtype)); memset(&aperture, 0, sizeof(aperture)); 
- if (FAILED(IMFMediaType_GetBlob(media_type, &MF_MT_GEOMETRIC_APERTURE, (UINT8 *)&aperture, + if (SUCCEEDED(IMFMediaType_GetBlob(media_type, &MF_MT_GEOMETRIC_APERTURE, (UINT8 *)&aperture, sizeof(aperture), NULL))) { - aperture.Area.cx = video_desc->SampleWidth; - aperture.Area.cy = video_desc->SampleHeight; + width = aperture.OffsetX.value + aperture.Area.cx; + height = aperture.OffsetY.value + aperture.Area.cy; + } + else + { + aperture.Area.cx = width; + aperture.Area.cy = height; } + interlace_mode = video_mixer_get_interlace_mode_from_video_desc(video_desc); mf_get_attribute_uint64(media_type, &MF_MT_PIXEL_ASPECT_RATIO, &par, (UINT64)1 << 32 | 1); mf_get_attribute_uint32(media_type, &MF_MT_FIXED_SIZE_SAMPLES, &fixed_samples, 1); @@ -795,7 +800,7 @@ static HRESULT video_mixer_collect_output_types(struct video_mixer *mixer, const MFCreateMediaType(&rt_media_type); IMFMediaType_CopyAllItems(media_type, (IMFAttributes *)rt_media_type); IMFMediaType_SetGUID(rt_media_type, &MF_MT_SUBTYPE, &subtype); - IMFMediaType_SetUINT64(rt_media_type, &MF_MT_FRAME_SIZE, (UINT64)aperture.Area.cx << 32 | aperture.Area.cy); + IMFMediaType_SetUINT64(rt_media_type, &MF_MT_FRAME_SIZE, (UINT64)width << 32 | height); IMFMediaType_SetBlob(rt_media_type, &MF_MT_GEOMETRIC_APERTURE, (const UINT8 *)&aperture, sizeof(aperture)); IMFMediaType_SetBlob(rt_media_type, &MF_MT_MINIMUM_DISPLAY_APERTURE, (const UINT8 *)&aperture, sizeof(aperture)); IMFMediaType_SetUINT32(rt_media_type, &MF_MT_INTERLACE_MODE, interlace_mode); @@ -884,8 +889,15 @@ static HRESULT WINAPI video_mixer_transform_SetInputType(IMFTransform *iface, DW if (mixer->inputs[0].media_type) IMFMediaType_Release(mixer->inputs[0].media_type); mixer->inputs[0].media_type = media_type; - mixer->inputs[0].frame_size.cx = video_desc.SampleWidth; - mixer->inputs[0].frame_size.cy = video_desc.SampleHeight; + + if (FAILED(IMFMediaType_GetBlob(media_type, &MF_MT_GEOMETRIC_APERTURE, + (BYTE *)&mixer->inputs[0].aperture, 
sizeof(mixer->inputs[0].aperture), NULL))) + { + memset(&mixer->inputs[0].aperture, 0, sizeof(mixer->inputs[0].aperture)); + mixer->inputs[0].aperture.Area.cx = video_desc.SampleWidth; + mixer->inputs[0].aperture.Area.cy = video_desc.SampleHeight; + } + IMFMediaType_AddRef(mixer->inputs[0].media_type); } CoTaskMemFree(guids); @@ -962,11 +974,6 @@ static HRESULT WINAPI video_mixer_transform_SetOutputType(IMFTransform *iface, D if (SUCCEEDED(hr = IDirectXVideoProcessorService_CreateVideoProcessor(service, &mixer->output.rt_formats[i].device, &video_desc, rt_format, MAX_MIXER_INPUT_SUBSTREAMS, &mixer->processor))) { - if (FAILED(IMFMediaType_GetBlob(type, &MF_MT_GEOMETRIC_APERTURE, (UINT8 *)&mixer->aperture, - sizeof(mixer->aperture), NULL))) - { - memset(&mixer->aperture, 0, sizeof(mixer->aperture)); - } if (mixer->output.media_type) IMFMediaType_Release(mixer->output.media_type); mixer->output.media_type = type; @@ -1297,9 +1304,9 @@ static void video_mixer_render(struct video_mixer *mixer, IDirect3DSurface9 *rt) DXVA2_VideoProcessBltParams params = { 0 }; MFVideoNormalizedRect zoom_rect; struct input_stream *stream; + MFVideoArea aperture; HRESULT hr = S_OK; unsigned int i; - RECT dst; if (FAILED(IMFAttributes_GetBlob(mixer->attributes, &VIDEO_ZOOM_RECT, (UINT8 *)&zoom_rect, sizeof(zoom_rect), NULL))) @@ -1308,8 +1315,11 @@ static void video_mixer_render(struct video_mixer *mixer, IDirect3DSurface9 *rt) zoom_rect.right = zoom_rect.bottom = 1.0f; } - SetRect(&dst, 0, 0, mixer->aperture.Area.cx, mixer->aperture.Area.cy); - OffsetRect(&dst, mixer->aperture.OffsetX.value, mixer->aperture.OffsetY.value); + if (FAILED(IMFMediaType_GetBlob(mixer->output.media_type, &MF_MT_GEOMETRIC_APERTURE, + (UINT8 *)&aperture, sizeof(aperture), NULL))) + aperture = mixer->inputs[0].aperture; + SetRect(¶ms.TargetRect, 0, 0, aperture.Area.cx, aperture.Area.cy); + OffsetRect(¶ms.TargetRect, aperture.OffsetX.value, aperture.OffsetY.value); for (i = 0; i < mixer->input_count; ++i) { @@ 
-1326,8 +1336,9 @@ static void video_mixer_render(struct video_mixer *mixer, IDirect3DSurface9 *rt) /* Full input frame corrected to full destination rectangle. */ - video_mixer_scale_rect(&sample->SrcRect, stream->frame_size.cx, stream->frame_size.cy, &zoom_rect); - CopyRect(&sample->DstRect, &dst); + video_mixer_scale_rect(&sample->SrcRect, stream->aperture.Area.cx, stream->aperture.Area.cy, &zoom_rect); + OffsetRect(&sample->SrcRect, stream->aperture.OffsetX.value, stream->aperture.OffsetY.value); + CopyRect(&sample->DstRect, ¶ms.TargetRect); video_mixer_correct_aspect_ratio(&sample->SrcRect, &sample->DstRect); if (video_mixer_rect_needs_scaling(&stream->rect)) @@ -1340,9 +1351,6 @@ static void video_mixer_render(struct video_mixer *mixer, IDirect3DSurface9 *rt) if (SUCCEEDED(hr)) { - SetRect(¶ms.TargetRect, 0, 0, mixer->aperture.Area.cx, mixer->aperture.Area.cy); - OffsetRect(¶ms.TargetRect, mixer->aperture.OffsetX.value, mixer->aperture.OffsetY.value); - params.BackgroundColor = mixer->bkgnd_color.ayuv; params.Alpha = DXVA2_Fixed32OpaqueAlpha(); diff --git a/dlls/evr/presenter.c b/dlls/evr/presenter.c index dfbc61739cc..7ad59ccc958 100644 --- a/dlls/evr/presenter.c +++ b/dlls/evr/presenter.c @@ -361,7 +361,6 @@ static HRESULT video_presenter_configure_output_type(struct video_presenter *pre static HRESULT video_presenter_invalidate_media_type(struct video_presenter *presenter) { IMFMediaType *media_type, *candidate_type; - MFVideoArea aperture = {{ 0 }}; unsigned int idx = 0; RECT rect; HRESULT hr; @@ -374,18 +373,23 @@ static HRESULT video_presenter_invalidate_media_type(struct video_presenter *pre video_presenter_get_native_video_size(presenter); - rect = presenter->dst_rect; - if (rect.left == 0 && rect.right == 0 && rect.bottom == 0 && rect.top == 0) + while (SUCCEEDED(hr = IMFTransform_GetOutputAvailableType(presenter->mixer, 0, idx++, &candidate_type))) { - rect.right = presenter->native_size.cx; - rect.bottom = presenter->native_size.cy; - } + 
MFVideoArea aperture = {{ 0 }}; - aperture.Area.cx = rect.right - rect.left; - aperture.Area.cy = rect.bottom - rect.top; + rect = presenter->dst_rect; + if (!IsRectEmpty(&rect)) + { + aperture.Area.cx = rect.right - rect.left; + aperture.Area.cy = rect.bottom - rect.top; + } + else if (FAILED(IMFMediaType_GetBlob(candidate_type, &MF_MT_GEOMETRIC_APERTURE, (UINT8 *)&aperture, + sizeof(aperture), NULL))) + { + aperture.Area.cx = presenter->native_size.cx; + aperture.Area.cy = presenter->native_size.cy; + } - while (SUCCEEDED(hr = IMFTransform_GetOutputAvailableType(presenter->mixer, 0, idx++, &candidate_type))) - { /* FIXME: check that d3d device supports this format */ if (FAILED(hr = IMFMediaType_CopyAllItems(candidate_type, (IMFAttributes *)media_type))) diff --git a/dlls/evr/tests/evr.c b/dlls/evr/tests/evr.c index 25fb1978443..4674221b86d 100644 --- a/dlls/evr/tests/evr.c +++ b/dlls/evr/tests/evr.c @@ -39,34 +39,39 @@ static void load_resource(const WCHAR *filename, const BYTE **data, DWORD *lengt *length = SizeofResource(GetModuleHandleW(NULL), resource); } -static DWORD compare_rgb32(const BYTE *data, DWORD *length, const RECT *rect, const BYTE *expect) +static DWORD compare_rgb(const BYTE *data, DWORD *length, const SIZE *size, const RECT *rect, const BYTE *expect, UINT bits) { - DWORD x, y, size, diff = 0, width = (rect->right + 0xf) & ~0xf, height = (rect->bottom + 0xf) & ~0xf; + DWORD x, y, step = bits / 8, data_size, diff = 0, width = size->cx, height = size->cy; /* skip BMP header from the dump */ - size = *(DWORD *)(expect + 2 + 2 * sizeof(DWORD)); - *length = *length + size; - expect = expect + size; + data_size = *(DWORD *)(expect + 2 + 2 * sizeof(DWORD)); + *length = *length + data_size; + expect = expect + data_size; - for (y = 0; y < height; y++, data += width * 4, expect += width * 4) + for (y = 0; y < height; y++, data += width * step, expect += width * step) { if (y < rect->top || y >= rect->bottom) continue; for (x = 0; x < width; x++) { if (x 
< rect->left || x >= rect->right) continue; - diff += abs((int)expect[4 * x + 0] - (int)data[4 * x + 0]); - diff += abs((int)expect[4 * x + 1] - (int)data[4 * x + 1]); - diff += abs((int)expect[4 * x + 2] - (int)data[4 * x + 2]); + diff += abs((int)expect[step * x + 0] - (int)data[step * x + 0]); + diff += abs((int)expect[step * x + 1] - (int)data[step * x + 1]); + if (step >= 3) diff += abs((int)expect[step * x + 2] - (int)data[step * x + 2]); } } - size = (rect->right - rect->left) * (rect->bottom - rect->top) * 3; - return diff * 100 / 256 / size; + data_size = (rect->right - rect->left) * (rect->bottom - rect->top) * min(step, 3); + return diff * 100 / 256 / data_size; } -static void dump_rgb32(const BYTE *data, DWORD length, const RECT *rect, HANDLE output) +static DWORD compare_rgb32(const BYTE *data, DWORD *length, const SIZE *size, const RECT *rect, const BYTE *expect) { - DWORD width = (rect->right + 0xf) & ~0xf, height = (rect->bottom + 0xf) & ~0xf; + return compare_rgb(data, length, size, rect, expect, 32); +} + +static void dump_rgb(const BYTE *data, DWORD length, const SIZE *size, HANDLE output, UINT bits) +{ + DWORD width = size->cx, height = size->cy; static const char magic[2] = "BM"; struct { @@ -80,7 +85,7 @@ static void dump_rgb32(const BYTE *data, DWORD length, const RECT *rect, HANDLE .biHeader = { .biSize = sizeof(BITMAPINFOHEADER), .biWidth = width, .biHeight = height, .biPlanes = 1, - .biBitCount = 32, .biCompression = BI_RGB, .biSizeImage = width * height * 4, + .biBitCount = bits, .biCompression = BI_RGB, .biSizeImage = width * height * (bits / 8), }, }; DWORD written; @@ -97,9 +102,15 @@ static void dump_rgb32(const BYTE *data, DWORD length, const RECT *rect, HANDLE ok(written == length, "written %lu bytes\n", written); } +static void dump_rgb32(const BYTE *data, DWORD length, const SIZE *size, HANDLE output) +{ + return dump_rgb(data, length, size, output, 32); +} + #define check_rgb32_data(a, b, c, d) check_rgb32_data_(__LINE__, a, b, 
c, d) static DWORD check_rgb32_data_(int line, const WCHAR *filename, const BYTE *data, DWORD length, const RECT *rect) { + SIZE size = {rect->right, rect->bottom}; WCHAR output_path[MAX_PATH]; const BYTE *expect_data; HRSRC resource; @@ -109,7 +120,7 @@ static DWORD check_rgb32_data_(int line, const WCHAR *filename, const BYTE *data lstrcatW(output_path, filename); output = CreateFileW(output_path, GENERIC_READ|GENERIC_WRITE, 0, NULL, CREATE_ALWAYS, 0, 0); ok(output != INVALID_HANDLE_VALUE, "CreateFileW failed, error %lu\n", GetLastError()); - dump_rgb32(data, length, rect, output); + dump_rgb32(data, length, &size, output); trace("created %s\n", debugstr_w(output_path)); CloseHandle(output); @@ -117,7 +128,7 @@ static DWORD check_rgb32_data_(int line, const WCHAR *filename, const BYTE *data ok(resource != 0, "FindResourceW failed, error %lu\n", GetLastError()); expect_data = LockResource(LoadResource(GetModuleHandleW(NULL), resource)); - return compare_rgb32(data, &length, rect, expect_data); + return compare_rgb32(data, &length, &size, rect, expect_data); } static void set_rect(MFVideoNormalizedRect *rect, float left, float top, float right, float bottom) @@ -3318,9 +3329,8 @@ static void test_mixer_samples(void) DestroyWindow(window); } -static void test_presenter_orientation(const GUID *subtype) +static void create_d3d_sample(IDirect3DDeviceManager9 *manager, const GUID *subtype, IMFSample **sample) { - IMFTopologyServiceLookupClient *lookup_client; static const BITMAPINFOHEADER expect_header = { .biSize = sizeof(BITMAPINFOHEADER), @@ -3329,23 +3339,119 @@ static void test_presenter_orientation(const GUID *subtype) .biCompression = BI_RGB, .biSizeImage = 96 * 96 * 4, }; + DWORD data_size, frame_data_len; + D3DLOCKED_RECT d3d_rect = {0}; + IDirect3DSurface9 *surface; + const BYTE *frame_data; + LONG stride; + HRESULT hr; + + if (IsEqualGUID(subtype, &MFVideoFormat_NV12)) + { + load_resource(L"nv12frame.bmp", &frame_data, &frame_data_len); + /* skip BMP header 
and RGB data from the dump */ + data_size = *(DWORD *)(frame_data + 2); + frame_data_len = frame_data_len - data_size; + frame_data = frame_data + data_size; + ok(frame_data_len == 13824, "got length %lu\n", frame_data_len); + } + else + { + load_resource(L"rgb32frame.bmp", &frame_data, &frame_data_len); + /* skip BMP header from the dump */ + data_size = *(DWORD *)(frame_data + 2 + 2 * sizeof(DWORD)); + frame_data_len -= data_size; + frame_data += data_size; + ok(frame_data_len == 36864, "got length %lu\n", frame_data_len); + } + + surface = create_surface(manager, subtype->Data1, expect_header.biWidth, expect_header.biHeight); + ok(!!surface, "Failed to create input surface.\n"); + hr = IDirect3DSurface9_LockRect(surface, &d3d_rect, NULL, 0); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + if (IsEqualGUID(subtype, &MFVideoFormat_RGB32)) + memcpy(d3d_rect.pBits, frame_data, frame_data_len); + else if (IsEqualGUID(subtype, &MFVideoFormat_NV12)) + { + hr = MFGetStrideForBitmapInfoHeader(subtype->Data1, expect_header.biWidth, &stride); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = MFCopyImage(d3d_rect.pBits, d3d_rect.Pitch, frame_data, stride, expect_header.biWidth, expect_header.biHeight); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + frame_data += stride * expect_header.biHeight; + d3d_rect.pBits = (BYTE *)d3d_rect.pBits + d3d_rect.Pitch * expect_header.biHeight; + hr = MFCopyImage(d3d_rect.pBits, d3d_rect.Pitch, frame_data, stride, expect_header.biWidth, expect_header.biHeight / 2); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + } + hr = IDirect3DSurface9_UnlockRect(surface); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = MFCreateVideoSampleFromSurface((IUnknown *)surface, sample); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + IDirect3DSurface9_Release(surface); +} + +#define check_presenter_output(a, b, c, d) check_presenter_output_(__LINE__, a, b, c, d, FALSE) +static DWORD check_presenter_output_(int line, IMFVideoPresenter *presenter, 
const BITMAPINFOHEADER *expect_header, + const WCHAR *resource, const RECT *rect, BOOL todo) +{ BITMAPINFOHEADER header = {.biSize = sizeof(BITMAPINFOHEADER)}; IMFVideoDisplayControl *display_control; - DWORD diff, data_size, frame_data_len; + DWORD diff, data_size; + LONGLONG timestamp; + BYTE *data; + HRESULT hr; + + hr = IMFVideoPresenter_QueryInterface(presenter, &IID_IMFVideoDisplayControl, (void **)&display_control); + ok_(__FILE__, line)(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFVideoDisplayControl_GetCurrentImage(display_control, &header, &data, &data_size, ×tamp); + if (hr == MF_E_INVALIDREQUEST) + { + Sleep(500); + hr = IMFVideoDisplayControl_GetCurrentImage(display_control, &header, &data, &data_size, ×tamp); + } + ok_(__FILE__, line)(hr == S_OK, "Unexpected hr %#lx.\n", hr); + IMFVideoDisplayControl_Release(display_control); + + ok_(__FILE__, line)(header.biSize == expect_header->biSize, "Unexpected biSize %#lx\n", header.biSize); + todo_wine_if(todo) + ok_(__FILE__, line)(header.biWidth == expect_header->biWidth, "Unexpected biWidth %#lx\n", header.biWidth); + todo_wine_if(todo) + ok_(__FILE__, line)(header.biHeight == expect_header->biHeight, "Unexpected biHeight %#lx\n", header.biHeight); + ok_(__FILE__, line)(header.biPlanes == expect_header->biPlanes, "Unexpected biPlanes %#x\n", header.biPlanes); + ok_(__FILE__, line)(header.biBitCount == expect_header->biBitCount, "Unexpected biBitCount %#x\n", header.biBitCount); + ok_(__FILE__, line)(header.biCompression == expect_header->biCompression, "Unexpected biCompression %#lx\n", header.biCompression); + todo_wine_if(todo) + ok_(__FILE__, line)(header.biSizeImage == expect_header->biSizeImage, "Unexpected biSizeImage %#lx\n", header.biSizeImage); + ok_(__FILE__, line)(header.biXPelsPerMeter == expect_header->biXPelsPerMeter, "Unexpected biXPelsPerMeter %#lx\n", header.biXPelsPerMeter); + ok_(__FILE__, line)(header.biYPelsPerMeter == expect_header->biYPelsPerMeter, "Unexpected biYPelsPerMeter 
%#lx\n", header.biYPelsPerMeter); + ok_(__FILE__, line)(header.biClrUsed == expect_header->biClrUsed, "Unexpected biClrUsed %#lx\n", header.biClrUsed); + ok_(__FILE__, line)(header.biClrImportant == expect_header->biClrImportant, "Unexpected biClrImportant %#lx\n", header.biClrImportant); + + diff = check_rgb32_data(resource, data, header.biSizeImage, rect); + CoTaskMemFree(data); + + return diff; +} + +static void test_presenter_orientation(const GUID *subtype) +{ + IMFTopologyServiceLookupClient *lookup_client; + static const BITMAPINFOHEADER expect_header = + { + .biSize = sizeof(BITMAPINFOHEADER), + .biWidth = 96, .biHeight = 96, + .biPlanes = 1, .biBitCount = 32, + .biCompression = BI_RGB, + .biSizeImage = 96 * 96 * 4, + }; IDirect3DDeviceManager9 *manager; - D3DLOCKED_RECT d3d_rect = {0}; IMFVideoPresenter *presenter; - IDirect3DSurface9 *surface; IMFMediaType *video_type; - const BYTE *frame_data; struct test_host host; IMFTransform *mixer; - LONGLONG timestamp; IMFSample *sample; - LONG stride; HWND window; - BYTE *data; HRESULT hr; + DWORD diff; RECT rect; window = create_window(); @@ -3393,86 +3499,217 @@ static void test_presenter_orientation(const GUID *subtype) hr = IMFVideoPresenter_ProcessMessage(presenter, MFVP_MESSAGE_BEGINSTREAMING, 0); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - if (IsEqualGUID(subtype, &MFVideoFormat_NV12)) + create_d3d_sample(manager, subtype, &sample); + hr = IMFTransform_ProcessInput(mixer, 0, sample, 0); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFVideoPresenter_ProcessMessage(presenter, MFVP_MESSAGE_PROCESSINPUTNOTIFY, 0); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + IMFSample_Release(sample); + + SetRect(&rect, 0, 0, expect_header.biWidth, expect_header.biHeight); + diff = check_presenter_output(presenter, &expect_header, L"rgb32frame-flip.bmp", &rect); + ok(diff <= 5, "Unexpected %lu%% diff\n", diff); + + hr = IMFVideoPresenter_ProcessMessage(presenter, MFVP_MESSAGE_ENDSTREAMING, 0); + ok(hr == S_OK, 
"Unexpected hr %#lx.\n", hr); + +skip_tests: + hr = IMFVideoPresenter_QueryInterface(presenter, &IID_IMFTopologyServiceLookupClient, (void **)&lookup_client); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFTopologyServiceLookupClient_ReleaseServicePointers(lookup_client); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + IMFTopologyServiceLookupClient_Release(lookup_client); + + IMFTransform_Release(mixer); + IMFVideoPresenter_Release(presenter); + + DestroyWindow(window); +} + +static void test_mixer_video_aperture(void) +{ + IMFTopologyServiceLookupClient *lookup_client; + static const BITMAPINFOHEADER expect_header_crop = { - load_resource(L"nv12frame.bmp", &frame_data, &frame_data_len); - /* skip BMP header and RGB data from the dump */ - data_size = *(DWORD *)(frame_data + 2); - frame_data_len = frame_data_len - data_size; - frame_data = frame_data + data_size; - ok(frame_data_len == 13824, "got length %lu\n", frame_data_len); - } - else + .biSize = sizeof(BITMAPINFOHEADER), + .biWidth = 34, .biHeight = 56, + .biPlanes = 1, .biBitCount = 32, + .biCompression = BI_RGB, + .biSizeImage = 34 * 56 * 4, + }; + static const BITMAPINFOHEADER expect_header = { - load_resource(L"rgb32frame.bmp", &frame_data, &frame_data_len); - /* skip BMP header from the dump */ - data_size = *(DWORD *)(frame_data + 2 + 2 * sizeof(DWORD)); - frame_data_len -= data_size; - frame_data += data_size; - ok(frame_data_len == 36864, "got length %lu\n", frame_data_len); - } + .biSize = sizeof(BITMAPINFOHEADER), + .biWidth = 96, .biHeight = 96, + .biPlanes = 1, .biBitCount = 32, + .biCompression = BI_RGB, + .biSizeImage = 96 * 96 * 4, + }; + const MFVideoArea aperture = {.Area = {.cx = 34, .cy = 56}}; + IDirect3DDeviceManager9 *manager; + IMFVideoPresenter *presenter; + IMFMediaType *video_type; + struct test_host host; + IMFTransform *mixer; + IMFSample *sample; + HWND window; + HRESULT hr; + DWORD diff; + RECT rect; - surface = create_surface(manager, subtype->Data1, 
expect_header.biWidth, expect_header.biHeight); - ok(!!surface, "Failed to create input surface.\n"); - hr = IDirect3DSurface9_LockRect(surface, &d3d_rect, NULL, 0); + window = create_window(); + + hr = MFCreateVideoMixer(NULL, &IID_IDirect3DDevice9, &IID_IMFTransform, (void **)&mixer); + ok(hr == S_OK, "Failed to create a mixer, hr %#lx.\n", hr); + hr = MFCreateVideoPresenter(NULL, &IID_IDirect3DDevice9, &IID_IMFVideoPresenter, (void **)&presenter); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - if (IsEqualGUID(subtype, &MFVideoFormat_RGB32)) - memcpy(d3d_rect.pBits, frame_data, frame_data_len); - else if (IsEqualGUID(subtype, &MFVideoFormat_NV12)) - { - hr = MFGetStrideForBitmapInfoHeader(subtype->Data1, expect_header.biWidth, &stride); - ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - hr = MFCopyImage(d3d_rect.pBits, d3d_rect.Pitch, frame_data, stride, expect_header.biWidth, expect_header.biHeight); - ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - frame_data += stride * expect_header.biHeight; - d3d_rect.pBits = (BYTE *)d3d_rect.pBits + d3d_rect.Pitch * expect_header.biHeight; - hr = MFCopyImage(d3d_rect.pBits, d3d_rect.Pitch, frame_data, stride, expect_header.biWidth, expect_header.biHeight / 2); - ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - } - hr = IDirect3DSurface9_UnlockRect(surface); + + init_test_host(&host, mixer, presenter); + hr = IMFVideoPresenter_QueryInterface(presenter, &IID_IMFTopologyServiceLookupClient, (void **)&lookup_client); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - hr = MFCreateVideoSampleFromSurface((IUnknown *)surface, &sample); + hr = IMFTopologyServiceLookupClient_InitServicePointers(lookup_client, &host.IMFTopologyServiceLookup_iface); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - IDirect3DSurface9_Release(surface); + IMFTopologyServiceLookupClient_Release(lookup_client); + + /* Configure device and media types. 
*/ + hr = MFGetService((IUnknown *)presenter, &MR_VIDEO_ACCELERATION_SERVICE, &IID_IDirect3DDeviceManager9, (void **)&manager); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFTransform_ProcessMessage(mixer, MFT_MESSAGE_SET_D3D_MANAGER, (ULONG_PTR)manager); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + IDirect3DDeviceManager9_Release(manager); + + + /* MF_MT_MINIMUM_DISPLAY_APERTURE / MF_MT_PAN_SCAN_APERTURE have no effect */ + + video_type = create_video_type(&MFVideoFormat_RGB32); + hr = IMFMediaType_SetUINT64(video_type, &MF_MT_FRAME_SIZE, (UINT64)expect_header.biWidth << 32 | expect_header.biHeight); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetUINT32(video_type, &MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetBlob(video_type, &MF_MT_MINIMUM_DISPLAY_APERTURE, (BYTE *)&aperture, sizeof(aperture)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetBlob(video_type, &MF_MT_PAN_SCAN_APERTURE, (BYTE *)&aperture, sizeof(aperture)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFTransform_SetInputType(mixer, 0, video_type, 0); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + IMFMediaType_Release(video_type); + + video_type = create_video_type(&MFVideoFormat_RGB32); + hr = IMFMediaType_SetUINT64(video_type, &MF_MT_FRAME_SIZE, (UINT64)expect_header.biWidth << 32 | expect_header.biHeight); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetUINT32(video_type, &MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFTransform_SetOutputType(mixer, 0, video_type, 0); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + IMFMediaType_Release(video_type); + + hr = IMFVideoPresenter_ProcessMessage(presenter, MFVP_MESSAGE_INVALIDATEMEDIATYPE, 0); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFVideoPresenter_ProcessMessage(presenter, MFVP_MESSAGE_BEGINSTREAMING, 0); + ok(hr == S_OK, 
"Unexpected hr %#lx.\n", hr); + + create_d3d_sample(manager, &MFVideoFormat_RGB32, &sample); hr = IMFTransform_ProcessInput(mixer, 0, sample, 0); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); hr = IMFVideoPresenter_ProcessMessage(presenter, MFVP_MESSAGE_PROCESSINPUTNOTIFY, 0); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); IMFSample_Release(sample); - hr = IMFVideoPresenter_QueryInterface(presenter, &IID_IMFVideoDisplayControl, (void **)&display_control); + SetRect(&rect, 0, 0, expect_header.biWidth, expect_header.biHeight); + diff = check_presenter_output(presenter, &expect_header, L"rgb32frame-flip.bmp", &rect); + ok(diff <= 5, "Unexpected %lu%% diff\n", diff); + + hr = IMFVideoPresenter_ProcessMessage(presenter, MFVP_MESSAGE_ENDSTREAMING, 0); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - hr = IMFVideoDisplayControl_GetCurrentImage(display_control, &header, &data, &data_size, ×tamp); - if (hr == MF_E_INVALIDREQUEST) - { - Sleep(500); - hr = IMFVideoDisplayControl_GetCurrentImage(display_control, &header, &data, &data_size, ×tamp); - } + + + /* MF_MT_PAN_SCAN_APERTURE has an effect only when enabled */ + + video_type = create_video_type(&MFVideoFormat_RGB32); + hr = IMFMediaType_SetUINT64(video_type, &MF_MT_FRAME_SIZE, (UINT64)expect_header.biWidth << 32 | expect_header.biHeight); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - IMFVideoDisplayControl_Release(display_control); + hr = IMFMediaType_SetUINT32(video_type, &MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFTransform_SetOutputType(mixer, 0, video_type, 0); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + IMFMediaType_Release(video_type); + + video_type = create_video_type(&MFVideoFormat_RGB32); + hr = IMFMediaType_SetUINT64(video_type, &MF_MT_FRAME_SIZE, (UINT64)expect_header.biWidth << 32 | expect_header.biHeight); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetUINT32(video_type, &MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE); + ok(hr == S_OK, 
"Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetBlob(video_type, &MF_MT_PAN_SCAN_APERTURE, (BYTE *)&aperture, sizeof(aperture)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetUINT32(video_type, &MF_MT_PAN_SCAN_ENABLED, TRUE); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFTransform_SetInputType(mixer, 0, video_type, 0); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + IMFMediaType_Release(video_type); + + hr = IMFVideoPresenter_ProcessMessage(presenter, MFVP_MESSAGE_INVALIDATEMEDIATYPE, 0); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFVideoPresenter_ProcessMessage(presenter, MFVP_MESSAGE_BEGINSTREAMING, 0); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + + create_d3d_sample(manager, &MFVideoFormat_RGB32, &sample); + hr = IMFTransform_ProcessInput(mixer, 0, sample, 0); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFVideoPresenter_ProcessMessage(presenter, MFVP_MESSAGE_PROCESSINPUTNOTIFY, 0); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + IMFSample_Release(sample); + + SetRect(&rect, 0, 0, expect_header_crop.biWidth, expect_header_crop.biHeight); + diff = check_presenter_output_(__LINE__, presenter, &expect_header_crop, L"rgb32frame-crop.bmp", &rect, TRUE); + todo_wine ok(diff <= 5, "Unexpected %lu%% diff\n", diff); + + hr = IMFVideoPresenter_ProcessMessage(presenter, MFVP_MESSAGE_ENDSTREAMING, 0); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + - ok(header.biSize == expect_header.biSize, "Unexpected biSize %#lx\n", header.biSize); - ok(header.biWidth == expect_header.biWidth, "Unexpected biWidth %#lx\n", header.biWidth); - ok(header.biHeight == expect_header.biHeight, "Unexpected biHeight %#lx\n", header.biHeight); - ok(header.biPlanes == expect_header.biPlanes, "Unexpected biPlanes %#x\n", header.biPlanes); - ok(header.biBitCount == expect_header.biBitCount, "Unexpected biBitCount %#x\n", header.biBitCount); - ok(header.biCompression == expect_header.biCompression, "Unexpected biCompression %#lx\n", 
header.biCompression); - ok(header.biSizeImage == expect_header.biSizeImage, "Unexpected biSizeImage %#lx\n", header.biSizeImage); - ok(header.biXPelsPerMeter == expect_header.biXPelsPerMeter, "Unexpected biXPelsPerMeter %#lx\n", header.biXPelsPerMeter); - ok(header.biYPelsPerMeter == expect_header.biYPelsPerMeter, "Unexpected biYPelsPerMeter %#lx\n", header.biYPelsPerMeter); - ok(header.biClrUsed == expect_header.biClrUsed, "Unexpected biClrUsed %#lx\n", header.biClrUsed); - ok(header.biClrImportant == expect_header.biClrImportant, "Unexpected biClrImportant %#lx\n", header.biClrImportant); - - SetRect(&rect, 0, 0, header.biWidth, header.biHeight); - diff = check_rgb32_data(L"rgb32frame-flip.bmp", data, header.biSizeImage, &rect); + /* MF_MT_GEOMETRIC_APERTURE has an effect */ + + video_type = create_video_type(&MFVideoFormat_RGB32); + hr = IMFMediaType_SetUINT64(video_type, &MF_MT_FRAME_SIZE, (UINT64)expect_header.biWidth << 32 | expect_header.biHeight); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetUINT32(video_type, &MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetBlob(video_type, &MF_MT_GEOMETRIC_APERTURE, (BYTE *)&aperture, sizeof(aperture)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFTransform_SetInputType(mixer, 0, video_type, 0); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + IMFMediaType_Release(video_type); + + video_type = create_video_type(&MFVideoFormat_RGB32); + hr = IMFMediaType_SetUINT64(video_type, &MF_MT_FRAME_SIZE, (UINT64)expect_header.biWidth << 32 | expect_header.biHeight); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetUINT32(video_type, &MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFTransform_SetOutputType(mixer, 0, video_type, 0); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + IMFMediaType_Release(video_type); + + hr = IMFVideoPresenter_ProcessMessage(presenter, 
MFVP_MESSAGE_INVALIDATEMEDIATYPE, 0); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFVideoPresenter_ProcessMessage(presenter, MFVP_MESSAGE_BEGINSTREAMING, 0); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + + create_d3d_sample(manager, &MFVideoFormat_RGB32, &sample); + hr = IMFTransform_ProcessInput(mixer, 0, sample, 0); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFVideoPresenter_ProcessMessage(presenter, MFVP_MESSAGE_PROCESSINPUTNOTIFY, 0); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + IMFSample_Release(sample); + + SetRect(&rect, 0, 0, expect_header_crop.biWidth, expect_header_crop.biHeight); + diff = check_presenter_output(presenter, &expect_header_crop, L"rgb32frame-crop.bmp", &rect); ok(diff <= 5, "Unexpected %lu%% diff\n", diff); - CoTaskMemFree(data); hr = IMFVideoPresenter_ProcessMessage(presenter, MFVP_MESSAGE_ENDSTREAMING, 0); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); -skip_tests: + hr = IMFVideoPresenter_QueryInterface(presenter, &IID_IMFTopologyServiceLookupClient, (void **)&lookup_client); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); hr = IMFTopologyServiceLookupClient_ReleaseServicePointers(lookup_client); @@ -3688,6 +3925,7 @@ START_TEST(evr) test_presenter_media_type(); test_presenter_orientation(&MFVideoFormat_NV12); test_presenter_orientation(&MFVideoFormat_RGB32); + test_mixer_video_aperture(); test_presenter_shutdown(); test_mixer_output_rectangle(); test_mixer_zorder(); diff --git a/dlls/evr/tests/resource.rc b/dlls/evr/tests/resource.rc index 79b62304303..7537acfe5ac 100644 --- a/dlls/evr/tests/resource.rc +++ b/dlls/evr/tests/resource.rc @@ -20,14 +20,18 @@ #include "windef.h" -/* Generated from running the mf:transform tests on Windows */ +/* Generated from running the evr:evr tests on Windows */ /* @makedep: rgb32frame.bmp */ rgb32frame.bmp RCDATA rgb32frame.bmp -/* Generated from running the mf:transform tests on Windows */ +/* Generated from running the evr:evr tests on Windows */ /* @makedep: 
rgb32frame-flip.bmp */ rgb32frame-flip.bmp RCDATA rgb32frame-flip.bmp -/* Generated from running the mf:transform tests on Windows */ +/* Generated from running the evr:evr tests on Windows */ +/* @makedep: rgb32frame-crop.bmp */ +rgb32frame-crop.bmp RCDATA rgb32frame-crop.bmp + +/* Generated from running the evr:evr tests on Windows */ /* @makedep: nv12frame.bmp */ nv12frame.bmp RCDATA nv12frame.bmp diff --git a/dlls/evr/tests/rgb32frame-crop.bmp b/dlls/evr/tests/rgb32frame-crop.bmp new file mode 100644 index 00000000000..6cbaf72df60 Binary files /dev/null and b/dlls/evr/tests/rgb32frame-crop.bmp differ diff --git a/dlls/mf/mf_private.h b/dlls/mf/mf_private.h index bbfadaee5d8..726592654c5 100644 --- a/dlls/mf/mf_private.h +++ b/dlls/mf/mf_private.h @@ -121,3 +121,7 @@ extern BOOL mf_is_sar_sink(IMFMediaSink *sink); extern HRESULT topology_node_get_object(IMFTopologyNode *node, REFIID riid, void **obj); extern HRESULT topology_node_get_type_handler(IMFTopologyNode *node, DWORD stream, BOOL output, IMFMediaTypeHandler **handler); extern HRESULT topology_node_init_media_type(IMFTopologyNode *node, DWORD stream, BOOL output, IMFMediaType **type); + +extern BOOL topology_node_is_d3d_aware(IMFTopologyNode *node); +extern HRESULT topology_node_set_device_manager(IMFTopologyNode *node, IUnknown *device_manager); +extern HRESULT stream_sink_get_device_manager(IMFStreamSink *stream_sink, IUnknown **device_manager); \ No newline at end of file diff --git a/dlls/mf/session.c b/dlls/mf/session.c index ef707dea4de..4f94d3c30a7 100644 --- a/dlls/mf/session.c +++ b/dlls/mf/session.c @@ -144,10 +144,10 @@ struct media_sink BOOL finalized; }; -struct sample +struct event_entry { struct list entry; - IMFSample *sample; + IMFMediaEvent *event; }; struct transform_stream @@ -155,6 +155,7 @@ struct transform_stream struct list samples; unsigned int requests; unsigned int min_buffer_size; + IMFSample *allocated_sample; BOOL draining; }; @@ -676,39 +677,86 @@ static void 
session_set_caps(struct media_session *session, DWORD caps) static HRESULT transform_stream_push_sample(struct transform_stream *stream, IMFSample *sample) { - struct sample *entry; + PROPVARIANT value = {.vt = VT_UNKNOWN, .punkVal = (IUnknown *)sample}; + struct event_entry *entry; + HRESULT hr; if (!(entry = calloc(1, sizeof(*entry)))) return E_OUTOFMEMORY; + if (FAILED(hr = MFCreateMediaEvent(MEMediaSample, &GUID_NULL, S_OK, &value, &entry->event))) + { + free(entry); + return hr; + } + + list_add_tail(&stream->samples, &entry->entry); + return S_OK; +} - entry->sample = sample; - IMFSample_AddRef(entry->sample); +static HRESULT transform_stream_push_format_change(struct transform_stream *stream, IMFMediaType *media_type) +{ + PROPVARIANT value = {.vt = VT_UNKNOWN, .punkVal = (IUnknown *)media_type}; + struct event_entry *entry; + HRESULT hr; + + if (!(entry = calloc(1, sizeof(*entry)))) + return E_OUTOFMEMORY; + if (FAILED(hr = MFCreateMediaEvent(MEStreamFormatChanged, &GUID_NULL, S_OK, &value, &entry->event))) + { + free(entry); + return hr; + } list_add_tail(&stream->samples, &entry->entry); return S_OK; } -static HRESULT transform_stream_pop_sample(struct transform_stream *stream, IMFSample **sample) +static HRESULT transform_stream_push_events(struct transform_stream *stream, IMFCollection *events) { - struct sample *entry; + struct event_entry *entry; + IMFMediaEvent *event; + + while (SUCCEEDED(IMFCollection_RemoveElement(events, 0, (IUnknown **)&event))) + { + if (!(entry = calloc(1, sizeof(*entry)))) + { + IMFMediaEvent_Release(event); + return E_OUTOFMEMORY; + } + entry->event = event; + list_add_tail(&stream->samples, &entry->entry); + } + + return S_OK; +} + +static HRESULT transform_stream_pop_event(struct transform_stream *stream, IMFMediaEvent **event) +{ + struct event_entry *entry; struct list *ptr; if (!(ptr = list_head(&stream->samples))) return MF_E_TRANSFORM_NEED_MORE_INPUT; - entry = LIST_ENTRY(ptr, struct sample, entry); + entry = 
LIST_ENTRY(ptr, struct event_entry, entry); list_remove(&entry->entry); - *sample = entry->sample; + *event = entry->event; free(entry); return S_OK; } -static void transform_stream_drop_samples(struct transform_stream *stream) +static void transform_stream_drop_events(struct transform_stream *stream) { - IMFSample *sample; + IMFMediaEvent *event; + + if (stream->allocated_sample) + { + IMFSample_Release(stream->allocated_sample); + stream->allocated_sample = NULL; + } - while (SUCCEEDED(transform_stream_pop_sample(stream, &sample))) - IMFSample_Release(sample); + while (SUCCEEDED(transform_stream_pop_event(stream, &event))) + IMFMediaEvent_Release(event); } static void release_topo_node(struct topo_node *node) @@ -723,9 +771,9 @@ static void release_topo_node(struct topo_node *node) break; case MF_TOPOLOGY_TRANSFORM_NODE: for (i = 0; i < node->u.transform.input_count; ++i) - transform_stream_drop_samples(&node->u.transform.inputs[i]); + transform_stream_drop_events(&node->u.transform.inputs[i]); for (i = 0; i < node->u.transform.output_count; ++i) - transform_stream_drop_samples(&node->u.transform.outputs[i]); + transform_stream_drop_events(&node->u.transform.outputs[i]); free(node->u.transform.inputs); free(node->u.transform.outputs); free(node->u.transform.input_map); @@ -855,19 +903,53 @@ static void session_clear_presentation(struct media_session *session) } } -static struct topo_node *session_get_node_by_id(const struct media_session *session, TOPOID id) +static struct topo_node *session_get_topo_node(const struct media_session *session, IMFTopologyNode *node) { - struct topo_node *node; + struct topo_node *topo_node; + TOPOID id; - LIST_FOR_EACH_ENTRY(node, &session->presentation.nodes, struct topo_node, entry) + if (FAILED(IMFTopologyNode_GetTopoNodeID(node, &id))) + return NULL; + + LIST_FOR_EACH_ENTRY(topo_node, &session->presentation.nodes, struct topo_node, entry) { - if (node->node_id == id) - return node; + if (topo_node->node_id == id) + return 
topo_node; } return NULL; } +static struct topo_node *session_get_topo_node_input(const struct media_session *session, + const struct topo_node *down_node, DWORD input, DWORD *output) +{ + struct topo_node *up_node = NULL; + IMFTopologyNode *node; + + if (SUCCEEDED(IMFTopologyNode_GetInput(down_node->node, input, &node, output))) + { + up_node = session_get_topo_node(session, node); + IMFTopologyNode_Release(node); + } + + return up_node; +} + +static struct topo_node *session_get_topo_node_output(const struct media_session *session, + const struct topo_node *up_node, DWORD output, DWORD *input) +{ + struct topo_node *down_node = NULL; + IMFTopologyNode *node; + + if (SUCCEEDED(IMFTopologyNode_GetOutput(up_node->node, output, &node, input))) + { + down_node = session_get_topo_node(session, node); + IMFTopologyNode_Release(node); + } + + return down_node; +} + static void session_command_complete(struct media_session *session) { struct session_op *op; @@ -1621,7 +1703,7 @@ static ULONG WINAPI node_sample_allocator_cb_Release(IMFVideoSampleAllocatorNoti return 1; } -static HRESULT session_request_sample_from_node(struct media_session *session, IMFTopologyNode *node, DWORD output); +static HRESULT session_request_sample_from_node(struct media_session *session, struct topo_node *topo_node, DWORD output); static HRESULT WINAPI node_sample_allocator_cb_NotifyRelease(IMFVideoSampleAllocatorNotify *iface) { @@ -1671,6 +1753,8 @@ static HRESULT session_append_node(struct media_session *session, IMFTopologyNod if (SUCCEEDED(hr = session_add_media_sink(session, node, media_sink))) { + IUnknown *device_manager; + if (SUCCEEDED(session_get_stream_sink_type(topo_node->object.sink_stream, &media_type))) { if (SUCCEEDED(MFGetService(topo_node->object.object, &MR_VIDEO_ACCELERATION_SERVICE, @@ -1688,6 +1772,21 @@ static HRESULT session_append_node(struct media_session *session, IMFTopologyNod } IMFMediaType_Release(media_type); } + + if 
(SUCCEEDED(stream_sink_get_device_manager(topo_node->object.sink_stream, &device_manager))) + { + IMFTopologyNode *upstream; + DWORD output; + + if (SUCCEEDED(IMFTopologyNode_GetInput(topo_node->node, 0, &upstream, &output))) + { + if (topology_node_is_d3d_aware(upstream)) + topology_node_set_device_manager(upstream, device_manager); + IMFTopologyNode_Release(upstream); + } + + IUnknown_Release(device_manager); + } } IMFMediaSink_Release(media_sink); @@ -2563,7 +2662,7 @@ static HRESULT WINAPI session_commands_callback_GetParameters(IMFAsyncCallback * return E_NOTIMPL; } -static void session_deliver_pending_samples(struct media_session *session, IMFTopologyNode *node); +static void session_deliver_pending_samples(struct media_session *session, struct topo_node *topo_node); static HRESULT WINAPI session_commands_callback_Invoke(IMFAsyncCallback *iface, IMFAsyncResult *result) { @@ -2678,20 +2777,18 @@ static HRESULT WINAPI session_sa_ready_callback_GetParameters(IMFAsyncCallback * static HRESULT WINAPI session_sa_ready_callback_Invoke(IMFAsyncCallback *iface, IMFAsyncResult *result) { IMFVideoSampleAllocatorNotify *notify = (IMFVideoSampleAllocatorNotify *)IMFAsyncResult_GetStateNoAddRef(result); - struct topo_node *topo_node = impl_node_from_IMFVideoSampleAllocatorNotify(notify); + struct topo_node *topo_node = impl_node_from_IMFVideoSampleAllocatorNotify(notify), *up_node; struct media_session *session = impl_from_sa_ready_callback_IMFAsyncCallback(iface); - IMFTopologyNode *upstream_node; - DWORD upstream_output; + DWORD output; EnterCriticalSection(&session->cs); if (topo_node->u.sink.requests) { - if (SUCCEEDED(IMFTopologyNode_GetInput(topo_node->node, 0, &upstream_node, &upstream_output))) - { - session_deliver_pending_samples(session, upstream_node); - IMFTopologyNode_Release(upstream_node); - } + if (!(up_node = session_get_topo_node_input(session, topo_node, 0, &output))) + WARN("Failed to node %p/%u input\n", topo_node, 0); + else + 
session_deliver_pending_samples(session, up_node); } LeaveCriticalSection(&session->cs); @@ -3085,26 +3182,29 @@ static void session_set_sink_stream_state(struct media_session *session, IMFStre } static HRESULT transform_get_external_output_sample(const struct media_session *session, struct topo_node *transform, - unsigned int output_index, const MFT_OUTPUT_STREAM_INFO *stream_info, IMFSample **sample) + DWORD output, const MFT_OUTPUT_STREAM_INFO *stream_info, IMFSample **sample) { - IMFTopologyNode *downstream_node; + struct transform_stream *stream = &transform->u.transform.outputs[output]; + DWORD buffer_size, sample_size, input; IMFMediaBuffer *buffer = NULL; struct topo_node *topo_node; - unsigned int buffer_size; - DWORD downstream_input; - TOPOID node_id; HRESULT hr; - if (FAILED(IMFTopologyNode_GetOutput(transform->node, output_index, &downstream_node, &downstream_input))) + buffer_size = max(stream_info->cbSize, stream->min_buffer_size); + if ((*sample = stream->allocated_sample)) { - WARN("Failed to get connected node for output %u.\n", output_index); - return MF_E_UNEXPECTED; + stream->allocated_sample = NULL; + if (SUCCEEDED(IMFSample_GetTotalLength(*sample, &sample_size)) && sample_size >= buffer_size) + return S_OK; + IMFSample_Release(*sample); + *sample = NULL; } - IMFTopologyNode_GetTopoNodeID(downstream_node, &node_id); - IMFTopologyNode_Release(downstream_node); - - topo_node = session_get_node_by_id(session, node_id); + if (!(topo_node = session_get_topo_node_output(session, transform, output, &input))) + { + WARN("Failed to node %p/%lu output.\n", transform, output); + return MF_E_UNEXPECTED; + } if (topo_node->type == MF_TOPOLOGY_OUTPUT_NODE && topo_node->u.sink.allocator) { @@ -3112,8 +3212,6 @@ static HRESULT transform_get_external_output_sample(const struct media_session * } else { - buffer_size = max(stream_info->cbSize, transform->u.transform.outputs[output_index].min_buffer_size); - hr = MFCreateAlignedMemoryBuffer(buffer_size, 
stream_info->cbAlignment, &buffer); if (SUCCEEDED(hr)) hr = MFCreateSample(sample); @@ -3128,58 +3226,218 @@ static HRESULT transform_get_external_output_sample(const struct media_session * return hr; } -static HRESULT transform_node_pull_samples(const struct media_session *session, struct topo_node *node) +/* update the transform output type while keeping subtype which matches the old output type */ +static HRESULT transform_stream_update_output_type(struct topo_node *node, struct transform_stream *stream, + UINT id, IMFMediaType *old_output_type, IMFMediaType **new_output_type) { - MFT_OUTPUT_STREAM_INFO stream_info; - MFT_OUTPUT_DATA_BUFFER *buffers; - HRESULT hr = E_UNEXPECTED; - DWORD status = 0; + GUID subtype, desired; + UINT i = 0; + HRESULT hr; + + TRACE("node %p, stream %p, id %u, old_output_type %p, new_output_type %p\n", + node, stream, id, old_output_type, new_output_type); + + IMFMediaType_GetGUID(old_output_type, &MF_MT_SUBTYPE, &desired); + + /* find an available output type matching the desired subtype */ + while (SUCCEEDED(hr = IMFTransform_GetOutputAvailableType(node->object.transform, id, + i++, new_output_type))) + { + IMFMediaType_GetGUID(*new_output_type, &MF_MT_SUBTYPE, &subtype); + if (IsEqualGUID(&subtype, &desired)) + { + if (FAILED(hr = IMFTransform_SetOutputType(node->object.transform, id, *new_output_type, 0))) + { + IMFMediaType_Release(*new_output_type); + break; + } + return S_OK; + } + IMFMediaType_Release(*new_output_type); + } + + *new_output_type = NULL; + return hr; +} + +static HRESULT transform_node_format_changed(struct topo_node *node, MFT_OUTPUT_DATA_BUFFER *buffers) +{ + HRESULT hr = S_OK; unsigned int i; - if (!(buffers = calloc(node->u.transform.output_count, sizeof(*buffers)))) + TRACE("node %p, buffers %p\n", node, buffers); + + for (i = 0; SUCCEEDED(hr) && i < node->u.transform.output_count; ++i) + { + struct transform_stream *stream = &node->u.transform.outputs[i]; + IMFMediaType *old_output_type, *new_output_type; 
+ UINT id = buffers[i].dwStreamID; + + if (!(buffers[i].dwStatus & MFT_OUTPUT_DATA_BUFFER_FORMAT_CHANGE)) + continue; + + if (SUCCEEDED(hr = IMFTransform_GetOutputCurrentType(node->object.transform, id, &old_output_type))) + { + if (SUCCEEDED(hr = transform_stream_update_output_type(node, stream, id, old_output_type, + &new_output_type))) + { + if (buffers[i].pEvents || SUCCEEDED(hr = MFCreateCollection(&buffers[i].pEvents))) + { + if (FAILED(hr = transform_stream_push_format_change(stream, new_output_type))) + WARN("Failed to queue format change event, hr %#lx\n", hr); + } + IMFMediaType_Release(new_output_type); + } + IMFMediaType_Release(old_output_type); + } + } + + return hr; +} + +static HRESULT transform_stream_update_input_type(struct topo_node *node, UINT input, IMFMediaType *media_type) +{ + IMFMediaType **old_output_types; + IMFMediaType *new_output_type; + IMFMediaTypeHandler *handler; + UINT output; + HRESULT hr; + + TRACE("node %p, input %u, media_type %p\n", node, input, media_type); + + if (!(old_output_types = calloc(node->u.transform.output_count, sizeof(*old_output_types)))) return E_OUTOFMEMORY; + for (output = 0; output < node->u.transform.output_count; ++output) + { + UINT id = transform_node_get_stream_id(node, TRUE, output); + if (FAILED(hr = IMFTransform_GetOutputCurrentType(node->object.transform, id, + &old_output_types[output]))) + goto done; + } + + if (SUCCEEDED(hr = topology_node_get_type_handler(node->node, input, FALSE, &handler))) + { + if (FAILED(hr = IMFMediaTypeHandler_SetCurrentMediaType(handler, media_type))) + WARN("Failed to change note %p input %u media type\n", node->node, input); + IMFMediaTypeHandler_Release(handler); + } + + for (output = 0; SUCCEEDED(hr) && output < node->u.transform.output_count; ++output) + { + struct transform_stream *stream = &node->u.transform.outputs[output]; + UINT id = transform_node_get_stream_id(node, TRUE, output); + + /* check if transform output type is still valid or if we need to update 
it as well */ + if (SUCCEEDED(hr = IMFTransform_GetOutputCurrentType(node->object.transform, id, &new_output_type))) + { + IMFMediaType_Release(new_output_type); + continue; + } + + if (SUCCEEDED(hr = transform_stream_update_output_type(node, stream, id, + old_output_types[output], &new_output_type))) + { + if (FAILED(hr = transform_stream_push_format_change(stream, new_output_type))) + WARN("Failed to queue format change event, hr %#lx\n", hr); + IMFMediaType_Release(new_output_type); + } + } + +done: + for (output = 0; output < node->u.transform.output_count; ++output) + if (old_output_types[output]) + IMFMediaType_Release(old_output_types[output]); + free(old_output_types); + return hr; +} + +static HRESULT allocate_output_samples(const struct media_session *session, struct topo_node *node, + MFT_OUTPUT_DATA_BUFFER *buffers) +{ + HRESULT hr; + UINT i; + for (i = 0; i < node->u.transform.output_count; ++i) { + MFT_OUTPUT_STREAM_INFO stream_info = {0}; + buffers[i].dwStreamID = transform_node_get_stream_id(node, TRUE, i); - buffers[i].pSample = NULL; - buffers[i].dwStatus = 0; - buffers[i].pEvents = NULL; - memset(&stream_info, 0, sizeof(stream_info)); if (FAILED(hr = IMFTransform_GetOutputStreamInfo(node->object.transform, buffers[i].dwStreamID, &stream_info))) - break; + return hr; + if (!(stream_info.dwFlags & (MFT_OUTPUT_STREAM_PROVIDES_SAMPLES | MFT_OUTPUT_STREAM_CAN_PROVIDE_SAMPLES)) + && FAILED(hr = transform_get_external_output_sample(session, node, i, &stream_info, &buffers[i].pSample))) + return hr; + } - if (!(stream_info.dwFlags & (MFT_OUTPUT_STREAM_PROVIDES_SAMPLES | MFT_OUTPUT_STREAM_CAN_PROVIDE_SAMPLES))) - { - if (FAILED(hr = transform_get_external_output_sample(session, node, i, &stream_info, &buffers[i].pSample))) - break; - } + return S_OK; +} + +static void release_output_samples(struct topo_node *node, MFT_OUTPUT_DATA_BUFFER *buffers) +{ + UINT i; + + for (i = 0; i < node->u.transform.output_count; ++i) + { + if (buffers[i].pSample) + 
IMFSample_Release(buffers[i].pSample); + if (buffers[i].pEvents) + IMFCollection_Release(buffers[i].pEvents); } +} + +static HRESULT transform_node_pull_samples(const struct media_session *session, struct topo_node *node) +{ + MFT_OUTPUT_DATA_BUFFER *buffers; + DWORD status; + HRESULT hr; + UINT i; + + if (!(buffers = calloc(node->u.transform.output_count, sizeof(*buffers)))) + return E_OUTOFMEMORY; + if (FAILED(hr = allocate_output_samples(session, node, buffers))) + goto done; + + status = 0; + hr = IMFTransform_ProcessOutput(node->object.transform, 0, node->u.transform.output_count, buffers, &status); + if (hr == MF_E_TRANSFORM_STREAM_CHANGE && SUCCEEDED(hr = transform_node_format_changed(node, buffers))) + { + release_output_samples(node, buffers); + + memset(buffers, 0, node->u.transform.output_count * sizeof(*buffers)); + if (FAILED(hr = allocate_output_samples(session, node, buffers))) + goto done; - if (SUCCEEDED(hr)) hr = IMFTransform_ProcessOutput(node->object.transform, 0, node->u.transform.output_count, buffers, &status); + } /* Collect returned samples for all streams. 
*/ for (i = 0; i < node->u.transform.output_count; ++i) { struct transform_stream *stream = &node->u.transform.outputs[i]; - if (buffers[i].pEvents) - IMFCollection_Release(buffers[i].pEvents); + if (buffers[i].pEvents && FAILED(hr = transform_stream_push_events(stream, buffers[i].pEvents))) + WARN("Failed to push transform events, hr %#lx\n", hr); + if (buffers[i].dwStatus & MFT_OUTPUT_DATA_BUFFER_NO_SAMPLE) + continue; - if (SUCCEEDED(hr) && !(buffers[i].dwStatus & MFT_OUTPUT_DATA_BUFFER_NO_SAMPLE)) + if (SUCCEEDED(hr)) { if (session->quality_manager) IMFQualityManager_NotifyProcessOutput(session->quality_manager, node->node, i, buffers[i].pSample); if (FAILED(hr = transform_stream_push_sample(stream, buffers[i].pSample))) WARN("Failed to queue output sample, hr %#lx\n", hr); } - - if (buffers[i].pSample) - IMFSample_Release(buffers[i].pSample); + else if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT && !stream->allocated_sample) + { + stream->allocated_sample = buffers[i].pSample; + buffers[i].pSample = NULL; + } } +done: + release_output_samples(node, buffers); free(buffers); return hr; @@ -3235,15 +3493,111 @@ static HRESULT transform_node_push_sample(const struct media_session *session, s return hr; } -static void session_deliver_sample_to_node(struct media_session *session, IMFTopologyNode *node, unsigned int input, +static void session_deliver_sample_to_node(struct media_session *session, struct topo_node *topo_node, unsigned int input, IMFSample *sample); +static void transform_node_deliver_samples(struct media_session *session, struct topo_node *topo_node); + +static HRESULT transform_node_handle_format_change(struct media_session *session, struct topo_node *topo_node, + UINT input, IMFMediaType *media_type) +{ + struct transform_stream *stream = &topo_node->u.transform.inputs[input]; + UINT id = transform_node_get_stream_id(topo_node, FALSE, input); + IMFTransform *transform = topo_node->object.transform; + UINT32 support_dynamic_format_change = 0; + 
IMFAttributes *attributes; + HRESULT hr; + + TRACE("session %p, topo_node %p, input %u, media_type %p\n", session, topo_node, input, media_type); + + if (SUCCEEDED(IMFTransform_GetAttributes(transform, &attributes))) + { + if (FAILED(IMFAttributes_GetUINT32(attributes, &MFT_SUPPORT_DYNAMIC_FORMAT_CHANGE, &support_dynamic_format_change))) + support_dynamic_format_change = 0; + IMFAttributes_Release(attributes); + } + + if (!support_dynamic_format_change) + { + if (SUCCEEDED(hr = IMFTransform_ProcessMessage(transform, MFT_MESSAGE_COMMAND_DRAIN, id))) + { + while (SUCCEEDED(hr = transform_node_pull_samples(session, topo_node))) + transform_node_deliver_samples(session, topo_node); + } + + if (hr != MF_E_TRANSFORM_NEED_MORE_INPUT) + { + /* transform isn't fully drained, put the event in the stream input queue to try again later */ + if (FAILED(transform_stream_push_format_change(stream, media_type))) + WARN("Failed to queue input format change event\n"); + return hr; + } + } + + return transform_stream_update_input_type(topo_node, input, media_type); +} + +static HRESULT session_handle_format_change(struct media_session *session, struct topo_node *topo_node, + UINT input, IMFMediaType *media_type) +{ + HRESULT hr; + + switch (topo_node->type) + { + case MF_TOPOLOGY_OUTPUT_NODE: + if (!topo_node->u.sink.allocator) + return S_OK; + if (SUCCEEDED(hr = IMFVideoSampleAllocator_UninitializeSampleAllocator(topo_node->u.sink.allocator))) + hr = IMFVideoSampleAllocator_InitializeSampleAllocator(topo_node->u.sink.allocator, 4, media_type); + return hr; + + case MF_TOPOLOGY_TRANSFORM_NODE: + return transform_node_handle_format_change(session, topo_node, input, media_type); + + default: + FIXME("Unhandled downstream node type %d.\n", topo_node->type); + return E_NOTIMPL; + } +} + +static HRESULT transform_stream_handle_event(struct media_session *session, struct transform_stream *stream, + struct topo_node *topo_node, unsigned int input, IMFMediaEvent *event) +{ + MediaEventType 
type; + PROPVARIANT value; + HRESULT hr; + + if (FAILED(hr = IMFMediaEvent_GetType(event, &type))) + return hr; + PropVariantInit(&value); + + switch (type) + { + case MEMediaSample: + if (SUCCEEDED(hr = IMFMediaEvent_GetValue(event, &value))) + session_deliver_sample_to_node(session, topo_node, input, (IMFSample *)value.punkVal); + break; + + case MEStreamFormatChanged: + if (SUCCEEDED(hr = IMFMediaEvent_GetValue(event, &value))) + hr = session_handle_format_change(session, topo_node, input, (IMFMediaType *)value.punkVal); + break; + + default: + ERR("Unexpected event type %lu\n", type); + hr = E_NOTIMPL; + break; + } + + PropVariantClear(&value); + return hr; +} static void transform_node_deliver_samples(struct media_session *session, struct topo_node *topo_node) { - IMFTopologyNode *up_node = topo_node->node, *down_node; BOOL drained = transform_node_is_drained(topo_node); + struct topo_node *up_node, *down_node; + IMFMediaEvent *event; DWORD output, input; - IMFSample *sample; HRESULT hr = S_OK; /* Push down all available output. 
*/ @@ -3251,26 +3605,30 @@ static void transform_node_deliver_samples(struct media_session *session, struct { struct transform_stream *stream = &topo_node->u.transform.outputs[output]; - if (FAILED(hr = IMFTopologyNode_GetOutput(up_node, output, &down_node, &input))) + if (!(down_node = session_get_topo_node_output(session, topo_node, output, &input))) { - WARN("Failed to node %p/%lu output, hr %#lx.\n", up_node, output, hr); + WARN("Failed to node %p/%lu output\n", topo_node, output); continue; } while (stream->requests) { - if (FAILED(hr = transform_stream_pop_sample(stream, &sample))) + MediaEventType type; + + if (FAILED(hr = transform_stream_pop_event(stream, &event))) { /* try getting more samples by calling IMFTransform_ProcessOutput */ if (FAILED(hr = transform_node_pull_samples(session, topo_node))) break; - if (FAILED(hr = transform_stream_pop_sample(stream, &sample))) + if (FAILED(hr = transform_stream_pop_event(stream, &event))) break; } - session_deliver_sample_to_node(session, down_node, input, sample); - stream->requests--; - IMFSample_Release(sample); + if (FAILED(hr = transform_stream_handle_event(session, stream, down_node, input, event))) + ERR("Failed to handle stream event, hr %#lx\n", hr); + else if (SUCCEEDED(IMFMediaEvent_GetType(event, &type)) && type == MEMediaSample) + stream->requests--; + IMFMediaEvent_Release(event); } while (stream->requests && drained) @@ -3278,8 +3636,6 @@ static void transform_node_deliver_samples(struct media_session *session, struct session_deliver_sample_to_node(session, down_node, input, NULL); stream->requests--; } - - IMFTopologyNode_Release(down_node); } if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT && transform_node_has_requests(topo_node)) @@ -3289,36 +3645,28 @@ static void transform_node_deliver_samples(struct media_session *session, struct input = topo_node->u.transform.next_input++ % topo_node->u.transform.input_count; stream = &topo_node->u.transform.inputs[input]; - if 
(SUCCEEDED(transform_stream_pop_sample(stream, &sample))) - session_deliver_sample_to_node(session, topo_node->node, input, sample); - else if (FAILED(hr = IMFTopologyNode_GetInput(topo_node->node, input, &up_node, &output))) - WARN("Failed to get node %p/%lu input, hr %#lx\n", topo_node->node, input, hr); - else + if (SUCCEEDED(transform_stream_pop_event(stream, &event))) { - if (FAILED(hr = session_request_sample_from_node(session, up_node, output))) - WARN("Failed to request sample from upstream node %p/%lu, hr %#lx\n", up_node, output, hr); - IMFTopologyNode_Release(up_node); + if (FAILED(hr = transform_stream_handle_event(session, stream, topo_node, input, event))) + ERR("Failed to handle stream event, hr %#lx\n", hr); + IMFMediaEvent_Release(event); } + else if (!(up_node = session_get_topo_node_input(session, topo_node, input, &output))) + WARN("Failed to node %p/%lu input\n", topo_node, input); + else if (FAILED(hr = session_request_sample_from_node(session, up_node, output))) + WARN("Failed to request sample from upstream node %p/%lu, hr %#lx\n", up_node, output, hr); } } -static void session_deliver_sample_to_node(struct media_session *session, IMFTopologyNode *node, unsigned int input, +static void session_deliver_sample_to_node(struct media_session *session, struct topo_node *topo_node, unsigned int input, IMFSample *sample) { - struct topo_node *topo_node; - MF_TOPOLOGY_TYPE node_type; - TOPOID node_id; HRESULT hr; if (session->quality_manager) - IMFQualityManager_NotifyProcessInput(session->quality_manager, node, input, sample); + IMFQualityManager_NotifyProcessInput(session->quality_manager, topo_node->node, input, sample); - IMFTopologyNode_GetNodeType(node, &node_type); - IMFTopologyNode_GetTopoNodeID(node, &node_id); - - topo_node = session_get_node_by_id(session, node_id); - - switch (node_type) + switch (topo_node->type) { case MF_TOPOLOGY_OUTPUT_NODE: if (topo_node->u.sink.requests) @@ -3343,52 +3691,34 @@ static void 
session_deliver_sample_to_node(struct media_session *session, IMFTop transform_node_deliver_samples(session, topo_node); break; case MF_TOPOLOGY_TEE_NODE: - FIXME("Unhandled downstream node type %d.\n", node_type); + FIXME("Unhandled downstream node type %d.\n", topo_node->type); break; default: ; } } -static void session_deliver_pending_samples(struct media_session *session, IMFTopologyNode *node) +static void session_deliver_pending_samples(struct media_session *session, struct topo_node *topo_node) { - struct topo_node *topo_node; - MF_TOPOLOGY_TYPE node_type; - TOPOID node_id; - - IMFTopologyNode_GetNodeType(node, &node_type); - IMFTopologyNode_GetTopoNodeID(node, &node_id); - - topo_node = session_get_node_by_id(session, node_id); - - switch (node_type) + switch (topo_node->type) { case MF_TOPOLOGY_TRANSFORM_NODE: transform_node_pull_samples(session, topo_node); transform_node_deliver_samples(session, topo_node); break; default: - FIXME("Unexpected node type %u.\n", node_type); + FIXME("Unexpected node type %u.\n", topo_node->type); } } -static HRESULT session_request_sample_from_node(struct media_session *session, IMFTopologyNode *node, DWORD output) +static HRESULT session_request_sample_from_node(struct media_session *session, struct topo_node *topo_node, DWORD output) { - IMFTopologyNode *down_node; - struct topo_node *topo_node; - MF_TOPOLOGY_TYPE node_type; + struct topo_node *down_node; HRESULT hr = S_OK; - IMFSample *sample; - TOPOID node_id; DWORD input; - IMFTopologyNode_GetNodeType(node, &node_type); - IMFTopologyNode_GetTopoNodeID(node, &node_id); - - topo_node = session_get_node_by_id(session, node_id); - - switch (node_type) + switch (topo_node->type) { case MF_TOPOLOGY_SOURCESTREAM_NODE: if (FAILED(hr = IMFMediaStream_RequestSample(topo_node->object.source_stream, NULL))) @@ -3397,17 +3727,19 @@ static HRESULT session_request_sample_from_node(struct media_session *session, I case MF_TOPOLOGY_TRANSFORM_NODE: { struct transform_stream *stream = 
&topo_node->u.transform.outputs[output]; + IMFMediaEvent *event; - if (FAILED(hr = IMFTopologyNode_GetOutput(node, output, &down_node, &input))) + if (!(down_node = session_get_topo_node_output(session, topo_node, output, &input))) { - WARN("Failed to node %p/%lu output, hr %#lx.\n", node, output, hr); + WARN("Failed to node %p/%lu output\n", topo_node, output); break; } - if (SUCCEEDED(transform_stream_pop_sample(stream, &sample))) + if (SUCCEEDED(transform_stream_pop_event(stream, &event))) { - session_deliver_sample_to_node(session, down_node, input, sample); - IMFSample_Release(sample); + if (FAILED(hr = transform_stream_handle_event(session, stream, down_node, input, event))) + ERR("Failed to handle stream event, hr %#lx\n", hr); + IMFMediaEvent_Release(event); } else if (transform_node_has_requests(topo_node)) { @@ -3419,12 +3751,10 @@ static HRESULT session_request_sample_from_node(struct media_session *session, I stream->requests++; transform_node_deliver_samples(session, topo_node); } - - IMFTopologyNode_Release(down_node); break; } case MF_TOPOLOGY_TEE_NODE: - FIXME("Unhandled upstream node type %d.\n", node_type); + FIXME("Unhandled upstream node type %d.\n", topo_node->type); default: hr = E_UNEXPECTED; } @@ -3434,10 +3764,8 @@ static HRESULT session_request_sample_from_node(struct media_session *session, I static void session_request_sample(struct media_session *session, IMFStreamSink *sink_stream) { - struct topo_node *sink_node = NULL, *node; - IMFTopologyNode *upstream_node; - DWORD upstream_output; - HRESULT hr; + struct topo_node *sink_node = NULL, *node, *up_node; + DWORD output; LIST_FOR_EACH_ENTRY(node, &session->presentation.nodes, struct topo_node, entry) { @@ -3451,24 +3779,21 @@ static void session_request_sample(struct media_session *session, IMFStreamSink if (!sink_node) return; - if (FAILED(hr = IMFTopologyNode_GetInput(sink_node->node, 0, &upstream_node, &upstream_output))) + if (!(up_node = session_get_topo_node_input(session, 
sink_node, 0, &output))) { - WARN("Failed to get upstream node connection, hr %#lx.\n", hr); + WARN("Failed to node %p/%u input\n", sink_node, 0); return; } sink_node->u.sink.requests++; - if (FAILED(session_request_sample_from_node(session, upstream_node, upstream_output))) + if (FAILED(session_request_sample_from_node(session, up_node, output))) sink_node->u.sink.requests--; - IMFTopologyNode_Release(upstream_node); } static void session_deliver_sample(struct media_session *session, IMFMediaStream *stream, const PROPVARIANT *value) { struct topo_node *source_node = NULL, *node; - IMFTopologyNode *downstream_node; - DWORD downstream_input; - HRESULT hr; + DWORD input; if (value && (value->vt != VT_UNKNOWN || !value->punkVal)) { @@ -3491,14 +3816,13 @@ static void session_deliver_sample(struct media_session *session, IMFMediaStream if (!value) source_node->flags |= TOPO_NODE_END_OF_STREAM; - if (FAILED(hr = IMFTopologyNode_GetOutput(source_node->node, 0, &downstream_node, &downstream_input))) + if (!(node = session_get_topo_node_output(session, source_node, 0, &input))) { - WARN("Failed to get downstream node connection, hr %#lx.\n", hr); + WARN("Failed to node %p/%u output.\n", source_node, 0); return; } - session_deliver_sample_to_node(session, downstream_node, downstream_input, value ? (IMFSample *)value->punkVal : NULL); - IMFTopologyNode_Release(downstream_node); + session_deliver_sample_to_node(session, node, input, value ? 
(IMFSample *)value->punkVal : NULL); } static void session_sink_invalidated(struct media_session *session, IMFMediaEvent *event, IMFStreamSink *sink) diff --git a/dlls/mf/tests/abgr32frame-crop.bmp b/dlls/mf/tests/abgr32frame-crop.bmp new file mode 100644 index 00000000000..2e18f395cbb Binary files /dev/null and b/dlls/mf/tests/abgr32frame-crop.bmp differ diff --git a/dlls/mf/tests/mf.c b/dlls/mf/tests/mf.c index 0a34329bd75..9382d8cae35 100644 --- a/dlls/mf/tests/mf.c +++ b/dlls/mf/tests/mf.c @@ -80,6 +80,7 @@ extern GUID DMOVideoFormat_RGB32; HRESULT (WINAPI *pMFCreateSampleCopierMFT)(IMFTransform **copier); HRESULT (WINAPI *pMFGetTopoNodeCurrentType)(IMFTopologyNode *node, DWORD stream, BOOL output, IMFMediaType **type); HRESULT (WINAPI *pMFCreateDXGIDeviceManager)(UINT *token, IMFDXGIDeviceManager **manager); +HRESULT (WINAPI *pMFCreateVideoSampleAllocatorEx)(REFIID riid, void **obj); BOOL has_video_processor; static BOOL is_vista(void) @@ -5095,7 +5096,9 @@ static void test_sample_grabber_orientation(GUID subtype) { const struct buffer_desc buffer_desc_rgb32 = { - .length = 64 * 64 * 4, .compare = compare_rgb32, .dump = dump_rgb32, .rect = {.right = 64, .bottom = 64}, + .length = 64 * 64 * 4, + .compare = compare_rgb32, .compare_rect = {.right = 64, .bottom = 64}, + .dump = dump_rgb32, .size = {.cx = 64, .cy = 64}, }; const struct sample_desc sample_desc_rgb32 = { @@ -5107,7 +5110,9 @@ static void test_sample_grabber_orientation(GUID subtype) { const struct buffer_desc buffer_desc_nv12 = { - .length = 64 * 64 * 3 / 2, .compare = compare_nv12, .dump = dump_nv12, .rect = {.right = 64, .bottom = 64}, + .length = 64 * 64 * 3 / 2, + .compare = compare_nv12, .compare_rect = {.right = 64, .bottom = 64}, + .dump = dump_nv12, .size = {.cx = 64, .cy = 64}, }; const struct sample_desc sample_desc_nv12 = { @@ -6895,6 +6900,7 @@ void init_functions(void) mod = GetModuleHandleA("mfplat.dll"); X(MFCreateDXGIDeviceManager); + X(MFCreateVideoSampleAllocatorEx); #undef X hr = 
CoInitialize(NULL); diff --git a/dlls/mf/tests/mf_test.h b/dlls/mf/tests/mf_test.h index 5a247e4a0ef..b3247ba00cc 100644 --- a/dlls/mf/tests/mf_test.h +++ b/dlls/mf/tests/mf_test.h @@ -33,6 +33,7 @@ extern HRESULT (WINAPI *pMFCreateSampleCopierMFT)(IMFTransform **copier); extern HRESULT (WINAPI *pMFGetTopoNodeCurrentType)(IMFTopologyNode *node, DWORD stream, BOOL output, IMFMediaType **type); extern HRESULT (WINAPI *pMFCreateDXGIDeviceManager)(UINT *token, IMFDXGIDeviceManager **manager); +extern HRESULT (WINAPI *pMFCreateVideoSampleAllocatorEx)(REFIID riid, void **obj); extern BOOL has_video_processor; void init_functions(void); @@ -74,28 +75,29 @@ extern void check_attributes_(const char *file, int line, IMFAttributes *attribu const struct attribute_desc *desc, ULONG limit); extern void init_media_type(IMFMediaType *mediatype, const struct attribute_desc *desc, ULONG limit); -typedef DWORD (*compare_cb)(const BYTE *data, DWORD *length, const RECT *rect, const BYTE *expect); -extern DWORD compare_nv12(const BYTE *data, DWORD *length, const RECT *rect, const BYTE *expect); -extern DWORD compare_i420(const BYTE *data, DWORD *length, const RECT *rect, const BYTE *expect); -extern DWORD compare_rgb32(const BYTE *data, DWORD *length, const RECT *rect, const BYTE *expect); -extern DWORD compare_rgb24(const BYTE *data, DWORD *length, const RECT *rect, const BYTE *expect); -extern DWORD compare_rgb16(const BYTE *data, DWORD *length, const RECT *rect, const BYTE *expect); -extern DWORD compare_pcm16(const BYTE *data, DWORD *length, const RECT *rect, const BYTE *expect); +typedef DWORD (*compare_cb)(const BYTE *data, DWORD *length, const SIZE *size, const RECT *rect, const BYTE *expect); +extern DWORD compare_nv12(const BYTE *data, DWORD *length, const SIZE *size, const RECT *rect, const BYTE *expect); +extern DWORD compare_i420(const BYTE *data, DWORD *length, const SIZE *size, const RECT *rect, const BYTE *expect); +extern DWORD compare_rgb32(const BYTE *data, DWORD 
*length, const SIZE *size, const RECT *rect, const BYTE *expect); +extern DWORD compare_rgb24(const BYTE *data, DWORD *length, const SIZE *size, const RECT *rect, const BYTE *expect); +extern DWORD compare_rgb16(const BYTE *data, DWORD *length, const SIZE *size, const RECT *rect, const BYTE *expect); +extern DWORD compare_pcm16(const BYTE *data, DWORD *length, const SIZE *size, const RECT *rect, const BYTE *expect); -typedef void (*dump_cb)(const BYTE *data, DWORD length, const RECT *rect, HANDLE output); -extern void dump_rgb32(const BYTE *data, DWORD length, const RECT *rect, HANDLE output); -extern void dump_rgb24(const BYTE *data, DWORD length, const RECT *rect, HANDLE output); -extern void dump_rgb16(const BYTE *data, DWORD length, const RECT *rect, HANDLE output); -extern void dump_nv12(const BYTE *data, DWORD length, const RECT *rect, HANDLE output); -extern void dump_i420(const BYTE *data, DWORD length, const RECT *rect, HANDLE output); +typedef void (*dump_cb)(const BYTE *data, DWORD length, const SIZE *size, HANDLE output); +extern void dump_rgb32(const BYTE *data, DWORD length, const SIZE *size, HANDLE output); +extern void dump_rgb24(const BYTE *data, DWORD length, const SIZE *size, HANDLE output); +extern void dump_rgb16(const BYTE *data, DWORD length, const SIZE *size, HANDLE output); +extern void dump_nv12(const BYTE *data, DWORD length, const SIZE *size, HANDLE output); +extern void dump_i420(const BYTE *data, DWORD length, const SIZE *size, HANDLE output); struct buffer_desc { DWORD length; BOOL todo_length; compare_cb compare; + RECT compare_rect; dump_cb dump; - RECT rect; + SIZE size; }; struct sample_desc diff --git a/dlls/mf/tests/resource.rc b/dlls/mf/tests/resource.rc index ab1fb7ecbb0..6c9a6601c24 100644 --- a/dlls/mf/tests/resource.rc +++ b/dlls/mf/tests/resource.rc @@ -105,6 +105,18 @@ rgb32frame.bmp RCDATA rgb32frame.bmp /* @makedep: rgb32frame-flip.bmp */ rgb32frame-flip.bmp RCDATA rgb32frame-flip.bmp +/* Generated from running the 
tests on Windows */ +/* @makedep: rgb32frame-crop.bmp */ +rgb32frame-crop.bmp RCDATA rgb32frame-crop.bmp + +/* Generated from running the tests on Windows */ +/* @makedep: rgb32frame-crop-flip.bmp */ +rgb32frame-crop-flip.bmp RCDATA rgb32frame-crop-flip.bmp + +/* Generated from running the tests on Windows */ +/* @makedep: abgr32frame-crop.bmp */ +abgr32frame-crop.bmp RCDATA abgr32frame-crop.bmp + /* Generated from running the tests on Windows */ /* @makedep: rgb32frame-grabber.bmp */ rgb32frame-grabber.bmp RCDATA rgb32frame-grabber.bmp diff --git a/dlls/mf/tests/rgb32frame-crop-flip.bmp b/dlls/mf/tests/rgb32frame-crop-flip.bmp new file mode 100644 index 00000000000..34a28ad4473 Binary files /dev/null and b/dlls/mf/tests/rgb32frame-crop-flip.bmp differ diff --git a/dlls/mf/tests/rgb32frame-crop.bmp b/dlls/mf/tests/rgb32frame-crop.bmp new file mode 100644 index 00000000000..310824a5e10 Binary files /dev/null and b/dlls/mf/tests/rgb32frame-crop.bmp differ diff --git a/dlls/mf/tests/transform.c b/dlls/mf/tests/transform.c index 84cf32b8f2d..3f9541af667 100644 --- a/dlls/mf/tests/transform.c +++ b/dlls/mf/tests/transform.c @@ -55,10 +55,10 @@ DEFINE_GUID(DMOVideoFormat_RGB555,D3DFMT_X1R5G5B5,0x524f,0x11ce,0x9f,0x53,0x00,0 DEFINE_GUID(DMOVideoFormat_RGB565,D3DFMT_R5G6B5,0x524f,0x11ce,0x9f,0x53,0x00,0x20,0xaf,0x0b,0xa7,0x70); DEFINE_GUID(DMOVideoFormat_RGB8,D3DFMT_P8,0x524f,0x11ce,0x9f,0x53,0x00,0x20,0xaf,0x0b,0xa7,0x70); DEFINE_GUID(MFAudioFormat_RAW_AAC1,WAVE_FORMAT_RAW_AAC1,0x0000,0x0010,0x80,0x00,0x00,0xaa,0x00,0x38,0x9b,0x71); -DEFINE_GUID(MFVideoFormat_ABGR32,0x00000020,0x0000,0x0010,0x80,0x00,0x00,0xaa,0x00,0x38,0x9b,0x71); -DEFINE_GUID(MFVideoFormat_P208,0x38303250,0x0000,0x0010,0x80,0x00,0x00,0xaa,0x00,0x38,0x9b,0x71); -DEFINE_GUID(MFVideoFormat_VC1S,0x53314356,0x0000,0x0010,0x80,0x00,0x00,0xaa,0x00,0x38,0x9b,0x71); DEFINE_GUID(MFVideoFormat_WMV_Unknown,0x7ce12ca9,0xbfbf,0x43d9,0x9d,0x00,0x82,0xb8,0xed,0x54,0x31,0x6b); 
+DEFINE_MEDIATYPE_GUID(MFVideoFormat_ABGR32,D3DFMT_A8B8G8R8); +DEFINE_MEDIATYPE_GUID(MFVideoFormat_P208,MAKEFOURCC('P','2','0','8')); +DEFINE_MEDIATYPE_GUID(MFVideoFormat_VC1S,MAKEFOURCC('V','C','1','S')); DEFINE_MEDIATYPE_GUID(MEDIASUBTYPE_IV50,MAKEFOURCC('I','V','5','0')); DEFINE_GUID(mft_output_sample_incomplete,0xffffff,0xffff,0xffff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff); @@ -442,8 +442,8 @@ void init_media_type(IMFMediaType *mediatype, const struct attribute_desc *desc, } } -static void init_dmo_media_type_video(DMO_MEDIA_TYPE *media_type, - const GUID *subtype, const LONG width, const LONG height) +static void init_dmo_media_type_video(DMO_MEDIA_TYPE *media_type, const GUID *subtype, + const LONG width, const LONG height, const REFERENCE_TIME time_per_frame) { UINT32 image_size = 0, extra_bytes = subtype_to_extra_bytes(subtype); VIDEOINFOHEADER *header = (VIDEOINFOHEADER *)(media_type + 1); @@ -460,6 +460,7 @@ static void init_dmo_media_type_video(DMO_MEDIA_TYPE *media_type, header->rcTarget.left = 0; header->rcTarget.right = width; header->rcTarget.bottom = height; + header->AvgTimePerFrame = time_per_frame; header->bmiHeader.biSize = sizeof(header->bmiHeader); header->bmiHeader.biWidth = width; header->bmiHeader.biHeight = height; @@ -772,20 +773,22 @@ static void check_mft_set_output_type_required_(int line, IMFTransform *transfor ok_(__FILE__, line)(!ref, "Release returned %lu\n", ref); } -static void check_mft_set_output_type(IMFTransform *transform, const struct attribute_desc *attributes, - HRESULT expect_hr) +#define check_mft_set_output_type(a, b, c) check_mft_set_output_type_(__LINE__, a, b, c, FALSE) +static void check_mft_set_output_type_(int line, IMFTransform *transform, const struct attribute_desc *attributes, + HRESULT expect_hr, BOOL todo) { IMFMediaType *media_type; HRESULT hr; hr = MFCreateMediaType(&media_type); - ok(hr == S_OK, "MFCreateMediaType returned hr %#lx.\n", hr); + ok_(__FILE__, line)(hr == S_OK, "MFCreateMediaType returned 
hr %#lx.\n", hr); init_media_type(media_type, attributes, -1); hr = IMFTransform_SetOutputType(transform, 0, media_type, MFT_SET_TYPE_TEST_ONLY); - ok(hr == expect_hr, "SetOutputType returned %#lx.\n", hr); + ok_(__FILE__, line)(hr == expect_hr, "SetOutputType returned %#lx.\n", hr); hr = IMFTransform_SetOutputType(transform, 0, media_type, 0); - ok(hr == expect_hr, "SetOutputType returned %#lx.\n", hr); + todo_wine_if(todo) + ok_(__FILE__, line)(hr == expect_hr, "SetOutputType returned %#lx.\n", hr); IMFMediaType_Release(media_type); } @@ -866,14 +869,14 @@ static HRESULT check_mft_process_output_(int line, IMFTransform *transform, IMFS return ret; } -DWORD compare_nv12(const BYTE *data, DWORD *length, const RECT *rect, const BYTE *expect) +DWORD compare_nv12(const BYTE *data, DWORD *length, const SIZE *size, const RECT *rect, const BYTE *expect) { - DWORD x, y, size, diff = 0, width = (rect->right + 0xf) & ~0xf, height = (rect->bottom + 0xf) & ~0xf; + DWORD x, y, data_size, diff = 0, width = size->cx, height = size->cy; /* skip BMP header and RGB data from the dump */ - size = *(DWORD *)(expect + 2); - *length = *length + size; - expect = expect + size; + data_size = *(DWORD *)(expect + 2); + *length = *length + data_size; + expect = expect + data_size; for (y = 0; y < height; y++, data += width, expect += width) { @@ -896,18 +899,18 @@ DWORD compare_nv12(const BYTE *data, DWORD *length, const RECT *rect, const BYTE } } - size = (rect->right - rect->left) * (rect->bottom - rect->top) * 3 / 2; - return diff * 100 / 256 / size; + data_size = (rect->right - rect->left) * (rect->bottom - rect->top) * 3 / 2; + return diff * 100 / 256 / data_size; } -DWORD compare_i420(const BYTE *data, DWORD *length, const RECT *rect, const BYTE *expect) +DWORD compare_i420(const BYTE *data, DWORD *length, const SIZE *size, const RECT *rect, const BYTE *expect) { - DWORD i, x, y, size, diff = 0, width = (rect->right + 0xf) & ~0xf, height = (rect->bottom + 0xf) & ~0xf; + DWORD i, x, y, 
data_size, diff = 0, width = size->cx, height = size->cy; /* skip BMP header and RGB data from the dump */ - size = *(DWORD *)(expect + 2); - *length = *length + size; - expect = expect + size; + data_size = *(DWORD *)(expect + 2); + *length = *length + data_size; + expect = expect + data_size; for (y = 0; y < height; y++, data += width, expect += width) { @@ -929,18 +932,44 @@ DWORD compare_i420(const BYTE *data, DWORD *length, const RECT *rect, const BYTE } } - size = (rect->right - rect->left) * (rect->bottom - rect->top) * 3 / 2; - return diff * 100 / 256 / size; + data_size = (rect->right - rect->left) * (rect->bottom - rect->top) * 3 / 2; + return diff * 100 / 256 / data_size; +} + +static DWORD compare_abgr32(const BYTE *data, DWORD *length, const SIZE *size, const RECT *rect, const BYTE *expect) +{ + DWORD x, y, data_size, diff = 0, width = size->cx, height = size->cy; + + /* skip BMP header from the dump */ + data_size = *(DWORD *)(expect + 2 + 2 * sizeof(DWORD)); + *length = *length + data_size; + expect = expect + data_size; + + for (y = 0; y < height; y++, data += width * 4, expect += width * 4) + { + if (y < rect->top || y >= rect->bottom) continue; + for (x = 0; x < width; x++) + { + if (x < rect->left || x >= rect->right) continue; + diff += abs((int)expect[4 * x + 0] - (int)data[4 * x + 0]); + diff += abs((int)expect[4 * x + 1] - (int)data[4 * x + 1]); + diff += abs((int)expect[4 * x + 2] - (int)data[4 * x + 2]); + diff += abs((int)expect[4 * x + 3] - (int)data[4 * x + 3]); + } + } + + data_size = (rect->right - rect->left) * (rect->bottom - rect->top) * 4; + return diff * 100 / 256 / data_size; } -static DWORD compare_rgb(const BYTE *data, DWORD *length, const RECT *rect, const BYTE *expect, UINT bits) +static DWORD compare_rgb(const BYTE *data, DWORD *length, const SIZE *size, const RECT *rect, const BYTE *expect, UINT bits) { - DWORD x, y, step = bits / 8, size, diff = 0, width = (rect->right + 0xf) & ~0xf, height = (rect->bottom + 0xf) & ~0xf; + 
DWORD x, y, step = bits / 8, data_size, diff = 0, width = size->cx, height = size->cy; /* skip BMP header from the dump */ - size = *(DWORD *)(expect + 2 + 2 * sizeof(DWORD)); - *length = *length + size; - expect = expect + size; + data_size = *(DWORD *)(expect + 2 + 2 * sizeof(DWORD)); + *length = *length + data_size; + expect = expect + data_size; for (y = 0; y < height; y++, data += width * step, expect += width * step) { @@ -954,49 +983,49 @@ static DWORD compare_rgb(const BYTE *data, DWORD *length, const RECT *rect, cons } } - size = (rect->right - rect->left) * (rect->bottom - rect->top) * min(step, 3); - return diff * 100 / 256 / size; + data_size = (rect->right - rect->left) * (rect->bottom - rect->top) * min(step, 3); + return diff * 100 / 256 / data_size; } -DWORD compare_rgb32(const BYTE *data, DWORD *length, const RECT *rect, const BYTE *expect) +DWORD compare_rgb32(const BYTE *data, DWORD *length, const SIZE *size, const RECT *rect, const BYTE *expect) { - return compare_rgb(data, length, rect, expect, 32); + return compare_rgb(data, length, size, rect, expect, 32); } -DWORD compare_rgb24(const BYTE *data, DWORD *length, const RECT *rect, const BYTE *expect) +DWORD compare_rgb24(const BYTE *data, DWORD *length, const SIZE *size, const RECT *rect, const BYTE *expect) { - return compare_rgb(data, length, rect, expect, 24); + return compare_rgb(data, length, size, rect, expect, 24); } -DWORD compare_rgb16(const BYTE *data, DWORD *length, const RECT *rect, const BYTE *expect) +DWORD compare_rgb16(const BYTE *data, DWORD *length, const SIZE *size, const RECT *rect, const BYTE *expect) { - return compare_rgb(data, length, rect, expect, 16); + return compare_rgb(data, length, size, rect, expect, 16); } -DWORD compare_pcm16(const BYTE *data, DWORD *length, const RECT *rect, const BYTE *expect) +DWORD compare_pcm16(const BYTE *data, DWORD *length, const SIZE *size, const RECT *rect, const BYTE *expect) { const INT16 *data_pcm = (INT16 *)data, *expect_pcm = 
(INT16 *)expect; - DWORD i, size = *length / 2, diff = 0; + DWORD i, data_size = *length / 2, diff = 0; - for (i = 0; i < size; i++) + for (i = 0; i < data_size; i++) diff += abs((int)*expect_pcm++ - (int)*data_pcm++); - return diff * 100 / 65536 / size; + return diff * 100 / 65536 / data_size; } -static DWORD compare_bytes(const BYTE *data, DWORD *length, const RECT *rect, const BYTE *expect) +static DWORD compare_bytes(const BYTE *data, DWORD *length, const SIZE *size, const RECT *rect, const BYTE *expect) { - DWORD i, size = *length, diff = 0; + DWORD i, data_size = *length, diff = 0; - for (i = 0; i < size; i++) + for (i = 0; i < data_size; i++) diff += abs((int)*expect++ - (int)*data++); - return diff * 100 / 256 / size; + return diff * 100 / 256 / data_size; } -static void dump_rgb(const BYTE *data, DWORD length, const RECT *rect, HANDLE output, UINT bits) +static void dump_rgb(const BYTE *data, DWORD length, const SIZE *size, HANDLE output, UINT bits) { - DWORD width = (rect->right + 0xf) & ~0xf, height = (rect->bottom + 0xf) & ~0xf; + DWORD width = size->cx, height = size->cy; static const char magic[2] = "BM"; struct { @@ -1027,24 +1056,24 @@ static void dump_rgb(const BYTE *data, DWORD length, const RECT *rect, HANDLE ou ok(written == length, "written %lu bytes\n", written); } -void dump_rgb32(const BYTE *data, DWORD length, const RECT *rect, HANDLE output) +void dump_rgb32(const BYTE *data, DWORD length, const SIZE *size, HANDLE output) { - return dump_rgb(data, length, rect, output, 32); + return dump_rgb(data, length, size, output, 32); } -void dump_rgb24(const BYTE *data, DWORD length, const RECT *rect, HANDLE output) +void dump_rgb24(const BYTE *data, DWORD length, const SIZE *size, HANDLE output) { - return dump_rgb(data, length, rect, output, 24); + return dump_rgb(data, length, size, output, 24); } -void dump_rgb16(const BYTE *data, DWORD length, const RECT *rect, HANDLE output) +void dump_rgb16(const BYTE *data, DWORD length, const SIZE *size, 
HANDLE output) { - return dump_rgb(data, length, rect, output, 16); + return dump_rgb(data, length, size, output, 16); } -void dump_nv12(const BYTE *data, DWORD length, const RECT *rect, HANDLE output) +void dump_nv12(const BYTE *data, DWORD length, const SIZE *size, HANDLE output) { - DWORD written, x, y, width = (rect->right + 0xf) & ~0xf, height = (rect->bottom + 0xf) & ~0xf; + DWORD written, x, y, width = size->cx, height = size->cy; BYTE *rgb32_data = malloc(width * height * 4), *rgb32 = rgb32_data; BOOL ret; @@ -1056,7 +1085,7 @@ void dump_nv12(const BYTE *data, DWORD length, const RECT *rect, HANDLE output) *rgb32++ = 0xff; } - dump_rgb32(rgb32_data, width * height * 4, rect, output); + dump_rgb32(rgb32_data, width * height * 4, size, output); free(rgb32_data); ret = WriteFile(output, data, length, &written, NULL); @@ -1064,9 +1093,9 @@ void dump_nv12(const BYTE *data, DWORD length, const RECT *rect, HANDLE output) ok(written == length, "written %lu bytes\n", written); } -void dump_i420(const BYTE *data, DWORD length, const RECT *rect, HANDLE output) +void dump_i420(const BYTE *data, DWORD length, const SIZE *size, HANDLE output) { - DWORD written, x, y, width = (rect->right + 0xf) & ~0xf, height = (rect->bottom + 0xf) & ~0xf; + DWORD written, x, y, width = size->cx, height = size->cy; BYTE *rgb32_data = malloc(width * height * 4), *rgb32 = rgb32_data; BOOL ret; @@ -1078,7 +1107,7 @@ void dump_i420(const BYTE *data, DWORD length, const RECT *rect, HANDLE output) *rgb32++ = 0xff; } - dump_rgb32(rgb32_data, width * height * 4, rect, output); + dump_rgb32(rgb32_data, width * height * 4, size, output); free(rgb32_data); ret = WriteFile(output, data, length, &written, NULL); @@ -1151,7 +1180,7 @@ static void dump_mf_media_buffer(IMFMediaBuffer *buffer, const struct buffer_des ok(hr == S_OK, "Lock returned %#lx\n", hr); if (buffer_desc->dump) - buffer_desc->dump(data, length, &buffer_desc->rect, output); + buffer_desc->dump(data, length, &buffer_desc->size, 
output); else { if (buffer_desc->length == -1) @@ -1219,9 +1248,9 @@ static DWORD check_mf_media_buffer_(const char *file, int line, IMFMediaBuffer * todo_wine_if(expect->todo_length) ok_(file, line)(0, "missing %#lx bytes\n", length - *expect_data_len); else if (!expect->compare) - diff = compare_bytes(data, &length, NULL, *expect_data); + diff = compare_bytes(data, &length, NULL, NULL, *expect_data); else - diff = expect->compare(data, &length, &expect->rect, *expect_data); + diff = expect->compare(data, &length, &expect->size, &expect->compare_rect, *expect_data); } hr = IMFMediaBuffer_Unlock(buffer); @@ -1453,9 +1482,9 @@ static DWORD check_dmo_output_data_buffer_(int line, DMO_OUTPUT_DATA_BUFFER *out if (data_length < buffer_length) ok_(__FILE__, line)(0, "Missing %#lx bytes\n", buffer_length - data_length); else if (!buffer_desc->compare) - diff = compare_bytes(buffer, &buffer_length, NULL, data); + diff = compare_bytes(buffer, &buffer_length, NULL, NULL, data); else - diff = buffer_desc->compare(buffer, &buffer_length, &buffer_desc->rect, data); + diff = buffer_desc->compare(buffer, &buffer_length, &buffer_desc->size, &buffer_desc->compare_rect, data); return diff; } @@ -1476,11 +1505,11 @@ static void check_dmo_get_output_size_info_video_(int line, IMediaObject *dmo, hr = IMediaObject_SetOutputType(dmo, 0, NULL, DMO_SET_TYPEF_CLEAR); ok_(__FILE__, line)(hr == S_OK, "Failed to clear output type, hr %#lx.\n", hr); - init_dmo_media_type_video(type, input_subtype, width, height); + init_dmo_media_type_video(type, input_subtype, width, height, 0); hr = IMediaObject_SetInputType(dmo, 0, type, 0); ok_(__FILE__, line)(hr == S_OK, "SetInputType returned %#lx.\n", hr); - init_dmo_media_type_video(type, output_subtype, width, height); + init_dmo_media_type_video(type, output_subtype, width, height, 0); hr = IMediaObject_SetOutputType(dmo, 0, type, 0); todo_wine_if(IsEqualGUID(output_subtype, &MEDIASUBTYPE_NV11) || IsEqualGUID(output_subtype, &MEDIASUBTYPE_IYUV)) @@ 
-3490,37 +3519,26 @@ static void test_wma_decoder_dmo_input_type(void) hr = IMediaObject_SetInputType(dmo, 0, NULL, DMO_SET_TYPEF_CLEAR); ok(hr == S_OK, "SetInputType returned %#lx.\n", hr); hr = IMediaObject_GetInputCurrentType(dmo, 1, NULL); - todo_wine ok(hr == DMO_E_INVALIDSTREAMINDEX, "GetInputCurrentType returned %#lx.\n", hr); hr = IMediaObject_GetInputCurrentType(dmo, 0, NULL); - todo_wine ok(hr == DMO_E_TYPE_NOT_SET, "GetInputCurrentType returned %#lx.\n", hr); hr = IMediaObject_GetInputCurrentType(dmo, 1, &type); - todo_wine ok(hr == DMO_E_INVALIDSTREAMINDEX, "GetInputCurrentType returned %#lx.\n", hr); hr = IMediaObject_GetInputCurrentType(dmo, 0, &type); - todo_wine ok(hr == DMO_E_TYPE_NOT_SET, "GetInputCurrentType returned %#lx.\n", hr); hr = IMediaObject_SetInputType(dmo, 0, good_input_type, 0); ok(hr == S_OK, "SetInputType returned %#lx.\n", hr); hr = IMediaObject_GetInputCurrentType(dmo, 1, NULL); - todo_wine ok(hr == DMO_E_INVALIDSTREAMINDEX, "GetInputCurrentType returned %#lx.\n", hr); hr = IMediaObject_GetInputCurrentType(dmo, 0, NULL); - todo_wine ok(hr == E_POINTER, "GetInputCurrentType returned %#lx.\n", hr); hr = IMediaObject_GetInputCurrentType(dmo, 1, &type); - todo_wine ok(hr == DMO_E_INVALIDSTREAMINDEX, "GetInputCurrentType returned %#lx.\n", hr); hr = IMediaObject_GetInputCurrentType(dmo, 0, &type); - todo_wine ok(hr == S_OK, "GetInputCurrentType returned %#lx.\n", hr); - if (hr == S_OK) - { - check_dmo_media_type(&type, good_input_type); - MoFreeMediaType(&type); - } + check_dmo_media_type(&type, good_input_type); + MoFreeMediaType(&type); /* Cleanup. 
*/ ret = IMediaObject_Release(dmo); @@ -3567,6 +3585,8 @@ static void test_wma_decoder_dmo_output_type(void) good_output_type = (void *)buffer_good_output; bad_output_type = (void *)buffer_bad_output; init_dmo_media_type_audio(input_type, input_subtype, channel_count, rate, 16); + ((WAVEFORMATEX *)(input_type + 1))->nBlockAlign = 640; + ((WAVEFORMATEX *)(input_type + 1))->nAvgBytesPerSec = 2000; init_dmo_media_type_audio(good_output_type, &MEDIASUBTYPE_PCM, channel_count, rate, bits_per_sample); memset(bad_output_type, 0, sizeof(buffer_bad_output)); @@ -3692,8 +3712,6 @@ static void test_wma_decoder_dmo_output_type(void) /* Test GetOutputCurrentType. */ hr = IMediaObject_SetOutputType(dmo, 0, NULL, DMO_SET_TYPEF_CLEAR); ok(hr == S_OK, "SetOutputType returned %#lx.\n", hr); - todo_wine - { hr = IMediaObject_GetOutputCurrentType(dmo, 1, NULL); ok(hr == DMO_E_INVALIDSTREAMINDEX, "GetOutputCurrentType returned %#lx.\n", hr); hr = IMediaObject_GetOutputCurrentType(dmo, 0, NULL); @@ -3702,12 +3720,9 @@ static void test_wma_decoder_dmo_output_type(void) ok(hr == DMO_E_INVALIDSTREAMINDEX, "GetOutputCurrentType returned %#lx.\n", hr); hr = IMediaObject_GetOutputCurrentType(dmo, 0, &type); ok(hr == DMO_E_TYPE_NOT_SET, "GetOutputCurrentType returned %#lx.\n", hr); - } hr = IMediaObject_SetOutputType(dmo, 0, good_output_type, 0); ok(hr == S_OK, "SetOutputType returned %#lx.\n", hr); - todo_wine - { hr = IMediaObject_GetOutputCurrentType(dmo, 1, NULL); ok(hr == DMO_E_INVALIDSTREAMINDEX, "GetOutputCurrentType returned %#lx.\n", hr); hr = IMediaObject_GetOutputCurrentType(dmo, 0, NULL); @@ -3716,12 +3731,8 @@ static void test_wma_decoder_dmo_output_type(void) ok(hr == DMO_E_INVALIDSTREAMINDEX, "GetOutputCurrentType returned %#lx.\n", hr); hr = IMediaObject_GetOutputCurrentType(dmo, 0, &type); ok(hr == S_OK, "GetOutputCurrentType returned %#lx.\n", hr); - } - if (hr == S_OK) - { - check_dmo_media_type(&type, good_output_type); - MoFreeMediaType(&type); - } + 
check_dmo_media_type(&type, good_output_type); + MoFreeMediaType(&type); /* Test GetOutputSizeInfo. */ hr = IMediaObject_GetOutputSizeInfo(dmo, 1, NULL, NULL); @@ -3738,20 +3749,17 @@ static void test_wma_decoder_dmo_output_type(void) ok(alignment == 1, "Unexpected alignment %lu.\n", alignment); hr = IMediaObject_GetInputCurrentType(dmo, 0, input_type); - todo_wine ok(hr == S_OK, "GetInputCurrentType returned %#lx.\n", hr); hr = IMediaObject_SetInputType(dmo, 0, input_type, 0); ok(hr == S_OK, "SetInputType returned %#lx.\n", hr); hr = IMediaObject_GetOutputCurrentType(dmo, 0, &type); - todo_wine ok(hr == S_OK, "GetOutputCurrentType returned %#lx.\n", hr); init_dmo_media_type_audio(input_type, input_subtype, channel_count, rate * 2, 32); hr = IMediaObject_SetInputType(dmo, 0, input_type, 0); ok(hr == S_OK, "SetInputType returned %#lx.\n", hr); hr = IMediaObject_GetOutputCurrentType(dmo, 0, &type); - todo_wine - ok(hr == DMO_E_TYPE_NOT_SET, "GetOutputCurrentType returned %#lx.\n", hr); + todo_wine ok(hr == DMO_E_TYPE_NOT_SET, "GetOutputCurrentType returned %#lx.\n", hr); /* Cleanup. 
*/ ret = IMediaObject_Release(dmo); @@ -4191,7 +4199,8 @@ static void test_h264_decoder(void) const struct buffer_desc output_buffer_desc_nv12 = { .length = actual_width * actual_height * 3 / 2, - .compare = compare_nv12, .dump = dump_nv12, .rect = {.right = 82, .bottom = 84}, + .compare = compare_nv12, .compare_rect = {.right = 82, .bottom = 84}, + .dump = dump_nv12, .size = {.cx = actual_width, .cy = actual_height}, }; const struct sample_desc output_sample_desc_nv12 = { @@ -4202,7 +4211,8 @@ static void test_h264_decoder(void) const struct buffer_desc output_buffer_desc_i420 = { .length = actual_width * actual_height * 3 / 2, - .compare = compare_i420, .dump = dump_i420, .rect = {.right = 82, .bottom = 84}, + .compare = compare_i420, .compare_rect = {.right = 82, .bottom = 84}, + .dump = dump_i420, .size = {.cx = actual_width, .cy = actual_height}, }; const struct sample_desc expect_output_sample_i420 = { @@ -5838,12 +5848,14 @@ static void test_wmv_decoder(void) const struct buffer_desc output_buffer_desc_nv12 = { .length = actual_width * actual_height * 3 / 2, - .compare = compare_nv12, .dump = dump_nv12, .rect = {.right = 82, .bottom = 84}, + .compare = compare_nv12, .compare_rect = {.right = 82, .bottom = 84}, + .dump = dump_nv12, .size = {.cx = actual_width, .cy = actual_height}, }; const struct buffer_desc output_buffer_desc_rgb = { .length = actual_width * actual_height * 4, - .compare = compare_rgb32, .dump = dump_rgb32, .rect = {.right = 82, .bottom = 84}, + .compare = compare_rgb32, .compare_rect = {.right = 82, .bottom = 84}, + .dump = dump_rgb32, .size = {.cx = actual_width, .cy = actual_height}, }; const struct sample_desc output_sample_desc_nv12 = { @@ -5876,6 +5888,7 @@ static void test_wmv_decoder(void) const struct sample_desc *output_sample_desc; const WCHAR *result_bitmap; ULONG delta; + BOOL todo; } transform_tests[] = { @@ -5924,7 +5937,7 @@ static void test_wmv_decoder(void) }, { - /* WMV1 -> RGB (positive stride */ + /* WMV1 -> RGB 
(positive stride) */ .output_type_desc = output_type_desc_rgb_positive_stride, .expect_output_type_desc = expect_output_type_desc_rgb, .expect_input_info = &expect_input_info_rgb, @@ -6106,6 +6119,10 @@ static void test_wmv_decoder(void) transform_tests[j].result_bitmap); ok(ret <= transform_tests[j].delta, "got %lu%% diff\n", ret); IMFCollection_Release(output_samples); + + hr = IMFTransform_SetOutputType(transform, 0, NULL, 0); + ok(hr == S_OK, "SetOutputType returned %#lx\n", hr); + winetest_pop_context(); } @@ -6163,7 +6180,7 @@ static void test_wmv_decoder_dmo_input_type(void) good_input_type = (void *)buffer_good_input; bad_input_type = (void *)buffer_bad_input; - init_dmo_media_type_video(good_input_type, input_subtype, width, height); + init_dmo_media_type_video(good_input_type, input_subtype, width, height, 0); memset(bad_input_type, 0, sizeof(buffer_bad_input)); header = (void *)(good_input_type + 1); @@ -6269,7 +6286,7 @@ static void test_wmv_decoder_dmo_input_type(void) winetest_push_context("type %lu", i); - init_dmo_media_type_video(good_input_type, subtype, width, height); + init_dmo_media_type_video(good_input_type, subtype, width, height, 0); hr = IMediaObject_SetInputType(dmo, 0, good_input_type, 0); ok(hr == S_OK, "SetInputType returned %#lx.\n", hr); hr = IMediaObject_SetInputType(dmo, 0, good_input_type, DMO_SET_TYPEF_CLEAR); @@ -6282,7 +6299,7 @@ static void test_wmv_decoder_dmo_input_type(void) winetest_pop_context(); } - init_dmo_media_type_video(good_input_type, input_subtype, width, height); + init_dmo_media_type_video(good_input_type, input_subtype, width, height, 0); header->dwBitRate = 0xdeadbeef; header->dwBitErrorRate = 0xdeadbeef; header->AvgTimePerFrame = 0xdeadbeef; @@ -6294,31 +6311,31 @@ static void test_wmv_decoder_dmo_input_type(void) hr = IMediaObject_SetInputType(dmo, 0, good_input_type, 0); ok(hr == S_OK, "SetInputType returned %#lx.\n", hr); - init_dmo_media_type_video(good_input_type, input_subtype, width, height); + 
init_dmo_media_type_video(good_input_type, input_subtype, width, height, 0); good_input_type->majortype = MFMediaType_Default; hr = IMediaObject_SetInputType(dmo, 0, good_input_type, DMO_SET_TYPEF_TEST_ONLY); ok(hr == DMO_E_TYPE_NOT_ACCEPTED, "SetInputType returned %#lx.\n", hr); - init_dmo_media_type_video(good_input_type, &MEDIASUBTYPE_None, width, height); + init_dmo_media_type_video(good_input_type, &MEDIASUBTYPE_None, width, height, 0); hr = IMediaObject_SetInputType(dmo, 0, good_input_type, DMO_SET_TYPEF_TEST_ONLY); ok(hr == DMO_E_TYPE_NOT_ACCEPTED, "SetInputType returned %#lx.\n", hr); - init_dmo_media_type_video(good_input_type, input_subtype, width, height); + init_dmo_media_type_video(good_input_type, input_subtype, width, height, 0); good_input_type->formattype = FORMAT_None; hr = IMediaObject_SetInputType(dmo, 0, good_input_type, DMO_SET_TYPEF_TEST_ONLY); ok(hr == DMO_E_TYPE_NOT_ACCEPTED, "SetInputType returned %#lx.\n", hr); - init_dmo_media_type_video(good_input_type, input_subtype, width, height); + init_dmo_media_type_video(good_input_type, input_subtype, width, height, 0); good_input_type->cbFormat = 1; hr = IMediaObject_SetInputType(dmo, 0, good_input_type, DMO_SET_TYPEF_TEST_ONLY); ok(hr == DMO_E_TYPE_NOT_ACCEPTED, "SetInputType returned %#lx.\n", hr); - init_dmo_media_type_video(good_input_type, input_subtype, width, height); + init_dmo_media_type_video(good_input_type, input_subtype, width, height, 0); good_input_type->pbFormat = NULL; hr = IMediaObject_SetInputType(dmo, 0, good_input_type, DMO_SET_TYPEF_TEST_ONLY); ok(hr == DMO_E_TYPE_NOT_ACCEPTED, "SetInputType returned %#lx.\n", hr); - init_dmo_media_type_video(good_input_type, input_subtype, width, height); + init_dmo_media_type_video(good_input_type, input_subtype, width, height, 0); header->bmiHeader.biSize = 0; hr = IMediaObject_SetInputType(dmo, 0, good_input_type, DMO_SET_TYPEF_TEST_ONLY); todo_wine @@ -6332,7 +6349,7 @@ static void test_wmv_decoder_dmo_input_type(void) todo_wine ok(hr 
== DMO_E_TYPE_NOT_ACCEPTED, "SetInputType returned %#lx.\n", hr); - init_dmo_media_type_video(good_input_type, input_subtype, width, height); + init_dmo_media_type_video(good_input_type, input_subtype, width, height, 0); header->bmiHeader.biWidth = 0; hr = IMediaObject_SetInputType(dmo, 0, good_input_type, DMO_SET_TYPEF_TEST_ONLY); todo_wine @@ -6349,7 +6366,7 @@ static void test_wmv_decoder_dmo_input_type(void) hr = IMediaObject_SetInputType(dmo, 0, good_input_type, DMO_SET_TYPEF_TEST_ONLY); ok(hr == S_OK, "SetInputType returned %#lx.\n", hr); - init_dmo_media_type_video(good_input_type, input_subtype, width, height); + init_dmo_media_type_video(good_input_type, input_subtype, width, height, 0); header->bmiHeader.biHeight = 0; hr = IMediaObject_SetInputType(dmo, 0, good_input_type, DMO_SET_TYPEF_TEST_ONLY); todo_wine @@ -6365,7 +6382,7 @@ static void test_wmv_decoder_dmo_input_type(void) hr = IMediaObject_SetInputType(dmo, 0, good_input_type, DMO_SET_TYPEF_TEST_ONLY); ok(hr == S_OK, "SetInputType returned %#lx.\n", hr); - init_dmo_media_type_video(good_input_type, input_subtype, width, height); + init_dmo_media_type_video(good_input_type, input_subtype, width, height, 0); header->bmiHeader.biCompression = 0; hr = IMediaObject_SetInputType(dmo, 0, good_input_type, DMO_SET_TYPEF_TEST_ONLY); todo_wine @@ -6392,6 +6409,7 @@ static void test_wmv_decoder_dmo_output_type(void) char buffer_good_output[2048], buffer_bad_output[2048], buffer_input[2048]; DMO_MEDIA_TYPE *good_output_type, *bad_output_type, *input_type, type; const GUID* input_subtype = &MEDIASUBTYPE_WMV1; + REFERENCE_TIME time_per_frame = 10000000; LONG width = 16, height = 16; DWORD count, i, ret; IMediaObject *dmo; @@ -6420,7 +6438,7 @@ static void test_wmv_decoder_dmo_output_type(void) input_type = (void *)buffer_input; good_output_type = (void *)buffer_good_output; bad_output_type = (void *)buffer_bad_output; - init_dmo_media_type_video(input_type, input_subtype, width, height); + 
init_dmo_media_type_video(input_type, input_subtype, width, height, time_per_frame); memset(bad_output_type, 0, sizeof(buffer_bad_output)); /* Test GetOutputType. */ @@ -6456,7 +6474,7 @@ static void test_wmv_decoder_dmo_output_type(void) while (SUCCEEDED(hr = IMediaObject_GetOutputType(dmo, 0, ++i, &type))) { winetest_push_context("type %lu", i); - init_dmo_media_type_video(good_output_type, wmv_decoder_output_subtypes[i], width, height); + init_dmo_media_type_video(good_output_type, wmv_decoder_output_subtypes[i], width, height, time_per_frame); check_dmo_media_type(&type, good_output_type); MoFreeMediaType(&type); winetest_pop_context(); @@ -6465,7 +6483,7 @@ static void test_wmv_decoder_dmo_output_type(void) ok(i == count, "%lu types.\n", i); /* Test SetOutputType. */ - init_dmo_media_type_video(good_output_type, &MEDIASUBTYPE_RGB24, width, height); + init_dmo_media_type_video(good_output_type, &MEDIASUBTYPE_RGB24, width, height, time_per_frame); hr = IMediaObject_SetInputType(dmo, 0, NULL, DMO_SET_TYPEF_CLEAR); ok(hr == S_OK, "SetInputType returned %#lx.\n", hr); hr = IMediaObject_SetOutputType(dmo, 1, NULL, 0); @@ -6627,7 +6645,8 @@ static void test_wmv_decoder_media_object(void) const struct buffer_desc output_buffer_desc_nv12 = { .length = data_width * data_height * 3 / 2, - .compare = compare_nv12, .dump = dump_nv12, .rect = {.right = 82, .bottom = 84}, + .compare = compare_nv12, .compare_rect = {.right = 82, .bottom = 84}, + .dump = dump_nv12, .size = {.cx = data_width, .cy = data_height}, }; DWORD in_count, out_count, size, alignment, wmv_data_length, status, expected_status, diff; struct media_buffer *input_media_buffer = NULL, *output_media_buffer = NULL; @@ -6685,10 +6704,10 @@ static void test_wmv_decoder_media_object(void) memcpy(input_media_buffer->data, wmv_data, wmv_data_length); input_media_buffer->length = wmv_data_length; - init_dmo_media_type_video(type, &MEDIASUBTYPE_WMV1, data_width, data_height); + init_dmo_media_type_video(type, 
&MEDIASUBTYPE_WMV1, data_width, data_height, 0); hr = IMediaObject_SetInputType(media_object, 0, type, 0); ok(hr == S_OK, "SetInputType returned %#lx.\n", hr); - init_dmo_media_type_video(type, &MEDIASUBTYPE_NV12, data_width, data_height); + init_dmo_media_type_video(type, &MEDIASUBTYPE_NV12, data_width, data_height, 0); hr = IMediaObject_SetOutputType(media_object, 0, type, 0); ok(hr == S_OK, "SetOutputType returned %#lx.\n", hr); @@ -6756,12 +6775,10 @@ static void test_wmv_decoder_media_object(void) ok(output_media_buffer->length == 0, "Unexpected length %#lx.\n", output_media_buffer->length); /* Test ProcessOutput with setting framerate. */ - init_dmo_media_type_video(type, &MEDIASUBTYPE_WMV1, data_width, data_height); - ((VIDEOINFOHEADER *)type->pbFormat)->AvgTimePerFrame = 100000; + init_dmo_media_type_video(type, &MEDIASUBTYPE_WMV1, data_width, data_height, 100000); hr = IMediaObject_SetInputType(media_object, 0, type, 0); ok(hr == S_OK, "SetInputType returned %#lx.\n", hr); - init_dmo_media_type_video(type, &MEDIASUBTYPE_NV12, data_width, data_height); - ((VIDEOINFOHEADER *)type->pbFormat)->AvgTimePerFrame = 200000; + init_dmo_media_type_video(type, &MEDIASUBTYPE_NV12, data_width, data_height, 200000); hr = IMediaObject_SetOutputType(media_object, 0, type, 0); ok(hr == S_OK, "SetOutputType returned %#lx.\n", hr); @@ -7023,7 +7040,8 @@ static void test_color_convert(void) const struct buffer_desc output_buffer_desc = { .length = actual_width * actual_height * 4, - .compare = compare_rgb32, .dump = dump_rgb32, .rect = {.right = 82, .bottom = 84}, + .compare = compare_rgb32, .compare_rect = {.right = 82, .bottom = 84}, + .dump = dump_rgb32, .size = {.cx = actual_width, .cy = actual_height}, }; const struct attribute_desc output_sample_attributes[] = { @@ -7337,6 +7355,7 @@ static void test_video_processor(void) static const struct attribute_desc expect_transform_attributes[] = { ATTR_UINT32(MFT_SUPPORT_3DVIDEO, 1, .todo = TRUE), + 
ATTR_UINT32(MF_SA_D3D11_AWARE, 1), /* ATTR_UINT32(MF_SA_D3D_AWARE, 1), only on W7 */ {0}, }; @@ -7351,6 +7370,15 @@ static void test_video_processor(void) ATTR_BLOB(MF_MT_MINIMUM_DISPLAY_APERTURE, &actual_aperture, 16), {0}, }; + const struct attribute_desc rgb32_with_aperture_positive_stride[] = + { + ATTR_GUID(MF_MT_MAJOR_TYPE, MFMediaType_Video, .required = TRUE), + ATTR_GUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32, .required = TRUE), + ATTR_RATIO(MF_MT_FRAME_SIZE, actual_width, actual_height, .required = TRUE), + ATTR_BLOB(MF_MT_MINIMUM_DISPLAY_APERTURE, &actual_aperture, 16), + ATTR_UINT32(MF_MT_DEFAULT_STRIDE, actual_width * 4), + {0}, + }; const struct attribute_desc nv12_default_stride[] = { ATTR_GUID(MF_MT_MAJOR_TYPE, MFMediaType_Video, .required = TRUE), @@ -7413,17 +7441,19 @@ static void test_video_processor(void) }; const struct attribute_desc nv12_with_aperture[] = { - ATTR_GUID(MF_MT_MAJOR_TYPE, MFMediaType_Video), - ATTR_GUID(MF_MT_SUBTYPE, MFVideoFormat_NV12), - ATTR_RATIO(MF_MT_FRAME_SIZE, actual_width, actual_height), + ATTR_GUID(MF_MT_MAJOR_TYPE, MFMediaType_Video, .required = TRUE), + ATTR_GUID(MF_MT_SUBTYPE, MFVideoFormat_NV12, .required = TRUE), + ATTR_RATIO(MF_MT_FRAME_SIZE, actual_width, actual_height, .required = TRUE), ATTR_BLOB(MF_MT_MINIMUM_DISPLAY_APERTURE, &actual_aperture, 16), + ATTR_BLOB(MF_MT_GEOMETRIC_APERTURE, &actual_aperture, 16), + ATTR_BLOB(MF_MT_PAN_SCAN_APERTURE, &actual_aperture, 16), {0}, }; const struct attribute_desc rgb32_no_aperture[] = { - ATTR_GUID(MF_MT_MAJOR_TYPE, MFMediaType_Video), - ATTR_GUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32), - ATTR_RATIO(MF_MT_FRAME_SIZE, 82, 84), + ATTR_GUID(MF_MT_MAJOR_TYPE, MFMediaType_Video, .required = TRUE), + ATTR_GUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32, .required = TRUE), + ATTR_RATIO(MF_MT_FRAME_SIZE, 82, 84, .required = TRUE), {0}, }; const MFT_OUTPUT_STREAM_INFO initial_output_info = {0}; @@ -7439,7 +7469,8 @@ static void test_video_processor(void) const struct buffer_desc 
rgb32_buffer_desc = { .length = actual_width * actual_height * 4, - .compare = compare_rgb32, .dump = dump_rgb32, .rect = {.top = 12, .right = 82, .bottom = 96}, + .compare = compare_rgb32, .compare_rect = {.top = 12, .right = 82, .bottom = 96}, + .dump = dump_rgb32, .size = {.cx = actual_width, .cy = actual_height}, }; const struct sample_desc rgb32_sample_desc = { @@ -7448,10 +7479,24 @@ static void test_video_processor(void) .buffer_count = 1, .buffers = &rgb32_buffer_desc, }; + const struct buffer_desc rgb32_crop_buffer_desc = + { + .length = actual_aperture.Area.cx * actual_aperture.Area.cy * 4, + .compare = compare_rgb32, .compare_rect = {.right = actual_aperture.Area.cx, .bottom = actual_aperture.Area.cy}, + .dump = dump_rgb32, .size = actual_aperture.Area, + }; + const struct sample_desc rgb32_crop_sample_desc = + { + .attributes = output_sample_attributes, + .sample_time = 0, .sample_duration = 10000000, + .buffer_count = 1, .buffers = &rgb32_crop_buffer_desc, + }; + const struct buffer_desc rgb555_buffer_desc = { .length = actual_width * actual_height * 2, - .compare = compare_rgb16, .dump = dump_rgb16, .rect = {.top = 12, .right = 82, .bottom = 96}, + .compare = compare_rgb16, .compare_rect = {.top = 12, .right = 82, .bottom = 96}, + .dump = dump_rgb16, .size = {.cx = actual_width, .cy = actual_height}, }; const struct sample_desc rgb555_sample_desc = { @@ -7463,7 +7508,8 @@ static void test_video_processor(void) const struct buffer_desc nv12_buffer_desc = { .length = actual_width * actual_height * 3 / 2, - .compare = compare_nv12, .dump = dump_nv12, .rect = {.top = 12, .right = 82, .bottom = 96}, + .compare = compare_nv12, .compare_rect = {.top = 12, .right = 82, .bottom = 96}, + .dump = dump_nv12, .size = {.cx = actual_width, .cy = actual_height}, }; const struct sample_desc nv12_sample_desc = { @@ -7475,84 +7521,109 @@ static void test_video_processor(void) const struct transform_desc { const struct attribute_desc *input_type_desc; + const WCHAR 
*input_bitmap; const struct attribute_desc *output_type_desc; const struct sample_desc *output_sample_desc; - const WCHAR *result_bitmap; + const WCHAR *output_bitmap; ULONG delta; BOOL broken; } video_processor_tests[] = { { - .input_type_desc = nv12_default_stride, .output_type_desc = rgb32_default_stride, - .output_sample_desc = &rgb32_sample_desc, .result_bitmap = L"rgb32frame-flip.bmp", - .delta = 2, /* Windows returns 0, Wine needs 2 */ + .input_type_desc = nv12_default_stride, .input_bitmap = L"nv12frame.bmp", + .output_type_desc = rgb32_default_stride, .output_bitmap = L"rgb32frame-flip.bmp", + .output_sample_desc = &rgb32_sample_desc, .delta = 2, /* Windows returns 0, Wine needs 2 */ }, { - .input_type_desc = nv12_default_stride, .output_type_desc = rgb32_negative_stride, - .output_sample_desc = &rgb32_sample_desc, .result_bitmap = L"rgb32frame-flip.bmp", - .delta = 2, /* Windows returns 0, Wine needs 2 */ + .input_type_desc = nv12_default_stride, .input_bitmap = L"nv12frame.bmp", + .output_type_desc = rgb32_negative_stride, .output_bitmap = L"rgb32frame-flip.bmp", + .output_sample_desc = &rgb32_sample_desc, .delta = 2, /* Windows returns 0, Wine needs 2 */ }, { - .input_type_desc = nv12_default_stride, .output_type_desc = rgb32_positive_stride, - .output_sample_desc = &rgb32_sample_desc, .result_bitmap = L"rgb32frame.bmp", - .delta = 6, + .input_type_desc = nv12_default_stride, .input_bitmap = L"nv12frame.bmp", + .output_type_desc = rgb32_positive_stride, .output_bitmap = L"rgb32frame.bmp", + .output_sample_desc = &rgb32_sample_desc, .delta = 6, + }, + { + .input_type_desc = rgb32_default_stride, .input_bitmap = L"rgb32frame.bmp", + .output_type_desc = nv12_default_stride, .output_bitmap = L"nv12frame-flip.bmp", + .output_sample_desc = &nv12_sample_desc, .delta = 2, /* Windows returns 0, Wine needs 2 */ + }, + { + .input_type_desc = rgb32_negative_stride, .input_bitmap = L"rgb32frame.bmp", + .output_type_desc = nv12_default_stride, .output_bitmap = 
L"nv12frame-flip.bmp", + .output_sample_desc = &nv12_sample_desc, .delta = 2, /* Windows returns 0, Wine needs 2 */ + }, + { + .input_type_desc = rgb32_positive_stride, .input_bitmap = L"rgb32frame.bmp", + .output_type_desc = nv12_default_stride, .output_bitmap = L"nv12frame.bmp", + .output_sample_desc = &nv12_sample_desc, .delta = 2, /* Windows returns 1, Wine needs 2 */ + }, + { + .input_type_desc = rgb32_negative_stride, .input_bitmap = L"rgb32frame.bmp", + .output_type_desc = rgb32_negative_stride, .output_bitmap = L"rgb32frame.bmp", + .output_sample_desc = &rgb32_sample_desc, }, { - .input_type_desc = rgb32_default_stride, .output_type_desc = nv12_default_stride, - .output_sample_desc = &nv12_sample_desc, .result_bitmap = L"nv12frame-flip.bmp", - .delta = 2, /* Windows returns 0, Wine needs 2 */ + .input_type_desc = rgb32_negative_stride, .input_bitmap = L"rgb32frame.bmp", + .output_type_desc = rgb32_positive_stride, .output_bitmap = L"rgb32frame-flip.bmp", + .output_sample_desc = &rgb32_sample_desc, .delta = 3, /* Windows returns 3 */ }, { - .input_type_desc = rgb32_negative_stride, .output_type_desc = nv12_default_stride, - .output_sample_desc = &nv12_sample_desc, .result_bitmap = L"nv12frame-flip.bmp", - .delta = 2, /* Windows returns 0, Wine needs 2 */ + .input_type_desc = rgb32_positive_stride, .input_bitmap = L"rgb32frame.bmp", + .output_type_desc = rgb32_negative_stride, .output_bitmap = L"rgb32frame-flip.bmp", + .output_sample_desc = &rgb32_sample_desc, .delta = 3, /* Windows returns 3 */ }, { - .input_type_desc = rgb32_positive_stride, .output_type_desc = nv12_default_stride, - .output_sample_desc = &nv12_sample_desc, .result_bitmap = L"nv12frame.bmp", - .delta = 2, /* Windows returns 1, Wine needs 2 */ + .input_type_desc = rgb32_positive_stride, .input_bitmap = L"rgb32frame.bmp", + .output_type_desc = rgb32_positive_stride, .output_bitmap = L"rgb32frame.bmp", + .output_sample_desc = &rgb32_sample_desc, }, { - .input_type_desc = rgb32_negative_stride, 
.output_type_desc = rgb32_negative_stride, - .output_sample_desc = &rgb32_sample_desc, .result_bitmap = L"rgb32frame.bmp", + .input_type_desc = rgb32_with_aperture, .input_bitmap = L"rgb32frame.bmp", + .output_type_desc = rgb32_with_aperture, .output_bitmap = L"rgb32frame.bmp", + .output_sample_desc = &rgb32_sample_desc, .broken = TRUE /* old Windows version incorrectly rescale */ }, { - .input_type_desc = rgb32_negative_stride, .output_type_desc = rgb32_positive_stride, - .output_sample_desc = &rgb32_sample_desc, .result_bitmap = L"rgb32frame-flip.bmp", - .delta = 3, /* Windows returns 3 */ + .input_type_desc = rgb32_default_stride, .input_bitmap = L"rgb32frame.bmp", + .output_type_desc = rgb555_default_stride, .output_bitmap = L"rgb555frame.bmp", + .output_sample_desc = &rgb555_sample_desc, }, { - .input_type_desc = rgb32_positive_stride, .output_type_desc = rgb32_negative_stride, - .output_sample_desc = &rgb32_sample_desc, .result_bitmap = L"rgb32frame-flip.bmp", - .delta = 3, /* Windows returns 3 */ + .input_type_desc = rgb32_default_stride, .input_bitmap = L"rgb32frame.bmp", + .output_type_desc = rgb555_negative_stride, .output_bitmap = L"rgb555frame.bmp", + .output_sample_desc = &rgb555_sample_desc, }, { - .input_type_desc = rgb32_positive_stride, .output_type_desc = rgb32_positive_stride, - .output_sample_desc = &rgb32_sample_desc, .result_bitmap = L"rgb32frame.bmp", + .input_type_desc = rgb32_default_stride, .input_bitmap = L"rgb32frame.bmp", + .output_type_desc = rgb555_positive_stride, .output_bitmap = L"rgb555frame-flip.bmp", + .output_sample_desc = &rgb555_sample_desc, .delta = 3, /* Windows returns 0, Wine needs 3 */ }, { - .input_type_desc = rgb32_with_aperture, .output_type_desc = rgb32_with_aperture, - .output_sample_desc = &rgb32_sample_desc, .result_bitmap = L"rgb32frame.bmp", - .broken = TRUE /* old Windows version incorrectly rescale */ + .input_type_desc = rgb555_default_stride, .input_bitmap = L"rgb555frame.bmp", + .output_type_desc = 
rgb555_positive_stride, .output_bitmap = L"rgb555frame-flip.bmp", + .output_sample_desc = &rgb555_sample_desc, .delta = 4, /* Windows returns 0, Wine needs 4 */ }, { - .input_type_desc = rgb32_default_stride, .output_type_desc = rgb555_default_stride, - .output_sample_desc = &rgb555_sample_desc, .result_bitmap = L"rgb555frame.bmp", + .input_type_desc = nv12_with_aperture, .input_bitmap = L"nv12frame.bmp", + .output_type_desc = rgb32_no_aperture, .output_bitmap = L"rgb32frame-crop-flip.bmp", + .output_sample_desc = &rgb32_crop_sample_desc, .delta = 2, /* Windows returns 0, Wine needs 2 */ }, { - .input_type_desc = rgb32_default_stride, .output_type_desc = rgb555_negative_stride, - .output_sample_desc = &rgb555_sample_desc, .result_bitmap = L"rgb555frame.bmp", + .input_type_desc = rgb32_no_aperture, .input_bitmap = L"rgb32frame-crop-flip.bmp", + .output_type_desc = rgb32_with_aperture, .output_bitmap = L"rgb32frame-flip.bmp", + .output_sample_desc = &rgb32_sample_desc, }, { - .input_type_desc = rgb32_default_stride, .output_type_desc = rgb555_positive_stride, - .output_sample_desc = &rgb555_sample_desc, .result_bitmap = L"rgb555frame-flip.bmp", - .delta = 3, /* Windows returns 0, Wine needs 3 */ + .input_type_desc = rgb32_with_aperture, .input_bitmap = L"rgb32frame-flip.bmp", + .output_type_desc = rgb32_no_aperture, .output_bitmap = L"rgb32frame-crop-flip.bmp", + .output_sample_desc = &rgb32_crop_sample_desc, }, { - .input_type_desc = rgb555_default_stride, .output_type_desc = rgb555_positive_stride, - .output_sample_desc = &rgb555_sample_desc, .result_bitmap = L"rgb555frame-flip.bmp", - .delta = 4, /* Windows returns 0, Wine needs 4 */ + .input_type_desc = rgb32_with_aperture_positive_stride, .input_bitmap = L"rgb32frame.bmp", + .output_type_desc = rgb32_no_aperture, .output_bitmap = L"rgb32frame-crop-flip.bmp", + .output_sample_desc = &rgb32_crop_sample_desc, .delta = 3, /* Windows returns 3 */ }, }; @@ -7854,6 +7925,7 @@ static void test_video_processor(void) for 
(; k < ARRAY_SIZE(expect_available_outputs); k++) if (IsEqualGUID(&expect_available_outputs[k], &guid)) break; + todo_wine_if(IsEqualGUID(&guid, &MFVideoFormat_ABGR32)) /* enumerated on purpose on Wine */ ok(k < ARRAY_SIZE(expect_available_outputs), "got subtype %s\n", debugstr_guid(&guid)); ret = IMFMediaType_Release(media_type); @@ -7866,6 +7938,22 @@ static void test_video_processor(void) } ok(hr == MF_E_NO_MORE_TYPES, "GetInputAvailableType returned %#lx\n", hr); + /* MFVideoFormat_ABGR32 isn't supported by the video processor in non-D3D mode */ + check_mft_set_input_type(transform, nv12_default_stride); + + hr = IMFTransform_GetOutputAvailableType(transform, 0, 0, &media_type); + ok(hr == S_OK, "got %#lx\n", hr); + hr = IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &MFVideoFormat_ABGR32); + ok(hr == S_OK, "got %#lx\n", hr); + hr = IMFTransform_SetOutputType(transform, 0, media_type, 0); + todo_wine ok(hr == MF_E_INVALIDMEDIATYPE, "got %#lx\n", hr); + IMFMediaType_Release(media_type); + + /* MFVideoFormat_RGB32 output format works */ + + check_mft_set_output_type(transform, rgb32_default_stride, S_OK); + check_mft_get_output_current_type(transform, rgb32_default_stride); + for (i = 0; i < ARRAY_SIZE(video_processor_tests); i++) { const struct transform_desc *test = video_processor_tests + i; @@ -7890,48 +7978,53 @@ static void test_video_processor(void) output_info.cbSize = actual_width * actual_height * 2; check_mft_get_output_stream_info(transform, S_OK, &output_info); } + else if (test->output_sample_desc == &rgb32_crop_sample_desc) + { + output_info.cbSize = actual_aperture.Area.cx * actual_aperture.Area.cy * 4; + check_mft_get_output_stream_info(transform, S_OK, &output_info); + } else { output_info.cbSize = actual_width * actual_height * 4; check_mft_get_output_stream_info(transform, S_OK, &output_info); } - if (test->input_type_desc == nv12_default_stride) + if (test->input_type_desc == nv12_default_stride || test->input_type_desc == 
nv12_with_aperture) { input_info.cbSize = actual_width * actual_height * 3 / 2; check_mft_get_input_stream_info(transform, S_OK, &input_info); - - load_resource(L"nv12frame.bmp", &input_data, &input_data_len); - /* skip BMP header and RGB data from the dump */ - length = *(DWORD *)(input_data + 2); - input_data_len = input_data_len - length; - ok(input_data_len == 13824, "got length %lu\n", input_data_len); - input_data = input_data + length; } else if (test->input_type_desc == rgb555_default_stride) { input_info.cbSize = actual_width * actual_height * 2; check_mft_get_input_stream_info(transform, S_OK, &input_info); - - load_resource(L"rgb555frame.bmp", &input_data, &input_data_len); - /* skip BMP header and RGB data from the dump */ - length = *(DWORD *)(input_data + 2 + 2 * sizeof(DWORD)); - input_data_len -= length; - ok(input_data_len == 18432, "got length %lu\n", input_data_len); - input_data += length; + } + else if (test->input_type_desc == rgb32_no_aperture) + { + input_info.cbSize = 82 * 84 * 4; + check_mft_get_input_stream_info(transform, S_OK, &input_info); } else { input_info.cbSize = actual_width * actual_height * 4; check_mft_get_input_stream_info(transform, S_OK, &input_info); + } - load_resource(L"rgb32frame.bmp", &input_data, &input_data_len); + load_resource(test->input_bitmap, &input_data, &input_data_len); + if (test->input_type_desc == nv12_default_stride || test->input_type_desc == nv12_with_aperture) + { /* skip BMP header and RGB data from the dump */ + length = *(DWORD *)(input_data + 2); + input_data_len = input_data_len - length; + } + else + { + /* skip BMP header */ length = *(DWORD *)(input_data + 2 + 2 * sizeof(DWORD)); input_data_len -= length; - ok(input_data_len == 36864, "got length %lu\n", input_data_len); - input_data += length; } + ok(input_data_len == input_info.cbSize, "got length %lu\n", input_data_len); + input_data += length; input_sample = create_sample(input_data, input_data_len); hr = 
IMFSample_SetSampleTime(input_sample, 0); @@ -7967,7 +8060,7 @@ static void test_video_processor(void) ref = IMFSample_Release(output_sample); ok(ref == 1, "Release returned %ld\n", ref); - ret = check_mf_sample_collection(output_samples, test->output_sample_desc, test->result_bitmap); + ret = check_mf_sample_collection(output_samples, test->output_sample_desc, test->output_bitmap); ok(ret <= test->delta || broken(test->broken), "got %lu%% diff\n", ret); IMFCollection_Release(output_samples); @@ -7981,7 +8074,13 @@ static void test_video_processor(void) } ret = IMFSample_Release(output_sample); ok(ret == 0, "Release returned %lu\n", ret); + winetest_pop_context(); + + hr = IMFTransform_SetInputType(transform, 0, NULL, 0); + ok(hr == S_OK, "got %#lx\n", hr); + hr = IMFTransform_SetOutputType(transform, 0, NULL, 0); + ok(hr == S_OK, "got %#lx\n", hr); } ret = IMFTransform_Release(transform); @@ -8000,8 +8099,8 @@ static void test_video_processor(void) check_mft_set_output_type(transform, rgb32_no_aperture, S_OK); check_mft_get_output_current_type(transform, rgb32_no_aperture); - check_mft_set_input_type_(__LINE__, transform, nv12_with_aperture, TRUE); - check_mft_get_input_current_type_(__LINE__, transform, nv12_with_aperture, TRUE, FALSE); + check_mft_set_input_type(transform, nv12_with_aperture, S_OK); + check_mft_get_input_current_type(transform, nv12_with_aperture); /* output type is the same as before */ check_mft_get_output_current_type(transform, rgb32_no_aperture); @@ -8404,7 +8503,8 @@ static void test_h264_with_dxgi_manager(void) const struct buffer_desc output_buffer_desc_nv12 = { .length = aligned_width * aligned_height * 3 / 2, - .compare = compare_nv12, .dump = dump_nv12, .rect = {.top=0, .left=0, .right = set_width, .bottom = set_height}, + .compare = compare_nv12, .compare_rect = {.right = set_width, .bottom = set_height}, + .dump = dump_nv12, .size = {.cx = aligned_width, .cy = aligned_height}, }; const struct sample_desc output_sample_desc_nv12 = { 
@@ -8508,6 +8608,17 @@ static void test_h264_with_dxgi_manager(void) ok(hr == S_OK, "got %#lx\n", hr); IMFMediaType_Release(type); + /* MFVideoFormat_ABGR32 output isn't supported by the D3D11-enabled decoder */ + hr = IMFTransform_GetOutputAvailableType(transform, 0, 0, &type); + ok(hr == S_OK, "got %#lx\n", hr); + hr = IMFMediaType_SetGUID(type, &MF_MT_MAJOR_TYPE, &MFMediaType_Video); + ok(hr == S_OK, "got %#lx\n", hr); + hr = IMFMediaType_SetGUID(type, &MF_MT_SUBTYPE, &MFVideoFormat_ABGR32); + ok(hr == S_OK, "got %#lx\n", hr); + hr = IMFTransform_SetOutputType(transform, 0, type, 0); + ok(hr == MF_E_INVALIDMEDIATYPE, "got %#lx\n", hr); + IMFMediaType_Release(type); + hr = IMFTransform_GetOutputAvailableType(transform, 0, 0, &type); ok(hr == S_OK, "got %#lx\n", hr); hr = IMFMediaType_SetGUID(type, &MF_MT_MAJOR_TYPE, &MFMediaType_Video); @@ -8654,7 +8765,13 @@ static void test_h264_with_dxgi_manager(void) status = 0; hr = get_next_h264_output_sample(transform, &input_sample, NULL, output, &data, &data_len); + todo_wine_if(hr == MF_E_UNEXPECTED) /* with some llvmpipe versions */ ok(hr == S_OK, "got %#lx\n", hr); + if (hr == MF_E_UNEXPECTED) + { + IMFSample_Release(input_sample); + goto failed; + } ok(sample != output[0].pSample, "got %p.\n", output[0].pSample); sample = output[0].pSample; @@ -8698,6 +8815,8 @@ static void test_h264_with_dxgi_manager(void) IMFDXGIDeviceManager_Release(manager); if (transform) IMFTransform_Release(transform); + + MFShutdown(); CoUninitialize(); } @@ -8820,8 +8939,9 @@ static void test_iv50_decoder(void) }; const struct buffer_desc rgb_buffer_desc = { - .length = 96 * 96 * 3, .compare = compare_rgb24, .dump = dump_rgb24, - .rect = {.right = 82, .bottom = 84}, + .length = 96 * 96 * 3, + .compare = compare_rgb24, .compare_rect = {.right = 82, .bottom = 84}, + .dump = dump_rgb24, .size = {.cx = 96, .cy = 96}, }; const struct sample_desc rgb_sample_desc = { @@ -8893,33 +9013,595 @@ static void test_iv50_decoder(void) 
IMFCollection_Release(collection); } -START_TEST(transform) +static IMFSample *create_d3d_sample(IMFVideoSampleAllocator *allocator, const void *data, ULONG size) { - init_functions(); + IMFMediaBuffer *media_buffer; + IMFSample *sample; + BYTE *buffer; + HRESULT hr; - test_sample_copier(); - test_sample_copier_output_processing(); - test_aac_encoder(); - test_aac_decoder(); - test_wma_encoder(); - test_wma_decoder(); - test_wma_decoder_dmo_input_type(); - test_wma_decoder_dmo_output_type(); - test_h264_encoder(); - test_h264_decoder(); - test_wmv_encoder(); - test_wmv_decoder(); - test_wmv_decoder_dmo_input_type(); - test_wmv_decoder_dmo_output_type(); - test_wmv_decoder_dmo_get_size_info(); - test_wmv_decoder_media_object(); - test_audio_convert(); - test_color_convert(); - test_video_processor(); - test_mp3_decoder(); - test_iv50_encoder(); - test_iv50_decoder(); + hr = IMFVideoSampleAllocator_AllocateSample(allocator, &sample); + ok(hr == S_OK, "got %#lx\n", hr); + hr = IMFSample_SetSampleTime(sample, 0); + ok(hr == S_OK, "got %#lx\n", hr); + hr = IMFSample_SetSampleDuration(sample, 0); + ok(hr == S_OK, "got %#lx\n", hr); + hr = IMFSample_GetBufferByIndex(sample, 0, &media_buffer); + ok(hr == S_OK, "got %#lx\n", hr); + hr = IMFMediaBuffer_Lock(media_buffer, &buffer, NULL, NULL); + ok(hr == S_OK, "got %#lx\n", hr); + if (!data) memset(buffer, 0xcd, size); + else memcpy(buffer, data, size); + hr = IMFMediaBuffer_Unlock(media_buffer); + ok(hr == S_OK, "got %#lx\n", hr); + IMFMediaBuffer_Release(media_buffer); - test_h264_with_dxgi_manager(); - test_h264_decoder_concat_streams(); + return sample; +} + +static void test_video_processor_with_dxgi_manager(void) +{ + static const unsigned int set_width = 82, set_height = 84, aligned_width = 96, aligned_height = 96; + const struct attribute_desc output_sample_attributes[] = + { + {0}, + }; + const struct buffer_desc output_buffer_desc_rgb32 = + { + .length = aligned_width * aligned_height * 4, + .compare = 
compare_rgb32, .compare_rect = {.right = set_width, .bottom = set_height}, + .dump = dump_rgb32, .size = {.cx = aligned_width, .cy = aligned_height}, + }; + const struct sample_desc output_sample_desc_rgb32 = + { + .attributes = output_sample_attributes, + .sample_time = 0, .sample_duration = 0, + .buffer_count = 1, .buffers = &output_buffer_desc_rgb32, + }; + + const struct buffer_desc output_buffer_desc_rgb32_crop = + { + .length = set_width * set_height * 4, + .compare = compare_rgb32, .compare_rect = {.right = set_width, .bottom = set_height}, + .dump = dump_rgb32, .size = {.cx = set_width, .cy = set_height}, + }; + const struct sample_desc output_sample_desc_rgb32_crop = + { + .attributes = output_sample_attributes, + .sample_time = 0, .sample_duration = 0, + .buffer_count = 1, .buffers = &output_buffer_desc_rgb32_crop, + }; + + const struct buffer_desc output_buffer_desc_abgr32_crop = + { + .length = set_width * set_height * 4, + .compare = compare_abgr32, .compare_rect = {.right = set_width, .bottom = set_height}, + .dump = dump_rgb32, .size = {.cx = set_width, .cy = set_height}, + }; + const struct sample_desc output_sample_desc_abgr32_crop = + { + .attributes = output_sample_attributes, + .sample_time = 0, .sample_duration = 0, + .buffer_count = 1, .buffers = &output_buffer_desc_abgr32_crop, + }; + + const GUID expect_available_outputs[] = + { + MFVideoFormat_ARGB32, + MFVideoFormat_ABGR32, + MFVideoFormat_A2R10G10B10, + MFVideoFormat_A16B16G16R16F, + MFVideoFormat_NV12, + MFVideoFormat_P010, + MFVideoFormat_YUY2, + MFVideoFormat_L8, + MFVideoFormat_L16, + MFVideoFormat_D16, + }; + static const media_type_desc expect_available_common = + { + ATTR_GUID(MF_MT_MAJOR_TYPE, MFMediaType_Video), + }; + + static const MFVideoArea aperture = {.Area={set_width, set_height}}; + const struct attribute_desc nv12_with_aperture[] = + { + ATTR_GUID(MF_MT_MAJOR_TYPE, MFMediaType_Video), + ATTR_GUID(MF_MT_SUBTYPE, MFVideoFormat_NV12), + ATTR_RATIO(MF_MT_FRAME_SIZE, 
aligned_width, aligned_height), + ATTR_BLOB(MF_MT_MINIMUM_DISPLAY_APERTURE, &aperture, 16), + ATTR_BLOB(MF_MT_GEOMETRIC_APERTURE, &aperture, 16), + ATTR_BLOB(MF_MT_PAN_SCAN_APERTURE, &aperture, 16), + {0}, + }; + const struct attribute_desc rgb32_no_aperture[] = + { + ATTR_GUID(MF_MT_MAJOR_TYPE, MFMediaType_Video), + ATTR_GUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32), + ATTR_RATIO(MF_MT_FRAME_SIZE, set_width, set_height), + {0}, + }; + const struct attribute_desc abgr32_no_aperture[] = + { + ATTR_GUID(MF_MT_MAJOR_TYPE, MFMediaType_Video), + ATTR_GUID(MF_MT_SUBTYPE, MFVideoFormat_ABGR32), + ATTR_RATIO(MF_MT_FRAME_SIZE, set_width, set_height), + {0}, + }; + + IMFVideoSampleAllocator *allocator = NULL; + IMFDXGIDeviceManager *manager = NULL; + IMFTrackedSample *tracked_sample; + IMFTransform *transform = NULL; + ID3D11Multithread *multithread; + MFT_OUTPUT_DATA_BUFFER output; + IMFCollection *output_samples; + MFT_OUTPUT_STREAM_INFO info; + IMFDXGIBuffer *dxgi_buffer; + const BYTE *nv12frame_data; + D3D11_TEXTURE2D_DESC desc; + ULONG nv12frame_data_len; + IMFSample *input_sample; + IMFMediaBuffer *buffer; + IMFAttributes *attribs; + ID3D11Texture2D *tex2d; + IMF2DBuffer2 *buffer2d; + ID3D11Device *d3d11; + IMFMediaType *type; + DWORD status, val; + ULONG i, j, length; + UINT32 value; + HRESULT hr; + UINT token; + DWORD ret; + + if (!pMFCreateDXGIDeviceManager || !pMFCreateVideoSampleAllocatorEx) + { + win_skip("MFCreateDXGIDeviceManager / MFCreateVideoSampleAllocatorEx are not available, skipping tests.\n"); + return; + } + + hr = D3D11CreateDevice(NULL, D3D_DRIVER_TYPE_HARDWARE, NULL, D3D11_CREATE_DEVICE_VIDEO_SUPPORT, NULL, 0, + D3D11_SDK_VERSION, &d3d11, NULL, NULL); + if (FAILED(hr)) + { + skip("D3D11 device creation failed, skipping tests.\n"); + return; + } + + hr = MFStartup(MF_VERSION, 0); + ok(hr == S_OK, "got %#lx\n", hr); + + hr = CoInitialize(NULL); + ok(hr == S_OK, "got %#lx\n", hr); + + hr = ID3D11Device_QueryInterface(d3d11, &IID_ID3D11Multithread, (void 
**)&multithread); + ok(hr == S_OK, "got %#lx\n", hr); + ID3D11Multithread_SetMultithreadProtected(multithread, TRUE); + ID3D11Multithread_Release(multithread); + + hr = pMFCreateDXGIDeviceManager(&token, &manager); + ok(hr == S_OK, "got %#lx\n", hr); + hr = IMFDXGIDeviceManager_ResetDevice(manager, (IUnknown *)d3d11, token); + ok(hr == S_OK, "got %#lx\n", hr); + ID3D11Device_Release(d3d11); + + hr = pMFCreateVideoSampleAllocatorEx(&IID_IMFVideoSampleAllocator, (void **)&allocator); + ok(hr == S_OK, "got %#lx\n", hr); + hr = IMFVideoSampleAllocator_SetDirectXManager(allocator, (IUnknown *)manager); + ok(hr == S_OK, "got %#lx\n", hr); + + if (FAILED(hr = CoCreateInstance(&CLSID_VideoProcessorMFT, NULL, CLSCTX_INPROC_SERVER, + &IID_IMFTransform, (void **)&transform))) + goto failed; + + hr = IMFTransform_ProcessMessage(transform, MFT_MESSAGE_SET_D3D_MANAGER, (ULONG_PTR)transform); + todo_wine ok(hr == E_NOINTERFACE, "got %#lx\n", hr); + + hr = IMFTransform_ProcessMessage(transform, MFT_MESSAGE_SET_D3D_MANAGER, (ULONG_PTR)manager); + ok(hr == S_OK || broken(hr == E_NOINTERFACE), "got %#lx\n", hr); + if (hr == E_NOINTERFACE) + { + win_skip("No hardware video decoding support.\n"); + goto failed; + } + + hr = IMFTransform_GetOutputStreamInfo(transform, 0, &info); + ok(hr == S_OK, "got %#lx\n", hr); + if (broken(!(info.dwFlags & MFT_OUTPUT_STREAM_PROVIDES_SAMPLES)) /* w8 / w1064v1507 */) + { + win_skip("missing video processor sample allocator support.\n"); + goto failed; + } + ok(info.dwFlags == MFT_OUTPUT_STREAM_PROVIDES_SAMPLES, "got %#lx.\n", info.dwFlags); + + hr = MFCreateMediaType(&type); + ok(hr == S_OK, "got %#lx\n", hr); + hr = IMFMediaType_SetGUID(type, &MF_MT_MAJOR_TYPE, &MFMediaType_Video); + ok(hr == S_OK, "got %#lx\n", hr); + hr = IMFMediaType_SetGUID(type, &MF_MT_SUBTYPE, &MFVideoFormat_NV12); + ok(hr == S_OK, "got %#lx\n", hr); + hr = IMFMediaType_SetUINT64(type, &MF_MT_FRAME_SIZE, (UINT64)96 << 32 | 96); + ok(hr == S_OK, "got %#lx\n", hr); + hr = 
IMFTransform_SetInputType(transform, 0, type, 0); + ok(hr == S_OK, "got %#lx\n", hr); + hr = IMFVideoSampleAllocator_InitializeSampleAllocator(allocator, 4, type); + ok(hr == S_OK, "got %#lx\n", hr); + IMFMediaType_Release(type); + + j = i = 0; + while (SUCCEEDED(hr = IMFTransform_GetOutputAvailableType(transform, 0, ++i, &type))) + { + GUID guid; + winetest_push_context("out %lu", i); + ok(hr == S_OK, "GetOutputAvailableType returned %#lx\n", hr); + check_media_type(type, expect_available_common, -1); + + hr = IMFMediaType_GetGUID(type, &MF_MT_SUBTYPE, &guid); + ok(hr == S_OK, "GetGUID returned %#lx\n", hr); + + for (; j < ARRAY_SIZE(expect_available_outputs); j++) + if (IsEqualGUID(&expect_available_outputs[j], &guid)) + break; + todo_wine_if(i >= 2) + ok(j < ARRAY_SIZE(expect_available_outputs), "got subtype %s\n", debugstr_guid(&guid)); + + ret = IMFMediaType_Release(type); + ok(ret == 0, "Release returned %lu\n", ret); + winetest_pop_context(); + } + ok(hr == MF_E_NO_MORE_TYPES, "GetOutputAvailableType returned %#lx\n", hr); + + + /* MFVideoFormat_ABGR32 is supported by the D3D11-enabled video processor */ + + hr = IMFTransform_GetOutputAvailableType(transform, 0, 0, &type); + ok(hr == S_OK, "got %#lx\n", hr); + hr = IMFMediaType_SetGUID(type, &MF_MT_MAJOR_TYPE, &MFMediaType_Video); + ok(hr == S_OK, "got %#lx\n", hr); + hr = IMFMediaType_SetGUID(type, &MF_MT_SUBTYPE, &MFVideoFormat_ABGR32); + ok(hr == S_OK, "got %#lx\n", hr); + hr = IMFMediaType_SetUINT64(type, &MF_MT_FRAME_SIZE, (UINT64)96 << 32 | 96); + ok(hr == S_OK, "got %#lx\n", hr); + hr = IMFTransform_SetOutputType(transform, 0, type, 0); + ok(hr == S_OK, "got %#lx\n", hr); + IMFMediaType_Release(type); + + + hr = IMFTransform_GetOutputAvailableType(transform, 0, 0, &type); + ok(hr == S_OK, "got %#lx\n", hr); + hr = IMFMediaType_SetGUID(type, &MF_MT_MAJOR_TYPE, &MFMediaType_Video); + ok(hr == S_OK, "got %#lx\n", hr); + hr = IMFMediaType_SetGUID(type, &MF_MT_SUBTYPE, &MFVideoFormat_RGB32); + ok(hr == 
S_OK, "got %#lx\n", hr); + hr = IMFMediaType_SetUINT64(type, &MF_MT_FRAME_SIZE, (UINT64)96 << 32 | 96); + ok(hr == S_OK, "got %#lx\n", hr); + hr = IMFTransform_SetOutputType(transform, 0, type, 0); + ok(hr == S_OK, "got %#lx\n", hr); + IMFMediaType_Release(type); + + status = 0; + memset(&output, 0, sizeof(output)); + hr = IMFTransform_ProcessOutput(transform, 0, 1, &output, &status); + ok(hr == MF_E_TRANSFORM_NEED_MORE_INPUT, "got %#lx\n", hr); + + load_resource(L"nv12frame.bmp", &nv12frame_data, &nv12frame_data_len); + /* skip BMP header and RGB data from the dump */ + length = *(DWORD *)(nv12frame_data + 2); + nv12frame_data_len = nv12frame_data_len - length; + nv12frame_data = nv12frame_data + length; + ok(nv12frame_data_len == 13824, "got length %lu\n", nv12frame_data_len); + + /* native wants a dxgi buffer on input */ + input_sample = create_d3d_sample(allocator, nv12frame_data, nv12frame_data_len); + + hr = IMFTransform_ProcessInput(transform, 0, input_sample, 0); + ok(hr == S_OK, "got %#lx\n", hr); + + hr = IMFTransform_GetOutputStreamInfo(transform, 0, &info); + ok(hr == S_OK, "got %#lx\n", hr); + ok(info.dwFlags == MFT_OUTPUT_STREAM_PROVIDES_SAMPLES, "got %#lx.\n", info.dwFlags); + + + status = 0; + memset(&output, 0, sizeof(output)); + hr = IMFTransform_ProcessOutput(transform, 0, 1, &output, &status); + ok(hr == S_OK, "got %#lx\n", hr); + ok(!output.pEvents, "got events\n"); + ok(!!output.pSample, "got no sample\n"); + ok(output.dwStatus == 0, "got %#lx\n", output.dwStatus); + ok(status == 0, "got %#lx\n", status); + + hr = IMFTransform_GetAttributes(transform, &attribs); + ok(hr == S_OK, "got %#lx\n", hr); + hr = IMFAttributes_GetUINT32(attribs, &MF_SA_MINIMUM_OUTPUT_SAMPLE_COUNT, &value); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + IMFAttributes_Release(attribs); + + hr = IMFTransform_GetOutputStreamAttributes(transform, 0, &attribs); + ok(hr == S_OK, "got %#lx\n", hr); + hr = IMFAttributes_GetCount(attribs, &value); + ok(hr == 
S_OK, "got %#lx\n", hr); + ok(value == 0, "got %u.\n", value); + IMFAttributes_Release(attribs); + + + hr = IMFSample_QueryInterface(output.pSample, &IID_IMFTrackedSample, (void **)&tracked_sample); + ok(hr == S_OK, "got %#lx\n", hr); + IMFTrackedSample_Release(tracked_sample); + + hr = IMFSample_GetBufferCount(output.pSample, &val); + ok(hr == S_OK, "got %#lx\n", hr); + ok(val == 1, "got %lu.\n", val); + hr = IMFSample_GetBufferByIndex(output.pSample, 0, &buffer); + ok(hr == S_OK, "got %#lx\n", hr); + hr = IMFMediaBuffer_QueryInterface(buffer, &IID_IMFDXGIBuffer, (void **)&dxgi_buffer); + ok(hr == S_OK, "got %#lx\n", hr); + hr = IMFMediaBuffer_QueryInterface(buffer, &IID_IMF2DBuffer2, (void **)&buffer2d); + ok(hr == S_OK, "got %#lx\n", hr); + + hr = IMFDXGIBuffer_GetResource(dxgi_buffer, &IID_ID3D11Texture2D, (void **)&tex2d); + ok(hr == S_OK, "got %#lx\n", hr); + memset(&desc, 0xcc, sizeof(desc)); + ID3D11Texture2D_GetDesc(tex2d, &desc); + ok(desc.Format == DXGI_FORMAT_B8G8R8X8_UNORM, "got %#x.\n", desc.Format); + ok(!desc.Usage, "got %u.\n", desc.Usage); + ok(desc.BindFlags == (D3D11_BIND_RENDER_TARGET | D3D11_BIND_SHADER_RESOURCE), "got %#x.\n", desc.BindFlags); + ok(!desc.CPUAccessFlags, "got %#x.\n", desc.CPUAccessFlags); + ok(!desc.MiscFlags, "got %#x.\n", desc.MiscFlags); + ok(desc.MipLevels == 1, "git %u.\n", desc.MipLevels); + ok(desc.Width == aligned_width, "got %u.\n", desc.Width); + ok(desc.Height == aligned_height, "got %u.\n", desc.Height); + + ID3D11Texture2D_Release(tex2d); + IMFDXGIBuffer_Release(dxgi_buffer); + IMF2DBuffer2_Release(buffer2d); + IMFMediaBuffer_Release(buffer); + + + hr = MFCreateCollection(&output_samples); + ok(hr == S_OK, "MFCreateCollection returned %#lx\n", hr); + + hr = IMFCollection_AddElement(output_samples, (IUnknown *)output.pSample); + ok(hr == S_OK, "AddElement returned %#lx\n", hr); + IMFSample_Release(output.pSample); + + ret = check_mf_sample_collection(output_samples, &output_sample_desc_rgb32, L"rgb32frame.bmp"); + 
ok(ret <= 5, "got %lu%% diff\n", ret); + + for (i = 0; i < 9; i++) + { + hr = IMFTransform_ProcessInput(transform, 0, input_sample, 0); + ok(hr == S_OK, "got %#lx\n", hr); + + status = 0; + memset(&output, 0, sizeof(output)); + hr = IMFTransform_ProcessOutput(transform, 0, 1, &output, &status); + ok(hr == S_OK, "got %#lx\n", hr); + ok(!output.pEvents, "got events\n"); + ok(!!output.pSample, "got no sample\n"); + ok(output.dwStatus == 0, "got %#lx\n", output.dwStatus); + ok(status == 0, "got %#lx\n", status); + + hr = IMFCollection_AddElement(output_samples, (IUnknown *)output.pSample); + ok(hr == S_OK, "AddElement returned %#lx\n", hr); + IMFSample_Release(output.pSample); + } + + hr = IMFTransform_ProcessInput(transform, 0, input_sample, 0); + ok(hr == S_OK, "got %#lx\n", hr); + + status = 0; + memset(&output, 0, sizeof(output)); + hr = IMFTransform_ProcessOutput(transform, 0, 1, &output, &status); + ok(hr == MF_E_SAMPLEALLOCATOR_EMPTY, "got %#lx\n", hr); + + IMFCollection_Release(output_samples); + + status = 0; + memset(&output, 0, sizeof(output)); + hr = IMFTransform_ProcessOutput(transform, 0, 1, &output, &status); + /* FIXME: Wine sample release happens entirely asynchronously */ + flaky_wine_if(hr == MF_E_SAMPLEALLOCATOR_EMPTY) + ok(hr == S_OK, "got %#lx\n", hr); + ok(!output.pEvents, "got events\n"); + flaky_wine_if(hr == MF_E_SAMPLEALLOCATOR_EMPTY) + ok(!!output.pSample, "got no sample\n"); + ok(output.dwStatus == 0, "got %#lx\n", output.dwStatus); + ok(status == 0, "got %#lx\n", status); + if (output.pSample) + IMFSample_Release(output.pSample); + + + /* check RGB32 output aperture cropping with D3D buffers */ + + check_mft_set_input_type(transform, nv12_with_aperture, S_OK); + check_mft_set_output_type(transform, rgb32_no_aperture, S_OK); + + load_resource(L"nv12frame.bmp", &nv12frame_data, &nv12frame_data_len); + /* skip BMP header and RGB data from the dump */ + length = *(DWORD *)(nv12frame_data + 2); + nv12frame_data_len = nv12frame_data_len - 
length; + nv12frame_data = nv12frame_data + length; + ok(nv12frame_data_len == 13824, "got length %lu\n", nv12frame_data_len); + + input_sample = create_d3d_sample(allocator, nv12frame_data, nv12frame_data_len); + + hr = IMFTransform_ProcessInput(transform, 0, input_sample, 0); + ok(hr == S_OK, "got %#lx\n", hr); + + hr = IMFTransform_GetOutputStreamInfo(transform, 0, &info); + ok(hr == S_OK, "got %#lx\n", hr); + ok(info.dwFlags == MFT_OUTPUT_STREAM_PROVIDES_SAMPLES, "got %#lx.\n", info.dwFlags); + + status = 0; + memset(&output, 0, sizeof(output)); + hr = IMFTransform_ProcessOutput(transform, 0, 1, &output, &status); + ok(hr == S_OK, "got %#lx\n", hr); + ok(!output.pEvents, "got events\n"); + ok(!!output.pSample, "got no sample\n"); + ok(output.dwStatus == 0, "got %#lx\n", output.dwStatus); + ok(status == 0, "got %#lx\n", status); + if (!output.pSample) goto skip_rgb32; + + hr = IMFSample_GetBufferByIndex(output.pSample, 0, &buffer); + ok(hr == S_OK, "got %#lx\n", hr); + hr = IMFMediaBuffer_QueryInterface(buffer, &IID_IMFDXGIBuffer, (void **)&dxgi_buffer); + ok(hr == S_OK, "got %#lx\n", hr); + + hr = IMFDXGIBuffer_GetResource(dxgi_buffer, &IID_ID3D11Texture2D, (void **)&tex2d); + ok(hr == S_OK, "got %#lx\n", hr); + memset(&desc, 0xcc, sizeof(desc)); + ID3D11Texture2D_GetDesc(tex2d, &desc); + ok(desc.Format == DXGI_FORMAT_B8G8R8X8_UNORM, "got %#x.\n", desc.Format); + ok(!desc.Usage, "got %u.\n", desc.Usage); + ok(desc.BindFlags == (D3D11_BIND_RENDER_TARGET | D3D11_BIND_SHADER_RESOURCE), "got %#x.\n", desc.BindFlags); + ok(!desc.CPUAccessFlags, "got %#x.\n", desc.CPUAccessFlags); + ok(!desc.MiscFlags, "got %#x.\n", desc.MiscFlags); + ok(desc.MipLevels == 1, "git %u.\n", desc.MipLevels); + ok(desc.Width == set_width, "got %u.\n", desc.Width); + ok(desc.Height == set_height, "got %u.\n", desc.Height); + + ID3D11Texture2D_Release(tex2d); + IMFDXGIBuffer_Release(dxgi_buffer); + IMFMediaBuffer_Release(buffer); + + hr = MFCreateCollection(&output_samples); + ok(hr == 
S_OK, "MFCreateCollection returned %#lx\n", hr); + + hr = IMFCollection_AddElement(output_samples, (IUnknown *)output.pSample); + ok(hr == S_OK, "AddElement returned %#lx\n", hr); + IMFSample_Release(output.pSample); + + ret = check_mf_sample_collection(output_samples, &output_sample_desc_rgb32_crop, L"rgb32frame-crop.bmp"); + ok(ret <= 5, "got %lu%% diff\n", ret); + + IMFCollection_Release(output_samples); + + +skip_rgb32: + /* check ABGR32 output with D3D buffers */ + + check_mft_set_input_type(transform, nv12_with_aperture, S_OK); + check_mft_set_output_type(transform, abgr32_no_aperture, S_OK); + + load_resource(L"nv12frame.bmp", &nv12frame_data, &nv12frame_data_len); + /* skip BMP header and RGB data from the dump */ + length = *(DWORD *)(nv12frame_data + 2); + nv12frame_data_len = nv12frame_data_len - length; + nv12frame_data = nv12frame_data + length; + ok(nv12frame_data_len == 13824, "got length %lu\n", nv12frame_data_len); + + input_sample = create_d3d_sample(allocator, nv12frame_data, nv12frame_data_len); + + hr = IMFTransform_ProcessInput(transform, 0, input_sample, 0); + ok(hr == S_OK, "got %#lx\n", hr); + + hr = IMFTransform_GetOutputStreamInfo(transform, 0, &info); + ok(hr == S_OK, "got %#lx\n", hr); + ok(info.dwFlags == MFT_OUTPUT_STREAM_PROVIDES_SAMPLES, "got %#lx.\n", info.dwFlags); + + status = 0; + memset(&output, 0, sizeof(output)); + hr = IMFTransform_ProcessOutput(transform, 0, 1, &output, &status); + ok(hr == S_OK, "got %#lx\n", hr); + ok(!output.pEvents, "got events\n"); + ok(!!output.pSample, "got no sample\n"); + ok(output.dwStatus == 0, "got %#lx\n", output.dwStatus); + ok(status == 0, "got %#lx\n", status); + if (!output.pSample) goto skip_abgr32; + + hr = IMFSample_GetBufferByIndex(output.pSample, 0, &buffer); + ok(hr == S_OK, "got %#lx\n", hr); + hr = IMFMediaBuffer_QueryInterface(buffer, &IID_IMFDXGIBuffer, (void **)&dxgi_buffer); + ok(hr == S_OK, "got %#lx\n", hr); + + hr = IMFDXGIBuffer_GetResource(dxgi_buffer, &IID_ID3D11Texture2D, 
(void **)&tex2d); + ok(hr == S_OK, "got %#lx\n", hr); + memset(&desc, 0xcc, sizeof(desc)); + ID3D11Texture2D_GetDesc(tex2d, &desc); + ok(desc.Format == DXGI_FORMAT_R8G8B8A8_UNORM, "got %#x.\n", desc.Format); + ok(!desc.Usage, "got %u.\n", desc.Usage); + todo_wine ok(desc.BindFlags == D3D11_BIND_RENDER_TARGET, "got %#x.\n", desc.BindFlags); + ok(!desc.CPUAccessFlags, "got %#x.\n", desc.CPUAccessFlags); + ok(!desc.MiscFlags, "got %#x.\n", desc.MiscFlags); + ok(desc.MipLevels == 1, "git %u.\n", desc.MipLevels); + ok(desc.Width == set_width, "got %u.\n", desc.Width); + ok(desc.Height == set_height, "got %u.\n", desc.Height); + + ID3D11Texture2D_Release(tex2d); + IMFDXGIBuffer_Release(dxgi_buffer); + IMFMediaBuffer_Release(buffer); + + hr = MFCreateCollection(&output_samples); + ok(hr == S_OK, "MFCreateCollection returned %#lx\n", hr); + + hr = IMFCollection_AddElement(output_samples, (IUnknown *)output.pSample); + ok(hr == S_OK, "AddElement returned %#lx\n", hr); + IMFSample_Release(output.pSample); + + ret = check_mf_sample_collection(output_samples, &output_sample_desc_abgr32_crop, L"abgr32frame-crop.bmp"); + ok(ret <= 8 /* NVIDIA needs 5, AMD needs 8 */, "got %lu%% diff\n", ret); + + IMFCollection_Release(output_samples); + + +skip_abgr32: + hr = IMFTransform_ProcessMessage(transform, MFT_MESSAGE_SET_D3D_MANAGER, 0); + ok(hr == S_OK, "got %#lx\n", hr); + + memset(&info, 0xcc, sizeof(info)); + hr = IMFTransform_GetOutputStreamInfo(transform, 0, &info); + ok(hr == S_OK, "got %#lx\n", hr); + todo_wine ok(info.dwFlags == MFT_OUTPUT_STREAM_PROVIDES_SAMPLES, "got %#lx.\n", info.dwFlags); + + + hr = IMFVideoSampleAllocator_UninitializeSampleAllocator(allocator); + ok(hr == S_OK, "AddElement returned %#lx\n", hr); + + IMFSample_Release(input_sample); + +failed: + if (allocator) + IMFVideoSampleAllocator_Release(allocator); + if (manager) + IMFDXGIDeviceManager_Release(manager); + if (transform) + IMFTransform_Release(transform); + + MFShutdown(); + CoUninitialize(); +} + 
+START_TEST(transform) +{ + init_functions(); + + test_sample_copier(); + test_sample_copier_output_processing(); + test_aac_encoder(); + test_aac_decoder(); + test_wma_encoder(); + test_wma_decoder(); + test_wma_decoder_dmo_input_type(); + test_wma_decoder_dmo_output_type(); + test_h264_encoder(); + test_h264_decoder(); + test_wmv_encoder(); + test_wmv_decoder(); + test_wmv_decoder_dmo_input_type(); + test_wmv_decoder_dmo_output_type(); + test_wmv_decoder_dmo_get_size_info(); + test_wmv_decoder_media_object(); + test_audio_convert(); + test_color_convert(); + test_video_processor(); + test_mp3_decoder(); + test_iv50_encoder(); + test_iv50_decoder(); + + test_h264_with_dxgi_manager(); + test_h264_decoder_concat_streams(); + + test_video_processor_with_dxgi_manager(); } diff --git a/dlls/mf/topology_loader.c b/dlls/mf/topology_loader.c index a56fb3e3909..3be64055245 100644 --- a/dlls/mf/topology_loader.c +++ b/dlls/mf/topology_loader.c @@ -24,6 +24,9 @@ #include "winbase.h" #include "mfidl.h" +#include "evr.h" +#include "d3d9.h" +#include "dxva2api.h" #include "wine/debug.h" #include "wine/list.h" @@ -215,26 +218,50 @@ static HRESULT topology_node_list_branches(IMFTopologyNode *node, struct list *b return hr; } -static HRESULT topology_branch_fill_media_type(IMFMediaType *up_type, IMFMediaType *down_type) +static void media_type_try_copy_attr(IMFMediaType *dst, IMFMediaType *src, const GUID *attr, HRESULT *hr) { - HRESULT hr = S_OK; PROPVARIANT value; - UINT32 count; - GUID key; - if (FAILED(hr = IMFMediaType_GetCount(up_type, &count))) - return hr; + PropVariantInit(&value); + if (SUCCEEDED(*hr) && FAILED(IMFMediaType_GetItem(dst, attr, NULL)) + && SUCCEEDED(IMFMediaType_GetItem(src, attr, &value))) + *hr = IMFMediaType_SetItem(dst, attr, &value); + PropVariantClear(&value); +} - while (count--) - { - PropVariantInit(&value); - hr = IMFMediaType_GetItemByIndex(up_type, count, &key, &value); - if (SUCCEEDED(hr) && FAILED(IMFMediaType_GetItem(down_type, &key, NULL))) 
- hr = IMFMediaType_SetItem(down_type, &key, &value); - PropVariantClear(&value); - if (FAILED(hr)) - return hr; - } +/* update a media type with additional attributes reported by upstream element */ +/* also present in mfreadwrite/reader.c pipeline */ +static HRESULT update_media_type_from_upstream(IMFMediaType *media_type, IMFMediaType *upstream_type) +{ + HRESULT hr = S_OK; + + /* propagate common video attributes */ + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_FRAME_SIZE, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_FRAME_RATE, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_DEFAULT_STRIDE, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_VIDEO_ROTATION, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_FIXED_SIZE_SAMPLES, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_PIXEL_ASPECT_RATIO, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_ALL_SAMPLES_INDEPENDENT, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_MINIMUM_DISPLAY_APERTURE, &hr); + + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_VIDEO_CHROMA_SITING, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_INTERLACE_MODE, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_TRANSFER_FUNCTION, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_VIDEO_PRIMARIES, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_YUV_MATRIX, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_VIDEO_LIGHTING, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_VIDEO_NOMINAL_RANGE, &hr); + + /* propagate common audio attributes */ + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_AUDIO_NUM_CHANNELS, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_AUDIO_BLOCK_ALIGNMENT, &hr); + media_type_try_copy_attr(media_type, upstream_type, 
&MF_MT_AUDIO_BITS_PER_SAMPLE, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_AUDIO_SAMPLES_PER_SECOND, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_AUDIO_AVG_BYTES_PER_SECOND, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_AUDIO_CHANNEL_MASK, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_AUDIO_SAMPLES_PER_BLOCK, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_AUDIO_VALID_BITS_PER_SAMPLE, &hr); return hr; } @@ -310,7 +337,7 @@ static HRESULT topology_branch_connect_indirect(IMFTopology *topology, MF_CONNEC hr = topology_branch_connect_down(topology, MF_CONNECT_DIRECT, &up_branch, up_type); if (down_type) { - if (SUCCEEDED(topology_branch_fill_media_type(up_type, down_type)) + if (SUCCEEDED(update_media_type_from_upstream(down_type, up_type)) && SUCCEEDED(IMFTransform_SetOutputType(transform, 0, down_type, 0))) method = MF_CONNECT_DIRECT; } @@ -319,7 +346,7 @@ static HRESULT topology_branch_connect_indirect(IMFTopology *topology, MF_CONNEC if (SUCCEEDED(hr) && method != MF_CONNECT_DIRECT && SUCCEEDED(IMFTransform_GetOutputAvailableType(transform, 0, 0, &media_type))) { - if (SUCCEEDED(topology_branch_fill_media_type(up_type, media_type))) + if (SUCCEEDED(update_media_type_from_upstream(media_type, up_type))) IMFTransform_SetOutputType(transform, 0, media_type, 0); IMFMediaType_Release(media_type); } @@ -389,8 +416,8 @@ static HRESULT topology_branch_connect_down(IMFTopology *topology, MF_CONNECT_ME { IMFMediaTypeHandler *down_handler; IMFMediaType *down_type = NULL; - MF_CONNECT_METHOD method; MF_TOPOLOGY_TYPE type; + UINT32 method; DWORD flags; HRESULT hr; @@ -468,7 +495,7 @@ static HRESULT topology_branch_foreach_up_types(IMFTopology *topology, MF_CONNEC static HRESULT topology_branch_connect(IMFTopology *topology, MF_CONNECT_METHOD method_mask, struct topology_branch *branch, BOOL enumerate_source_types) { - MF_CONNECT_METHOD method; + UINT32 method; HRESULT hr; 
TRACE("topology %p, method_mask %#x, branch %s.\n", topology, method_mask, debugstr_topology_branch(branch)); @@ -543,25 +570,35 @@ static HRESULT topology_loader_resolve_branches(struct topoloader_context *conte return hr; } -static BOOL topology_loader_is_node_d3d_aware(IMFTopologyNode *node) +static BOOL topology_node_get_object_attributes(IMFTopologyNode *node, IMFAttributes **attributes) { - IMFAttributes *attributes; - unsigned int d3d_aware = 0; IMFTransform *transform; + HRESULT hr; - if (FAILED(topology_node_get_object(node, &IID_IMFAttributes, (void **)&attributes))) - return FALSE; - - IMFAttributes_GetUINT32(attributes, &MF_SA_D3D_AWARE, &d3d_aware); - IMFAttributes_Release(attributes); - - if (!d3d_aware && SUCCEEDED(topology_node_get_object(node, &IID_IMFTransform, (void **)&transform))) + if (SUCCEEDED(topology_node_get_object(node, &IID_IMFTransform, (void **)&transform))) { - d3d_aware = mf_is_sample_copier_transform(transform); + hr = IMFTransform_GetAttributes(transform, attributes); IMFTransform_Release(transform); + return hr; } - return !!d3d_aware; + return topology_node_get_object(node, &IID_IMFAttributes, (void **)&attributes); +} + +BOOL topology_node_is_d3d_aware(IMFTopologyNode *node) +{ + UINT32 d3d_aware, d3d11_aware; + IMFAttributes *attributes; + + if (FAILED(topology_node_get_object_attributes(node, &attributes))) + return FALSE; + if (FAILED(IMFAttributes_GetUINT32(attributes, &MF_SA_D3D_AWARE, &d3d_aware))) + d3d_aware = FALSE; + if (FAILED(IMFAttributes_GetUINT32(attributes, &MF_SA_D3D11_AWARE, &d3d11_aware))) + d3d11_aware = FALSE; + IMFAttributes_Release(attributes); + + return d3d_aware || d3d11_aware; } static HRESULT topology_loader_create_copier(IMFTopologyNode *upstream_node, DWORD upstream_output, @@ -617,33 +654,98 @@ static HRESULT topology_loader_connect_copier(struct topoloader_context *context return S_OK; } +HRESULT topology_node_set_device_manager(IMFTopologyNode *node, IUnknown *device_manager) +{ + IMFTransform 
*transform; + HRESULT hr; + + if (SUCCEEDED(hr = topology_node_get_object(node, &IID_IMFTransform, (void **)&transform))) + { + hr = IMFTransform_ProcessMessage(transform, MFT_MESSAGE_SET_D3D_MANAGER, (LONG_PTR)device_manager); + IMFTransform_Release(transform); + } + + if (SUCCEEDED(hr)) + { + IMFTopologyNode *upstream; + DWORD i, count, output; + + hr = IMFTopologyNode_GetInputCount(node, &count); + + for (i = 0; SUCCEEDED(hr) && i < count; i++) + { + if (FAILED(IMFTopologyNode_GetInput(node, 0, &upstream, &output))) + continue; + + if (topology_node_is_d3d_aware(upstream)) + topology_node_set_device_manager(upstream, device_manager); + + IMFTopologyNode_Release(upstream); + } + } + + return hr; +} + +HRESULT stream_sink_get_device_manager(IMFStreamSink *stream_sink, IUnknown **device_manager) +{ + HRESULT hr; + + if (SUCCEEDED(hr = MFGetService((IUnknown *)stream_sink, &MR_VIDEO_ACCELERATION_SERVICE, + &IID_IMFDXGIDeviceManager, (void **)device_manager))) + return hr; + if (SUCCEEDED(hr = MFGetService((IUnknown *)stream_sink, &MR_VIDEO_ACCELERATION_SERVICE, + &IID_IDirect3DDeviceManager9, (void **)device_manager))) + return hr; + + return hr; +} + /* Right now this should be used for output nodes only. 
*/ -static HRESULT topology_loader_connect_d3d_aware_input(struct topoloader_context *context, - IMFTopologyNode *node) +static HRESULT topology_loader_connect_d3d_aware_sink(struct topoloader_context *context, + IMFTopologyNode *node, MFTOPOLOGY_DXVA_MODE dxva_mode) { IMFTopologyNode *upstream_node; IMFTransform *copier = NULL; IMFStreamSink *stream_sink; + IUnknown *device_manager; DWORD upstream_output; HRESULT hr; if (FAILED(hr = topology_node_get_object(node, &IID_IMFStreamSink, (void **)&stream_sink))) return hr; - if (topology_loader_is_node_d3d_aware(node)) + if (SUCCEEDED(hr = stream_sink_get_device_manager(stream_sink, &device_manager))) { if (SUCCEEDED(IMFTopologyNode_GetInput(node, 0, &upstream_node, &upstream_output))) { - if (!topology_loader_is_node_d3d_aware(upstream_node)) + BOOL needs_copier = dxva_mode == MFTOPOLOGY_DXVA_DEFAULT; + IMFTransform *transform; + + if (needs_copier && SUCCEEDED(topology_node_get_object(upstream_node, &IID_IMFTransform, (void **)&transform))) { - if (SUCCEEDED(hr = topology_loader_create_copier(upstream_node, upstream_output, node, 0, &copier))) - { - hr = topology_loader_connect_copier(context, upstream_node, upstream_output, node, 0, copier); - IMFTransform_Release(copier); - } + MFT_OUTPUT_STREAM_INFO info = {0}; + + if (FAILED(IMFTransform_GetOutputStreamInfo(transform, upstream_output, &info)) + || !(info.dwFlags & (MFT_OUTPUT_STREAM_CAN_PROVIDE_SAMPLES | MFT_OUTPUT_STREAM_PROVIDES_SAMPLES))) + needs_copier = FALSE; + + IMFTransform_Release(transform); + } + + if (needs_copier && SUCCEEDED(hr = topology_loader_create_copier(upstream_node, upstream_output, node, 0, &copier))) + { + hr = topology_loader_connect_copier(context, upstream_node, upstream_output, node, 0, copier); + IMFTransform_Release(copier); } + + if (dxva_mode == MFTOPOLOGY_DXVA_FULL && topology_node_is_d3d_aware(upstream_node)) + topology_node_set_device_manager(upstream_node, device_manager); + IMFTopologyNode_Release(upstream_node); } + + 
IUnknown_Release(device_manager); } IMFStreamSink_Release(stream_sink); @@ -653,6 +755,7 @@ static HRESULT topology_loader_connect_d3d_aware_input(struct topoloader_context static void topology_loader_resolve_complete(struct topoloader_context *context) { + MFTOPOLOGY_DXVA_MODE dxva_mode; MF_TOPOLOGY_TYPE node_type; IMFTopologyNode *node; WORD i, node_count; @@ -660,6 +763,9 @@ static void topology_loader_resolve_complete(struct topoloader_context *context) IMFTopology_GetNodeCount(context->output_topology, &node_count); + if (FAILED(IMFTopology_GetUINT32(context->input_topology, &MF_TOPOLOGY_DXVA_MODE, (UINT32 *)&dxva_mode))) + dxva_mode = 0; + for (i = 0; i < node_count; ++i) { if (SUCCEEDED(IMFTopology_GetNode(context->output_topology, i, &node))) @@ -672,7 +778,7 @@ static void topology_loader_resolve_complete(struct topoloader_context *context) if (FAILED(IMFTopologyNode_GetItem(node, &MF_TOPONODE_STREAMID, NULL))) IMFTopologyNode_SetUINT32(node, &MF_TOPONODE_STREAMID, 0); - if (FAILED(hr = topology_loader_connect_d3d_aware_input(context, node))) + if (FAILED(hr = topology_loader_connect_d3d_aware_sink(context, node, dxva_mode))) WARN("Failed to connect D3D-aware input, hr %#lx.\n", hr); } else if (node_type == MF_TOPOLOGY_SOURCESTREAM_NODE) diff --git a/dlls/mfmediaengine/main.c b/dlls/mfmediaengine/main.c index 9e41d9dad84..85b4dbb471a 100644 --- a/dlls/mfmediaengine/main.c +++ b/dlls/mfmediaengine/main.c @@ -995,10 +995,6 @@ static HRESULT WINAPI media_engine_session_events_Invoke(IMFAsyncCallback *iface IMFMediaEngineNotify_EventNotify(engine->callback, MF_MEDIA_ENGINE_EVENT_ENDED, 0, 0); break; - - case MEEndOfPresentation: - video_frame_sink_notify_end_of_presentation(engine->presentation.frame_sink); - break; } failed: @@ -1101,6 +1097,7 @@ static HRESULT media_engine_create_effects(struct effect *effects, size_t count, for (i = 0; i < count; ++i) { + UINT32 method = MF_CONNECT_ALLOW_DECODER; IMFTopologyNode *node = NULL; if (FAILED(hr = 
MFCreateTopologyNode(MF_TOPOLOGY_TRANSFORM_NODE, &node))) @@ -1113,7 +1110,8 @@ static HRESULT media_engine_create_effects(struct effect *effects, size_t count, IMFTopologyNode_SetUINT32(node, &MF_TOPONODE_NOSHUTDOWN_ON_REMOVE, FALSE); if (effects[i].optional) - IMFTopologyNode_SetUINT32(node, &MF_TOPONODE_CONNECT_METHOD, MF_CONNECT_AS_OPTIONAL); + method |= MF_CONNECT_AS_OPTIONAL; + IMFTopologyNode_SetUINT32(node, &MF_TOPONODE_CONNECT_METHOD, method); IMFTopology_AddNode(topology, node); IMFTopologyNode_ConnectOutput(last, 0, node, 0); @@ -1186,7 +1184,7 @@ static HRESULT media_engine_create_video_renderer(struct media_engine *engine, I IMFMediaType_SetGUID(media_type, &MF_MT_MAJOR_TYPE, &MFMediaType_Video); IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &subtype); - hr = create_video_frame_sink(media_type, &engine->sink_events, &engine->presentation.frame_sink); + hr = create_video_frame_sink(media_type, (IUnknown *)engine->device_manager, &engine->sink_events, &engine->presentation.frame_sink); IMFMediaType_Release(media_type); if (FAILED(hr)) return hr; @@ -2415,6 +2413,83 @@ static void media_engine_update_d3d11_frame_surface(ID3D11DeviceContext *context IMFSample_Release(sample); } +static HRESULT get_d3d11_resource_from_sample(IMFSample *sample, ID3D11Texture2D **resource, UINT *subresource) +{ + IMFDXGIBuffer *dxgi_buffer; + IMFMediaBuffer *buffer; + HRESULT hr; + + *resource = NULL; + *subresource = 0; + + if (FAILED(hr = IMFSample_GetBufferByIndex(sample, 0, &buffer))) + return hr; + + if (SUCCEEDED(hr = IMFMediaBuffer_QueryInterface(buffer, &IID_IMFDXGIBuffer, (void **)&dxgi_buffer))) + { + IMFDXGIBuffer_GetSubresourceIndex(dxgi_buffer, subresource); + hr = IMFDXGIBuffer_GetResource(dxgi_buffer, &IID_ID3D11Texture2D, (void **)resource); + IMFDXGIBuffer_Release(dxgi_buffer); + } + + IMFMediaBuffer_Release(buffer); + return hr; +} + +static HRESULT media_engine_transfer_d3d11(struct media_engine *engine, ID3D11Texture2D *dst_texture, + const 
MFVideoNormalizedRect *src_rect, const RECT *dst_rect, const MFARGB *color) +{ + MFVideoNormalizedRect src_rect_default = {0.0, 0.0, 1.0, 1.0}; + MFARGB color_default = {0, 0, 0, 0}; + D3D11_TEXTURE2D_DESC src_desc; + ID3D11DeviceContext *context; + ID3D11Texture2D *src_texture; + RECT dst_rect_default = {0}; + D3D11_BOX src_box = {0}; + ID3D11Device *device; + IMFSample *sample; + UINT subresource; + HRESULT hr; + + if (!src_rect) + src_rect = &src_rect_default; + if (!dst_rect) + dst_rect = &dst_rect_default; + if (!color) + color = &color_default; + + if (!video_frame_sink_get_sample(engine->presentation.frame_sink, &sample)) + return MF_E_UNEXPECTED; + hr = get_d3d11_resource_from_sample(sample, &src_texture, &subresource); + IMFSample_Release(sample); + if (FAILED(hr)) + return hr; + + if (FAILED(hr = media_engine_lock_d3d_device(engine, &device))) + { + ID3D11Texture2D_Release(src_texture); + return hr; + } + + ID3D11Texture2D_GetDesc(src_texture, &src_desc); + + src_box.left = src_rect->left * src_desc.Width; + src_box.top = src_rect->top * src_desc.Height; + src_box.front = 0; + src_box.right = src_rect->right * src_desc.Width; + src_box.bottom = src_rect->bottom * src_desc.Height; + src_box.back = 1; + + ID3D11Device_GetImmediateContext(device, &context); + ID3D11DeviceContext_CopySubresourceRegion(context, (ID3D11Resource *)dst_texture, 0, + dst_rect->left, dst_rect->top, 0, (ID3D11Resource *)src_texture, subresource, &src_box); + ID3D11DeviceContext_Release(context); + + media_engine_unlock_d3d_device(engine, device); + ID3D11Texture2D_Release(src_texture); + return hr; +} + static HRESULT media_engine_transfer_to_d3d11_texture(struct media_engine *engine, ID3D11Texture2D *texture, const MFVideoNormalizedRect *src_rect, const RECT *dst_rect, const MFARGB *color) { @@ -2580,7 +2655,8 @@ static HRESULT WINAPI media_engine_TransferVideoFrame(IMFMediaEngineEx *iface, I if (SUCCEEDED(IUnknown_QueryInterface(surface, &IID_ID3D11Texture2D, (void **)&texture))) 
{ - hr = media_engine_transfer_to_d3d11_texture(engine, texture, src_rect, dst_rect, color); + if (!engine->device_manager || FAILED(hr = media_engine_transfer_d3d11(engine, texture, src_rect, dst_rect, color))) + hr = media_engine_transfer_to_d3d11_texture(engine, texture, src_rect, dst_rect, color); ID3D11Texture2D_Release(texture); } else diff --git a/dlls/mfmediaengine/mediaengine_private.h b/dlls/mfmediaengine/mediaengine_private.h index 9920a5ef19b..cdbdbdb90b0 100644 --- a/dlls/mfmediaengine/mediaengine_private.h +++ b/dlls/mfmediaengine/mediaengine_private.h @@ -20,10 +20,9 @@ struct video_frame_sink; -HRESULT create_video_frame_sink(IMFMediaType *media_type, IMFAsyncCallback *events_callback, +HRESULT create_video_frame_sink(IMFMediaType *media_type, IUnknown *device_manager, IMFAsyncCallback *events_callback, struct video_frame_sink **sink); HRESULT video_frame_sink_query_iface(struct video_frame_sink *object, REFIID riid, void **obj); ULONG video_frame_sink_release(struct video_frame_sink *sink); int video_frame_sink_get_sample(struct video_frame_sink *sink, IMFSample **sample); HRESULT video_frame_sink_get_pts(struct video_frame_sink *sink, MFTIME clocktime, LONGLONG *pts); -void video_frame_sink_notify_end_of_presentation(struct video_frame_sink *sink); diff --git a/dlls/mfmediaengine/tests/mfmediaengine.c b/dlls/mfmediaengine/tests/mfmediaengine.c index 47130d8e436..63a7e139193 100644 --- a/dlls/mfmediaengine/tests/mfmediaengine.c +++ b/dlls/mfmediaengine/tests/mfmediaengine.c @@ -72,34 +72,39 @@ static BOOL compare_double(double a, double b, double allowed_error) return fabs(a - b) <= allowed_error; } -static DWORD compare_rgb32(const BYTE *data, DWORD *length, const RECT *rect, const BYTE *expect) +static DWORD compare_rgb(const BYTE *data, DWORD *length, const SIZE *size, const RECT *rect, const BYTE *expect, UINT bits) { - DWORD x, y, size, diff = 0, width = (rect->right + 0xf) & ~0xf, height = (rect->bottom + 0xf) & ~0xf; + DWORD x, y, step = 
bits / 8, data_size, diff = 0, width = size->cx, height = size->cy; /* skip BMP header from the dump */ - size = *(DWORD *)(expect + 2 + 2 * sizeof(DWORD)); - *length = *length + size; - expect = expect + size; + data_size = *(DWORD *)(expect + 2 + 2 * sizeof(DWORD)); + *length = *length + data_size; + expect = expect + data_size; - for (y = 0; y < height; y++, data += width * 4, expect += width * 4) + for (y = 0; y < height; y++, data += width * step, expect += width * step) { if (y < rect->top || y >= rect->bottom) continue; for (x = 0; x < width; x++) { if (x < rect->left || x >= rect->right) continue; - diff += abs((int)expect[4 * x + 0] - (int)data[4 * x + 0]); - diff += abs((int)expect[4 * x + 1] - (int)data[4 * x + 1]); - diff += abs((int)expect[4 * x + 2] - (int)data[4 * x + 2]); + diff += abs((int)expect[step * x + 0] - (int)data[step * x + 0]); + diff += abs((int)expect[step * x + 1] - (int)data[step * x + 1]); + if (step >= 3) diff += abs((int)expect[step * x + 2] - (int)data[step * x + 2]); } } - size = (rect->right - rect->left) * (rect->bottom - rect->top) * 3; - return diff * 100 / 256 / size; + data_size = (rect->right - rect->left) * (rect->bottom - rect->top) * min(step, 3); + return diff * 100 / 256 / data_size; } -static void dump_rgb32(const BYTE *data, DWORD length, const RECT *rect, HANDLE output) +static DWORD compare_rgb32(const BYTE *data, DWORD *length, const SIZE *size, const RECT *rect, const BYTE *expect) { - DWORD width = (rect->right + 0xf) & ~0xf, height = (rect->bottom + 0xf) & ~0xf; + return compare_rgb(data, length, size, rect, expect, 32); +} + +static void dump_rgb(const BYTE *data, DWORD length, const SIZE *size, HANDLE output, UINT bits) +{ + DWORD width = size->cx, height = size->cy; static const char magic[2] = "BM"; struct { @@ -113,7 +118,7 @@ static void dump_rgb32(const BYTE *data, DWORD length, const RECT *rect, HANDLE .biHeader = { .biSize = sizeof(BITMAPINFOHEADER), .biWidth = width, .biHeight = height, .biPlanes = 
1, - .biBitCount = 32, .biCompression = BI_RGB, .biSizeImage = width * height * 4, + .biBitCount = bits, .biCompression = BI_RGB, .biSizeImage = width * height * (bits / 8), }, }; DWORD written; @@ -130,20 +135,25 @@ static void dump_rgb32(const BYTE *data, DWORD length, const RECT *rect, HANDLE ok(written == length, "written %lu bytes\n", written); } +static void dump_rgb32(const BYTE *data, DWORD length, const SIZE *size, HANDLE output) +{ + return dump_rgb(data, length, size, output, 32); +} + #define check_rgb32_data(a, b, c, d) check_rgb32_data_(__LINE__, a, b, c, d) -static void check_rgb32_data_(int line, const WCHAR *filename, const BYTE *data, DWORD length, const RECT *rect) +static DWORD check_rgb32_data_(int line, const WCHAR *filename, const BYTE *data, DWORD length, const RECT *rect) { + SIZE size = {rect->right, rect->bottom}; WCHAR output_path[MAX_PATH]; const BYTE *expect_data; HRSRC resource; HANDLE output; - DWORD diff; GetTempPathW(ARRAY_SIZE(output_path), output_path); lstrcatW(output_path, filename); output = CreateFileW(output_path, GENERIC_READ|GENERIC_WRITE, 0, NULL, CREATE_ALWAYS, 0, 0); ok(output != INVALID_HANDLE_VALUE, "CreateFileW failed, error %lu\n", GetLastError()); - dump_rgb32(data, length, rect, output); + dump_rgb32(data, length, &size, output); trace("created %s\n", debugstr_w(output_path)); CloseHandle(output); @@ -151,8 +161,7 @@ static void check_rgb32_data_(int line, const WCHAR *filename, const BYTE *data, ok(resource != 0, "FindResourceW failed, error %lu\n", GetLastError()); expect_data = LockResource(LoadResource(GetModuleHandleW(NULL), resource)); - diff = compare_rgb32(data, &length, rect, expect_data); - ok_(__FILE__, line)(diff <= 3 /* small difference in wine */, "Unexpected %lu%% diff\n", diff); + return compare_rgb32(data, &length, &size, rect, expect_data); } static void init_functions(void) @@ -1356,7 +1365,8 @@ static void test_TransferVideoFrame(void) ok(!!map_desc.pData, "got pData %p\n", map_desc.pData); 
ok(map_desc.DepthPitch == 16384, "got DepthPitch %u\n", map_desc.DepthPitch); ok(map_desc.RowPitch == desc.Width * 4, "got RowPitch %u\n", map_desc.RowPitch); - check_rgb32_data(L"rgb32frame.bmp", map_desc.pData, map_desc.RowPitch * desc.Height, &dst_rect); + res = check_rgb32_data(L"rgb32frame.bmp", map_desc.pData, map_desc.RowPitch * desc.Height, &dst_rect); + ok(res == 0, "Unexpected %lu%% diff\n", res); ID3D11DeviceContext_Unmap(context, (ID3D11Resource *)rb_texture, 0); ID3D11DeviceContext_Release(context); @@ -1377,33 +1387,39 @@ static void test_TransferVideoFrame(void) IMFMediaEngineNotify_Release(¬ify->IMFMediaEngineNotify_iface); } -struct passthrough_mft +struct test_transform { IMFTransform IMFTransform_iface; LONG refcount; - IMFMediaType *media_type_in, *media_type_out; - IMFSample *sample; - LONG processing_count; - UINT32 index; + IMFAttributes *attributes; + + UINT input_count; + IMFMediaType **input_types; + IMFMediaType *input_type; - CRITICAL_SECTION cs; + UINT output_count; + IMFMediaType **output_types; + IMFMediaType *output_type; + + IMFSample *sample; + UINT sample_count; }; -static struct passthrough_mft *impl_from_IMFTransform(IMFTransform *iface) +static struct test_transform *test_transform_from_IMFTransform(IMFTransform *iface) { - return CONTAINING_RECORD(iface, struct passthrough_mft, IMFTransform_iface); + return CONTAINING_RECORD(iface, struct test_transform, IMFTransform_iface); } -static HRESULT WINAPI passthrough_mft_QueryInterface(IMFTransform *iface, REFIID iid, void **out) +static HRESULT WINAPI test_transform_QueryInterface(IMFTransform *iface, REFIID iid, void **out) { - struct passthrough_mft *impl = impl_from_IMFTransform(iface); + struct test_transform *transform = test_transform_from_IMFTransform(iface); - if (IsEqualGUID(iid, &IID_IUnknown) || - IsEqualGUID(iid, &IID_IMFTransform)) + if (IsEqualGUID(iid, &IID_IUnknown) + || IsEqualGUID(iid, &IID_IMFTransform)) { - *out = &impl->IMFTransform_iface; - 
IUnknown_AddRef((IUnknown *)*out); + IMFTransform_AddRef(&transform->IMFTransform_iface); + *out = &transform->IMFTransform_iface; return S_OK; } @@ -1411,389 +1427,313 @@ static HRESULT WINAPI passthrough_mft_QueryInterface(IMFTransform *iface, REFIID return E_NOINTERFACE; } -static ULONG WINAPI passthrough_mft_AddRef(IMFTransform *iface) +static ULONG WINAPI test_transform_AddRef(IMFTransform *iface) { - struct passthrough_mft *impl = impl_from_IMFTransform(iface); - ULONG refcount = InterlockedIncrement(&impl->refcount); + struct test_transform *transform = test_transform_from_IMFTransform(iface); + ULONG refcount = InterlockedIncrement(&transform->refcount); return refcount; } -static ULONG WINAPI passthrough_mft_Release(IMFTransform *iface) +static ULONG WINAPI test_transform_Release(IMFTransform *iface) { - struct passthrough_mft *impl = impl_from_IMFTransform(iface); - ULONG refcount = InterlockedDecrement(&impl->refcount); + struct test_transform *transform = test_transform_from_IMFTransform(iface); + ULONG refcount = InterlockedDecrement(&transform->refcount); if (!refcount) { - if (impl->media_type_out) IMFMediaType_Release(impl->media_type_out); - if (impl->media_type_in) IMFMediaType_Release(impl->media_type_in); - DeleteCriticalSection(&impl->cs); - free(impl); + if (transform->input_type) + IMFMediaType_Release(transform->input_type); + if (transform->output_type) + IMFMediaType_Release(transform->output_type); + free(transform); } return refcount; } -static HRESULT WINAPI passthrough_mft_GetStreamLimits(IMFTransform *iface, DWORD *input_minimum, +static HRESULT WINAPI test_transform_GetStreamLimits(IMFTransform *iface, DWORD *input_minimum, DWORD *input_maximum, DWORD *output_minimum, DWORD *output_maximum) { - *input_minimum = *input_maximum = *output_minimum = *output_maximum = 1; - return S_OK; + ok(0, "Unexpected call.\n"); + return E_NOTIMPL; } -static HRESULT WINAPI passthrough_mft_GetStreamCount(IMFTransform *iface, DWORD *inputs, DWORD 
*outputs) +static HRESULT WINAPI test_transform_GetStreamCount(IMFTransform *iface, DWORD *inputs, DWORD *outputs) { *inputs = *outputs = 1; return S_OK; } -static HRESULT WINAPI passthrough_mft_GetStreamIDs(IMFTransform *iface, DWORD input_size, DWORD *inputs, +static HRESULT WINAPI test_transform_GetStreamIDs(IMFTransform *iface, DWORD input_size, DWORD *inputs, DWORD output_size, DWORD *outputs) { return E_NOTIMPL; } -static HRESULT WINAPI passthrough_mft_GetInputStreamInfo(IMFTransform *iface, DWORD id, MFT_INPUT_STREAM_INFO *info) +static HRESULT WINAPI test_transform_GetInputStreamInfo(IMFTransform *iface, DWORD id, MFT_INPUT_STREAM_INFO *info) { + ok(0, "Unexpected %s call.\n", __func__); return E_NOTIMPL; } -static HRESULT WINAPI passthrough_mft_GetOutputStreamInfo(IMFTransform *iface, DWORD id, MFT_OUTPUT_STREAM_INFO *info) +static HRESULT WINAPI test_transform_GetOutputStreamInfo(IMFTransform *iface, DWORD id, MFT_OUTPUT_STREAM_INFO *info) { - if (id) - return MF_E_INVALIDSTREAMNUMBER; - - info->dwFlags = - MFT_OUTPUT_STREAM_PROVIDES_SAMPLES | - MFT_OUTPUT_STREAM_WHOLE_SAMPLES | - MFT_OUTPUT_STREAM_FIXED_SAMPLE_SIZE | - MFT_OUTPUT_STREAM_SINGLE_SAMPLE_PER_BUFFER; - - info->cbAlignment = 0; - info->cbSize = 0; + memset(info, 0, sizeof(*info)); return S_OK; } -static HRESULT WINAPI passthrough_mft_GetAttributes(IMFTransform *iface, IMFAttributes **attributes) +static HRESULT WINAPI test_transform_GetAttributes(IMFTransform *iface, IMFAttributes **attributes) { - return E_NOTIMPL; + struct test_transform *transform = test_transform_from_IMFTransform(iface); + if (!(*attributes = transform->attributes)) + return E_NOTIMPL; + IMFAttributes_AddRef(*attributes); + return S_OK; } -static HRESULT WINAPI passthrough_mft_GetInputStreamAttributes(IMFTransform *iface, DWORD id, IMFAttributes **attributes) +static HRESULT WINAPI test_transform_GetInputStreamAttributes(IMFTransform *iface, DWORD id, IMFAttributes **attributes) { return E_NOTIMPL; } -static HRESULT 
WINAPI passthrough_mft_GetOutputStreamAttributes(IMFTransform *iface, DWORD id, IMFAttributes **attributes) +static HRESULT WINAPI test_transform_GetOutputStreamAttributes(IMFTransform *iface, DWORD id, IMFAttributes **attributes) { return E_NOTIMPL; } -static HRESULT WINAPI passthrough_mft_DeleteInputStream(IMFTransform *iface, DWORD id) +static HRESULT WINAPI test_transform_DeleteInputStream(IMFTransform *iface, DWORD id) { + ok(0, "Unexpected %s call.\n", __func__); return E_NOTIMPL; } -static HRESULT WINAPI passthrough_mft_AddInputStreams(IMFTransform *iface, DWORD streams, DWORD *ids) +static HRESULT WINAPI test_transform_AddInputStreams(IMFTransform *iface, DWORD streams, DWORD *ids) { + ok(0, "Unexpected %s call.\n", __func__); return E_NOTIMPL; } -static HRESULT WINAPI passthrough_mft_GetInputAvailableType(IMFTransform *iface, DWORD id, DWORD index, +static HRESULT WINAPI test_transform_GetInputAvailableType(IMFTransform *iface, DWORD id, DWORD index, IMFMediaType **type) { - static const GUID *types[] = { &MFMediaType_Video, &MFMediaType_Audio }; - HRESULT hr; + struct test_transform *transform = test_transform_from_IMFTransform(iface); - if (id) - return MF_E_INVALIDSTREAMNUMBER; - - if (index > ARRAY_SIZE(types) - 1) - return MF_E_NO_MORE_TYPES; - - if (SUCCEEDED(hr = MFCreateMediaType(type))) - hr = IMFMediaType_SetGUID(*type, &MF_MT_MAJOR_TYPE, types[index]); - - return hr; -} - -static HRESULT WINAPI passthrough_mft_GetOutputAvailableType(IMFTransform *iface, DWORD id, DWORD index, - IMFMediaType **type) -{ - struct passthrough_mft *impl = impl_from_IMFTransform(iface); - HRESULT hr = S_OK; - - if (id) - return MF_E_INVALIDSTREAMNUMBER; - - EnterCriticalSection(&impl->cs); - - if (index) + if (index >= transform->input_count) { - hr = MF_E_NO_MORE_TYPES; - } - else if (impl->media_type_out) - { - *type = impl->media_type_out; - IMFMediaType_AddRef(*type); - } - else if (impl->media_type_in) - { - *type = impl->media_type_in; - 
IMFMediaType_AddRef(*type); - } - else - { - hr = MF_E_TRANSFORM_TYPE_NOT_SET; + *type = NULL; + return MF_E_NO_MORE_TYPES; } - LeaveCriticalSection(&impl->cs); - - return hr; + *type = transform->input_types[index]; + IMFMediaType_AddRef(*type); + return S_OK; } -static HRESULT WINAPI passthrough_mft_SetInputType(IMFTransform *iface, DWORD id, IMFMediaType *type, DWORD flags) +static HRESULT WINAPI test_transform_GetOutputAvailableType(IMFTransform *iface, DWORD id, + DWORD index, IMFMediaType **type) { - struct passthrough_mft *impl = impl_from_IMFTransform(iface); - HRESULT hr = S_OK; + struct test_transform *transform = test_transform_from_IMFTransform(iface); - if (id) - return MF_E_INVALIDSTREAMNUMBER; - - EnterCriticalSection(&impl->cs); - - if (!(flags & MFT_SET_TYPE_TEST_ONLY)) + if (index >= transform->output_count) { - if (impl->media_type_in) - IMFMediaType_Release(impl->media_type_in); - - impl->media_type_in = type; - IMFMediaType_AddRef(impl->media_type_in); + *type = NULL; + return MF_E_NO_MORE_TYPES; } - LeaveCriticalSection(&impl->cs); - - return hr; -} - -static HRESULT WINAPI passthrough_mft_SetOutputType(IMFTransform *iface, DWORD id, IMFMediaType *type, DWORD flags) -{ - struct passthrough_mft *impl = impl_from_IMFTransform(iface); - - if (id) - return MF_E_INVALIDSTREAMNUMBER; - - EnterCriticalSection(&impl->cs); - - if (impl->media_type_out) - IMFMediaType_Release(impl->media_type_out); - - impl->media_type_out = type; - IMFMediaType_AddRef(impl->media_type_out); - - LeaveCriticalSection(&impl->cs); - + *type = transform->output_types[index]; + IMFMediaType_AddRef(*type); return S_OK; } -static HRESULT WINAPI passthrough_mft_GetInputCurrentType(IMFTransform *iface, DWORD id, IMFMediaType **type) +static HRESULT WINAPI test_transform_SetInputType(IMFTransform *iface, DWORD id, IMFMediaType *type, DWORD flags) { - struct passthrough_mft *impl = impl_from_IMFTransform(iface); - HRESULT hr = S_OK; - - if (id) - return MF_E_INVALIDSTREAMNUMBER; + 
struct test_transform *transform = test_transform_from_IMFTransform(iface); + GUID subtype, desired; + HRESULT hr; - EnterCriticalSection(&impl->cs); - if (impl->media_type_in) + if (type) { - *type = impl->media_type_in; - IMFMediaType_AddRef(*type); + hr = IMFMediaType_GetGUID(transform->input_types[0], &MF_MT_SUBTYPE, &subtype); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetGUID(type, &MF_MT_SUBTYPE, &desired); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + if (!IsEqualGUID(&subtype, &desired)) + return MF_E_INVALIDMEDIATYPE; } - else + + if (flags & MFT_SET_TYPE_TEST_ONLY) { - hr = MF_E_TRANSFORM_TYPE_NOT_SET; + todo_wine ok(0, "Unexpected %s call.\n", __func__); + return winetest_platform_is_wine ? S_OK : E_NOTIMPL; } - LeaveCriticalSection(&impl->cs); - - return hr; + if (transform->input_type) + IMFMediaType_Release(transform->input_type); + if ((transform->input_type = type)) + IMFMediaType_AddRef(transform->input_type); + return S_OK; } -static HRESULT WINAPI passthrough_mft_GetOutputCurrentType(IMFTransform *iface, DWORD id, IMFMediaType **type) +static HRESULT WINAPI test_transform_SetOutputType(IMFTransform *iface, DWORD id, IMFMediaType *type, DWORD flags) { - struct passthrough_mft *impl = impl_from_IMFTransform(iface); - HRESULT hr = S_OK; - - if (id) - return MF_E_INVALIDSTREAMNUMBER; - - EnterCriticalSection(&impl->cs); + struct test_transform *transform = test_transform_from_IMFTransform(iface); + GUID subtype, desired; + HRESULT hr; - if (impl->media_type_out) + if (type) { - *type = impl->media_type_out; - IMFMediaType_AddRef(*type); + hr = IMFMediaType_GetGUID(transform->output_types[0], &MF_MT_SUBTYPE, &subtype); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetGUID(type, &MF_MT_SUBTYPE, &desired); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + if (!IsEqualGUID(&subtype, &desired)) + return MF_E_INVALIDMEDIATYPE; } - else + + if (flags & MFT_SET_TYPE_TEST_ONLY) { - hr = 
MF_E_TRANSFORM_TYPE_NOT_SET; + todo_wine ok(0, "Unexpected %s call.\n", __func__); + return winetest_platform_is_wine ? S_OK : E_NOTIMPL; } + if (transform->output_type) + IMFMediaType_Release(transform->output_type); + if ((transform->output_type = type)) + IMFMediaType_AddRef(transform->output_type); + return S_OK; +} - LeaveCriticalSection(&impl->cs); +static HRESULT WINAPI test_transform_GetInputCurrentType(IMFTransform *iface, DWORD id, IMFMediaType **type) +{ + struct test_transform *transform = test_transform_from_IMFTransform(iface); + if (!(*type = transform->input_type)) + return MF_E_TRANSFORM_TYPE_NOT_SET; + IMFMediaType_AddRef(*type); + return S_OK; +} - return hr; +static HRESULT WINAPI test_transform_GetOutputCurrentType(IMFTransform *iface, DWORD id, IMFMediaType **type) +{ + struct test_transform *transform = test_transform_from_IMFTransform(iface); + if (!(*type = transform->output_type)) + return MF_E_TRANSFORM_TYPE_NOT_SET; + IMFMediaType_AddRef(*type); + return S_OK; } -static HRESULT WINAPI passthrough_mft_GetInputStatus(IMFTransform *iface, DWORD id, DWORD *flags) +static HRESULT WINAPI test_transform_GetInputStatus(IMFTransform *iface, DWORD id, DWORD *flags) { + ok(0, "Unexpected %s call.\n", __func__); return E_NOTIMPL; } -static HRESULT WINAPI passthrough_mft_GetOutputStatus(IMFTransform *iface, DWORD *flags) +static HRESULT WINAPI test_transform_GetOutputStatus(IMFTransform *iface, DWORD *flags) { + ok(0, "Unexpected %s call.\n", __func__); return E_NOTIMPL; } -static HRESULT WINAPI passthrough_mft_SetOutputBounds(IMFTransform *iface, LONGLONG lower, LONGLONG upper) +static HRESULT WINAPI test_transform_SetOutputBounds(IMFTransform *iface, LONGLONG lower, LONGLONG upper) { + ok(0, "Unexpected %s call.\n", __func__); return E_NOTIMPL; } -static HRESULT WINAPI passthrough_mft_ProcessEvent(IMFTransform *iface, DWORD id, IMFMediaEvent *event) +static HRESULT WINAPI test_transform_ProcessEvent(IMFTransform *iface, DWORD id, IMFMediaEvent 
*event) { + ok(0, "Unexpected %s call.\n", __func__); return E_NOTIMPL; } -static HRESULT WINAPI passthrough_mft_ProcessMessage(IMFTransform *iface, MFT_MESSAGE_TYPE message, ULONG_PTR param) +static HRESULT WINAPI test_transform_ProcessMessage(IMFTransform *iface, MFT_MESSAGE_TYPE message, ULONG_PTR param) { - if (message == MFT_MESSAGE_COMMAND_FLUSH) - return E_NOTIMPL; - return S_OK; } -static HRESULT WINAPI passthrough_mft_ProcessInput(IMFTransform *iface, DWORD id, IMFSample *sample, DWORD flags) +static HRESULT WINAPI test_transform_ProcessInput(IMFTransform *iface, DWORD id, IMFSample *sample, DWORD flags) { - struct passthrough_mft *impl = impl_from_IMFTransform(iface); - HRESULT hr = S_OK; - - if (id) - return MF_E_INVALIDSTREAMNUMBER; - - EnterCriticalSection(&impl->cs); - if (impl->sample) - { - hr = MF_E_NOTACCEPTING; - } - else - { - impl->sample = sample; - IMFSample_AddRef(impl->sample); - } - - LeaveCriticalSection(&impl->cs); - - return hr; + struct test_transform *transform = test_transform_from_IMFTransform(iface); + if (transform->sample) + return MF_E_NOTACCEPTING; + transform->sample = sample; + IMFSample_AddRef(transform->sample); + return S_OK; } -static HRESULT WINAPI passthrough_mft_ProcessOutput(IMFTransform *iface, DWORD flags, DWORD count, - MFT_OUTPUT_DATA_BUFFER *samples, DWORD *status) +static HRESULT WINAPI test_transform_ProcessOutput(IMFTransform *iface, DWORD flags, DWORD count, + MFT_OUTPUT_DATA_BUFFER *data, DWORD *status) { - struct passthrough_mft *impl = impl_from_IMFTransform(iface); - HRESULT hr = S_OK; - UINT32 val = 41; - - if (count != 1) - return E_INVALIDARG; - - EnterCriticalSection(&impl->cs); - - if (impl->sample) - { - hr = IMFSample_GetUINT32(impl->sample, &IID_IMFSample, &val); - - if (impl->index > 0) - { - ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - ok(val == impl->index, "Got unexpected value %u.\n", val); - } - else - { - ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); - } - - 
IMFSample_SetUINT32(impl->sample, &IID_IMFSample, impl->index + 1); - - samples->pSample = impl->sample; - *status = samples[0].dwStatus = 0; - impl->processing_count++; - - impl->sample = NULL; - - hr = S_OK; - } - else - { - hr = MF_E_TRANSFORM_NEED_MORE_INPUT; - } - - LeaveCriticalSection(&impl->cs); - - return hr; + struct test_transform *transform = test_transform_from_IMFTransform(iface); + if (!transform->sample) + return MF_E_TRANSFORM_NEED_MORE_INPUT; + transform->sample_count++; + data->pSample = transform->sample; + transform->sample = NULL; + *status = 0; + return S_OK; } -static const IMFTransformVtbl passthrough_mft_vtbl = -{ - passthrough_mft_QueryInterface, - passthrough_mft_AddRef, - passthrough_mft_Release, - passthrough_mft_GetStreamLimits, - passthrough_mft_GetStreamCount, - passthrough_mft_GetStreamIDs, - passthrough_mft_GetInputStreamInfo, - passthrough_mft_GetOutputStreamInfo, - passthrough_mft_GetAttributes, - passthrough_mft_GetInputStreamAttributes, - passthrough_mft_GetOutputStreamAttributes, - passthrough_mft_DeleteInputStream, - passthrough_mft_AddInputStreams, - passthrough_mft_GetInputAvailableType, - passthrough_mft_GetOutputAvailableType, - passthrough_mft_SetInputType, - passthrough_mft_SetOutputType, - passthrough_mft_GetInputCurrentType, - passthrough_mft_GetOutputCurrentType, - passthrough_mft_GetInputStatus, - passthrough_mft_GetOutputStatus, - passthrough_mft_SetOutputBounds, - passthrough_mft_ProcessEvent, - passthrough_mft_ProcessMessage, - passthrough_mft_ProcessInput, - passthrough_mft_ProcessOutput, +static UINT test_transform_get_sample_count(IMFTransform *iface) +{ + struct test_transform *transform = test_transform_from_IMFTransform(iface); + return transform->sample_count; +} + +static const IMFTransformVtbl test_transform_vtbl = +{ + test_transform_QueryInterface, + test_transform_AddRef, + test_transform_Release, + test_transform_GetStreamLimits, + test_transform_GetStreamCount, + test_transform_GetStreamIDs, + 
test_transform_GetInputStreamInfo, + test_transform_GetOutputStreamInfo, + test_transform_GetAttributes, + test_transform_GetInputStreamAttributes, + test_transform_GetOutputStreamAttributes, + test_transform_DeleteInputStream, + test_transform_AddInputStreams, + test_transform_GetInputAvailableType, + test_transform_GetOutputAvailableType, + test_transform_SetInputType, + test_transform_SetOutputType, + test_transform_GetInputCurrentType, + test_transform_GetOutputCurrentType, + test_transform_GetInputStatus, + test_transform_GetOutputStatus, + test_transform_SetOutputBounds, + test_transform_ProcessEvent, + test_transform_ProcessMessage, + test_transform_ProcessInput, + test_transform_ProcessOutput, }; -HRESULT passthrough_mft_create(UINT32 index, struct passthrough_mft **out) +static HRESULT WINAPI test_transform_create(UINT input_count, IMFMediaType **input_types, + UINT output_count, IMFMediaType **output_types, IMFTransform **out) { - struct passthrough_mft *impl; - - *out = NULL; + struct test_transform *transform; - if (!(impl = calloc(1, sizeof(*impl)))) + if (!(transform = calloc(1, sizeof(*transform)))) return E_OUTOFMEMORY; - - impl->IMFTransform_iface.lpVtbl = &passthrough_mft_vtbl; - impl->index = index; - impl->refcount = 1; - - InitializeCriticalSection(&impl->cs); - - *out = impl; + transform->IMFTransform_iface.lpVtbl = &test_transform_vtbl; + transform->refcount = 1; + + transform->input_count = input_count; + transform->input_types = input_types; + transform->input_type = input_types[0]; + IMFMediaType_AddRef(transform->input_type); + transform->output_count = output_count; + transform->output_types = output_types; + transform->output_type = output_types[0]; + IMFMediaType_AddRef(transform->output_type); + + *out = &transform->IMFTransform_iface; return S_OK; } static void test_effect(void) { - struct passthrough_mft *video_effect = NULL, *video_effect2 = NULL, *audio_effect = NULL, *audio_effect2 = NULL; + IMFTransform *video_effect = NULL, 
*video_effect2 = NULL, *audio_effect = NULL, *audio_effect2 = NULL; + IMFMediaType *video_i420, *video_rgb32, *audio_pcm; IMFMediaEngineEx *media_engine = NULL; struct test_transfer_notify *notify; ID3D11Texture2D *texture = NULL; @@ -1802,8 +1742,8 @@ static void test_effect(void) D3D11_TEXTURE2D_DESC desc; IMFByteStream *stream; IMFMediaSink *sink; + UINT token, count; RECT dst_rect; - UINT token; HRESULT hr; DWORD res; BSTR url; @@ -1812,6 +1752,42 @@ static void test_effect(void) notify = create_transfer_notify(); + hr = MFCreateMediaType(&video_i420); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetGUID(video_i420, &MF_MT_MAJOR_TYPE, &MFMediaType_Video); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetGUID(video_i420, &MF_MT_SUBTYPE, &MFVideoFormat_I420); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetUINT64(video_i420, &MF_MT_FRAME_SIZE, (UINT64)64 << 32 | 64); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + + hr = MFCreateMediaType(&video_rgb32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetGUID(video_rgb32, &MF_MT_MAJOR_TYPE, &MFMediaType_Video); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetGUID(video_rgb32, &MF_MT_SUBTYPE, &MFVideoFormat_ARGB32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetUINT64(video_rgb32, &MF_MT_FRAME_SIZE, (UINT64)64 << 32 | 64); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + + hr = MFCreateMediaType(&audio_pcm); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetGUID(audio_pcm, &MF_MT_MAJOR_TYPE, &MFMediaType_Audio); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetGUID(audio_pcm, &MF_MT_SUBTYPE, &MFAudioFormat_PCM); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + + hr = IMFMediaType_SetUINT32(audio_pcm, &MF_MT_AUDIO_NUM_CHANNELS, 2); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetUINT32(audio_pcm, 
&MF_MT_AUDIO_SAMPLES_PER_SECOND, 44100); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetUINT32(audio_pcm, &MF_MT_AUDIO_BITS_PER_SAMPLE, 32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetUINT32(audio_pcm, &MF_MT_AUDIO_BLOCK_ALIGNMENT, 8); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetUINT32(audio_pcm, &MF_MT_AUDIO_AVG_BYTES_PER_SECOND, 352800); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + if (!(device = create_d3d11_device())) { skip("Failed to create a D3D11 device, skipping tests.\n"); @@ -1825,11 +1801,8 @@ static void test_effect(void) create_media_engine(¬ify->IMFMediaEngineNotify_iface, manager, DXGI_FORMAT_B8G8R8X8_UNORM, &IID_IMFMediaEngineEx, (void **)&media_engine); - IMFDXGIDeviceManager_Release(manager); - - if (!(notify->media_engine = media_engine)) - goto done; + notify->media_engine = media_engine; memset(&desc, 0, sizeof(desc)); desc.Width = 64; @@ -1844,46 +1817,44 @@ static void test_effect(void) hr = IMFMediaEngineEx_RemoveAllEffects(media_engine); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - hr = passthrough_mft_create(0, &video_effect); + hr = test_transform_create(1, &video_rgb32, 1, &video_rgb32, &video_effect); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - - hr = passthrough_mft_create(1, &video_effect2); + hr = test_transform_create(1, &video_i420, 1, &video_i420, &video_effect2); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - hr = IMFMediaEngineEx_InsertVideoEffect(media_engine, (IUnknown *)&video_effect->IMFTransform_iface, FALSE); + hr = IMFMediaEngineEx_InsertVideoEffect(media_engine, (IUnknown *)video_effect, FALSE); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - EXPECT_REF(&video_effect->IMFTransform_iface, 2); + EXPECT_REF(video_effect, 2); - hr = IMFMediaEngineEx_InsertVideoEffect(media_engine, (IUnknown *)&video_effect2->IMFTransform_iface, FALSE); + hr = IMFMediaEngineEx_InsertVideoEffect(media_engine, (IUnknown *)video_effect2, FALSE); ok(hr == S_OK, 
"Unexpected hr %#lx.\n", hr); - EXPECT_REF(&video_effect2->IMFTransform_iface, 2); + EXPECT_REF(video_effect2, 2); hr = IMFMediaEngineEx_RemoveAllEffects(media_engine); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - EXPECT_REF(&video_effect->IMFTransform_iface, 1); - EXPECT_REF(&video_effect2->IMFTransform_iface, 1); + EXPECT_REF(video_effect, 1); + EXPECT_REF(video_effect2, 1); - hr = IMFMediaEngineEx_InsertVideoEffect(media_engine, (IUnknown *)&video_effect->IMFTransform_iface, FALSE); + hr = IMFMediaEngineEx_InsertVideoEffect(media_engine, (IUnknown *)video_effect, FALSE); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - EXPECT_REF(&video_effect->IMFTransform_iface, 2); + EXPECT_REF(video_effect, 2); - hr = IMFMediaEngineEx_InsertVideoEffect(media_engine, (IUnknown *)&video_effect2->IMFTransform_iface, FALSE); + hr = IMFMediaEngineEx_InsertVideoEffect(media_engine, (IUnknown *)video_effect2, FALSE); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - EXPECT_REF(&video_effect2->IMFTransform_iface, 2); + EXPECT_REF(video_effect2, 2); - hr = passthrough_mft_create(0, &audio_effect); + hr = test_transform_create(1, &audio_pcm, 1, &audio_pcm, &audio_effect); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - - hr = passthrough_mft_create(1, &audio_effect2); + hr = test_transform_create(1, &audio_pcm, 1, &audio_pcm, &audio_effect2); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - hr = IMFMediaEngineEx_InsertAudioEffect(media_engine, (IUnknown *)&audio_effect->IMFTransform_iface, FALSE); + hr = IMFMediaEngineEx_InsertAudioEffect(media_engine, (IUnknown *)audio_effect, FALSE); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - EXPECT_REF(&audio_effect->IMFTransform_iface, 2); + EXPECT_REF(audio_effect, 2); - hr = IMFMediaEngineEx_InsertAudioEffect(media_engine, (IUnknown *)&audio_effect2->IMFTransform_iface, FALSE); + hr = IMFMediaEngineEx_InsertAudioEffect(media_engine, (IUnknown *)audio_effect2, FALSE); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - 
EXPECT_REF(&audio_effect2->IMFTransform_iface, 2); + EXPECT_REF(audio_effect2, 2); url = SysAllocString(L"i420-64x64.avi"); hr = IMFMediaEngineEx_SetSourceFromByteStream(media_engine, stream, url); @@ -1899,47 +1870,47 @@ static void test_effect(void) hr = IMFMediaEngineEx_TransferVideoFrame(notify->media_engine, (IUnknown *)texture, NULL, &dst_rect, NULL); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - ok(video_effect->processing_count > 0, "Unexpected processing count %lu.\n", video_effect->processing_count); - ok(video_effect2->processing_count > 0, "Unexpected processing count %lu.\n", video_effect2->processing_count); + count = test_transform_get_sample_count(video_effect); + ok(count > 0, "Unexpected processing count %u.\n", count); + count = test_transform_get_sample_count(video_effect2); + ok(count > 0, "Unexpected processing count %u.\n", count); if (SUCCEEDED(hr = MFCreateAudioRenderer(NULL, &sink))) { - ok(audio_effect->processing_count > 0, "Unexpected processing count %lu.\n", audio_effect->processing_count); - ok(audio_effect2->processing_count > 0, "Unexpected processing count %lu.\n", audio_effect2->processing_count); + count = test_transform_get_sample_count(audio_effect); + ok(count > 0, "Unexpected processing count %u.\n", count); + count = test_transform_get_sample_count(audio_effect2); + ok(count > 0, "Unexpected processing count %u.\n", count); IMFMediaSink_Release(sink); } else if (hr == MF_E_NO_AUDIO_PLAYBACK_DEVICE) { - ok(!audio_effect->processing_count, "Unexpected processing count %lu.\n", audio_effect->processing_count); - ok(!audio_effect2->processing_count, "Unexpected processing count %lu.\n", audio_effect2->processing_count); + count = test_transform_get_sample_count(audio_effect); + ok(!count, "Unexpected processing count %u.\n", count); + count = test_transform_get_sample_count(audio_effect2); + ok(!count, "Unexpected processing count %u.\n", count); } -done: - if (media_engine) - { - IMFMediaEngineEx_Shutdown(media_engine); - - 
hr = IMFMediaEngineEx_RemoveAllEffects(media_engine); - ok(hr == MF_E_SHUTDOWN, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaEngineEx_Shutdown(media_engine); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaEngineEx_RemoveAllEffects(media_engine); + ok(hr == MF_E_SHUTDOWN, "Unexpected hr %#lx.\n", hr); + IMFMediaEngineEx_Release(media_engine); - IMFMediaEngineEx_Release(media_engine); - } + ID3D11Texture2D_Release(texture); - if (texture) - ID3D11Texture2D_Release(texture); - if (device) - ID3D11Device_Release(device); + IMFTransform_Release(audio_effect2); + IMFTransform_Release(audio_effect); + IMFTransform_Release(video_effect2); + IMFTransform_Release(video_effect); - if (audio_effect2) - IMFTransform_Release(&audio_effect2->IMFTransform_iface); - if (audio_effect) - IMFTransform_Release(&audio_effect->IMFTransform_iface); + ID3D11Device_Release(device); - if (video_effect2) - IMFTransform_Release(&video_effect2->IMFTransform_iface); - if (video_effect) - IMFTransform_Release(&video_effect->IMFTransform_iface); +done: + IMFMediaType_Release(audio_pcm); + IMFMediaType_Release(video_rgb32); + IMFMediaType_Release(video_i420); IMFMediaEngineNotify_Release(¬ify->IMFMediaEngineNotify_iface); } diff --git a/dlls/mfmediaengine/video_frame_sink.c b/dlls/mfmediaengine/video_frame_sink.c index 025b2dd341a..57d4172bda8 100644 --- a/dlls/mfmediaengine/video_frame_sink.c +++ b/dlls/mfmediaengine/video_frame_sink.c @@ -27,6 +27,9 @@ #include "mediaengine_private.h" +#include "initguid.h" +#include "evr.h" + #include "wine/debug.h" #include "wine/list.h" @@ -72,7 +75,9 @@ struct video_frame_sink IMFMediaEventGenerator IMFMediaEventGenerator_iface; IMFStreamSink IMFStreamSink_iface; IMFMediaTypeHandler IMFMediaTypeHandler_iface; + IMFGetService IMFGetService_iface; LONG refcount; + IUnknown *device_manager; IMFMediaType *media_type; IMFMediaType *current_media_type; BOOL is_shut_down; @@ -89,7 +94,6 @@ struct video_frame_sink int sample_read_index; BOOL 
sample_request_pending; BOOL sample_presented; - BOOL eos; CRITICAL_SECTION cs; }; @@ -126,6 +130,11 @@ static struct video_frame_sink *impl_from_IMFMediaTypeHandler(IMFMediaTypeHandle return CONTAINING_RECORD(iface, struct video_frame_sink, IMFMediaTypeHandler_iface); } +static struct video_frame_sink *impl_from_IMFGetService(IMFGetService *iface) +{ + return CONTAINING_RECORD(iface, struct video_frame_sink, IMFGetService_iface); +} + static void video_frame_sink_samples_release(struct video_frame_sink *sink) { for (int i = 0; i < ARRAYSIZE(sink->sample); i++) @@ -162,6 +171,10 @@ static HRESULT WINAPI video_frame_sink_stream_QueryInterface(IMFStreamSink *ifac { *obj = &sink->IMFMediaTypeHandler_iface; } + else if (IsEqualIID(riid, &IID_IMFGetService)) + { + *obj = &sink->IMFGetService_iface; + } else { WARN("Unsupported %s.\n", debugstr_guid(riid)); @@ -287,7 +300,7 @@ static HRESULT WINAPI video_frame_sink_stream_GetMediaTypeHandler(IMFStreamSink /* must be called with critical section held */ static void video_frame_sink_stream_request_sample(struct video_frame_sink *sink) { - if (sink->sample_request_pending || sink->eos) + if (sink->sample_request_pending) return; IMFStreamSink_QueueEvent(&sink->IMFStreamSink_iface, MEStreamSinkRequestSample, &GUID_NULL, S_OK, NULL); @@ -575,6 +588,49 @@ static const IMFMediaTypeHandlerVtbl video_frame_sink_stream_type_handler_vtbl = video_frame_sink_stream_type_handler_GetMajorType, }; +static HRESULT WINAPI video_frame_sink_stream_get_service_QueryInterface(IMFGetService *iface, REFIID riid, + void **obj) +{ + struct video_frame_sink *sink = impl_from_IMFGetService(iface); + return IMFStreamSink_QueryInterface(&sink->IMFStreamSink_iface, riid, obj); +} + +static ULONG WINAPI video_frame_sink_stream_get_service_AddRef(IMFGetService *iface) +{ + struct video_frame_sink *sink = impl_from_IMFGetService(iface); + return IMFStreamSink_AddRef(&sink->IMFStreamSink_iface); +} + +static ULONG WINAPI 
video_frame_sink_stream_get_service_Release(IMFGetService *iface) +{ + struct video_frame_sink *sink = impl_from_IMFGetService(iface); + return IMFStreamSink_Release(&sink->IMFStreamSink_iface); +} + +static HRESULT WINAPI video_frame_sink_stream_get_service_GetService(IMFGetService *iface, REFGUID service, REFIID riid, + void **obj) +{ + struct video_frame_sink *sink = impl_from_IMFGetService(iface); + + if (IsEqualGUID(service, &MR_VIDEO_ACCELERATION_SERVICE)) + { + if (sink->device_manager) + return IUnknown_QueryInterface(sink->device_manager, riid, obj); + return E_NOINTERFACE; + } + + FIXME("Unsupported service %s, riid %s.\n", debugstr_guid(service), debugstr_guid(riid)); + return MF_E_UNSUPPORTED_SERVICE; +} + +static const IMFGetServiceVtbl video_frame_sink_stream_get_service_vtbl = +{ + video_frame_sink_stream_get_service_QueryInterface, + video_frame_sink_stream_get_service_AddRef, + video_frame_sink_stream_get_service_Release, + video_frame_sink_stream_get_service_GetService, +}; + static HRESULT WINAPI video_frame_sink_QueryInterface(IMFMediaSink *iface, REFIID riid, void **obj) { struct video_frame_sink *sink = impl_from_IMFMediaSink(iface); @@ -628,6 +684,8 @@ static ULONG WINAPI video_frame_sink_Release(IMFMediaSink *iface) if (sink->current_media_type) IMFMediaType_Release(sink->current_media_type); IMFMediaType_Release(sink->media_type); + if (sink->device_manager) + IUnknown_Release(sink->device_manager); if (sink->event_queue) IMFMediaEventQueue_Release(sink->event_queue); if (sink->clock) @@ -1057,7 +1115,7 @@ static const IMFClockStateSinkVtbl video_frame_sink_clock_sink_vtbl = video_frame_sink_clock_sink_OnClockSetRate, }; -HRESULT create_video_frame_sink(IMFMediaType *media_type, IMFAsyncCallback *events_callback, struct video_frame_sink **sink) +HRESULT create_video_frame_sink(IMFMediaType *media_type, IUnknown *device_manager, IMFAsyncCallback *events_callback, struct video_frame_sink **sink) { struct video_frame_sink *object; HRESULT hr; 
@@ -1070,8 +1128,11 @@ HRESULT create_video_frame_sink(IMFMediaType *media_type, IMFAsyncCallback *even object->IMFMediaEventGenerator_iface.lpVtbl = &video_frame_sink_events_vtbl; object->IMFStreamSink_iface.lpVtbl = &video_frame_sink_stream_vtbl; object->IMFMediaTypeHandler_iface.lpVtbl = &video_frame_sink_stream_type_handler_vtbl; + object->IMFGetService_iface.lpVtbl = &video_frame_sink_stream_get_service_vtbl; object->refcount = 1; object->rate = 1.0f; + if ((object->device_manager = device_manager)) + IUnknown_AddRef(object->device_manager); object->media_type = media_type; IMFAsyncCallback_AddRef(object->callback = events_callback); IMFMediaType_AddRef(object->media_type); @@ -1188,11 +1249,6 @@ HRESULT video_frame_sink_get_pts(struct video_frame_sink *sink, MFTIME clocktime return hr; } -void video_frame_sink_notify_end_of_presentation(struct video_frame_sink *sink) -{ - sink->eos = TRUE; -} - ULONG video_frame_sink_release(struct video_frame_sink *sink) { return video_frame_sink_Release(&sink->IMFMediaSink_iface); diff --git a/dlls/mfplat/buffer.c b/dlls/mfplat/buffer.c index b7f32f12cdc..d1fd2a99529 100644 --- a/dlls/mfplat/buffer.c +++ b/dlls/mfplat/buffer.c @@ -1004,6 +1004,7 @@ static void dxgi_surface_buffer_unmap(struct buffer *buffer, MF2DBuffer_LockFlag { ID3D11DeviceContext_CopySubresourceRegion(immediate_context, (ID3D11Resource *)buffer->dxgi_surface.texture, buffer->dxgi_surface.sub_resource_idx, 0, 0, 0, (ID3D11Resource *)buffer->dxgi_surface.rb_texture, 0, NULL); + ID3D11DeviceContext_Flush(immediate_context); } ID3D11DeviceContext_Release(immediate_context); @@ -1688,8 +1689,10 @@ HRESULT WINAPI MFCreateMediaBufferFromMediaType(IMFMediaType *media_type, LONGLO { UINT32 length = 0, block_alignment; LONGLONG avg_length; + GUID major, subtype; + UINT64 frame_size; + BOOL is_yuv; HRESULT hr; - GUID major; TRACE("%p, %s, %lu, %lu, %p.\n", media_type, debugstr_time(duration), min_length, alignment, buffer); @@ -1731,8 +1734,24 @@ HRESULT WINAPI 
MFCreateMediaBufferFromMediaType(IMFMediaType *media_type, LONGLO return create_1d_buffer(length, alignment - 1, buffer); } - else - FIXME("Major type %s is not supported.\n", debugstr_guid(&major)); + else if (IsEqualGUID(&major, &MFMediaType_Video) + && SUCCEEDED(hr = IMFMediaType_GetGUID(media_type, &MF_MT_SUBTYPE, &subtype)) + && SUCCEEDED(hr = IMFMediaType_GetUINT64(media_type, &MF_MT_FRAME_SIZE, &frame_size)) + && mf_format_get_stride(&subtype, frame_size >> 32, &is_yuv)) + { + BOOL bottom_up = FALSE; + UINT32 stride; - return E_NOTIMPL; + if (!is_yuv && SUCCEEDED(IMFMediaType_GetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, &stride))) + bottom_up = (int)stride < 0; + + if (SUCCEEDED(hr = create_2d_buffer(frame_size >> 32, (UINT32)frame_size, subtype.Data1, bottom_up, buffer))) + return hr; + } + + if (!min_length) + return FAILED(hr) ? hr : E_INVALIDARG; + + alignment = max(16, alignment); + return create_1d_buffer(min_length, alignment - 1, buffer); } diff --git a/dlls/mfplat/main.c b/dlls/mfplat/main.c index 695b86548f3..1c5e16cdbbc 100644 --- a/dlls/mfplat/main.c +++ b/dlls/mfplat/main.c @@ -6298,18 +6298,11 @@ static HRESULT resolver_create_gstreamer_handler(IMFByteStreamHandler **handler) static const GUID CLSID_GStreamerByteStreamHandler = {0x317df618, 0x5e5a, 0x468a, {0x9f, 0x15, 0xd8, 0x27, 0xa9, 0xa0, 0x81, 0x62}}; static const GUID CLSID_GStreamerByteStreamHandler2 = {0x317df619, 0x5e5a, 0x468a, {0x9f, 0x15, 0xd8, 0x27, 0xa9, 0xa0, 0x81, 0x62}}; - const char *env = getenv("WINE_NEW_MEDIA_SOURCE"), *sgi = getenv("SteamGameId"); - if (!env && sgi) - { - if (!strcmp(sgi, "399810") /* Call of Cthulhu */) env = "1"; - if (!strcmp(sgi, "606880") /* Greedfall */) env = "1"; - if (!strcmp(sgi, "692850") /* Bloodstained */) env = "1"; - if (!strcmp(sgi, "782630") /* Twisted Sails */) env = "1"; - if (!strcmp(sgi, "789910") /* Planet of the Apes: Last Frontier */) env = "1"; - } - if (env && atoi(env)) return CoCreateInstance(&CLSID_GStreamerByteStreamHandler2, 
NULL, CLSCTX_INPROC_SERVER, &IID_IMFByteStreamHandler, (void **)handler); + /* CW-Bug-Id: 20833 keep old media source around if we need a comparison point */ + const char *env = getenv("WINE_NEW_MEDIA_SOURCE"); + if (env && !atoi(env)) return CoCreateInstance(&CLSID_GStreamerByteStreamHandler, NULL, CLSCTX_INPROC_SERVER, &IID_IMFByteStreamHandler, (void **)handler); - return CoCreateInstance(&CLSID_GStreamerByteStreamHandler, NULL, CLSCTX_INPROC_SERVER, &IID_IMFByteStreamHandler, (void **)handler); + return CoCreateInstance(&CLSID_GStreamerByteStreamHandler2, NULL, CLSCTX_INPROC_SERVER, &IID_IMFByteStreamHandler, (void **)handler); } static HRESULT resolver_get_bytestream_handler(IMFByteStream *stream, const WCHAR *url, DWORD flags, diff --git a/dlls/mfplat/mediatype.c b/dlls/mfplat/mediatype.c index ea863eac5f3..3944d64849e 100644 --- a/dlls/mfplat/mediatype.c +++ b/dlls/mfplat/mediatype.c @@ -21,14 +21,17 @@ #include "mfplat_private.h" #include +#include "dxva.h" #include "dxva2api.h" #include "uuids.h" #include "strmif.h" #include "initguid.h" +#include "dvdmedia.h" #include "ks.h" #include "ksmedia.h" #include "amvideo.h" #include "wmcodecdsp.h" +#include "wmsdkidl.h" WINE_DEFAULT_DEBUG_CHANNEL(mfplat); @@ -2698,13 +2701,14 @@ struct uncompressed_video_format static int __cdecl uncompressed_video_format_compare(const void *a, const void *b) { - const GUID *guid = a; - const struct uncompressed_video_format *format = b; - return memcmp(guid, format->subtype, sizeof(*guid)); + const struct uncompressed_video_format *a_format = a, *b_format = b; + return memcmp(a_format->subtype, b_format->subtype, sizeof(GUID)); } -static const struct uncompressed_video_format video_formats[] = +static struct uncompressed_video_format video_formats[] = { + { &MFVideoFormat_RGB1, 1, 0, 1, 0, BI_RGB }, + { &MFVideoFormat_RGB4, 4, 0, 1, 0, BI_RGB }, { &MFVideoFormat_RGB24, 24, 3, 1, 0, BI_RGB }, { &MFVideoFormat_ARGB32, 32, 3, 1, 0, BI_RGB }, { &MFVideoFormat_RGB32, 32, 3, 1, 0, 
BI_RGB }, @@ -2738,14 +2742,26 @@ static const struct uncompressed_video_format video_formats[] = { &MEDIASUBTYPE_RGB32, 32, 3, 1, 0, BI_RGB }, }; +static BOOL WINAPI mf_video_formats_init(INIT_ONCE *once, void *param, void **context) +{ + qsort(video_formats, ARRAY_SIZE(video_formats), sizeof(*video_formats), uncompressed_video_format_compare); + return TRUE; +} + static struct uncompressed_video_format *mf_get_video_format(const GUID *subtype) { - return bsearch(subtype, video_formats, ARRAY_SIZE(video_formats), sizeof(*video_formats), + static INIT_ONCE init_once = INIT_ONCE_STATIC_INIT; + struct uncompressed_video_format key = {.subtype = subtype}; + + InitOnceExecuteOnce(&init_once, mf_video_formats_init, NULL, NULL); + + return bsearch(&key, video_formats, ARRAY_SIZE(video_formats), sizeof(*video_formats), uncompressed_video_format_compare); } static unsigned int mf_get_stride_for_format(const struct uncompressed_video_format *format, unsigned int width) { + if (format->bpp < 8) return (width * format->bpp) / 8; return (width * (format->bpp / 8) + format->alignment) & ~format->alignment; } @@ -2959,15 +2975,21 @@ HRESULT WINAPI MFUnwrapMediaType(IMFMediaType *wrapper, IMFMediaType **ret) return S_OK; } +static UINT32 media_type_get_uint32(IMFMediaType *media_type, REFGUID guid) +{ + UINT32 value; + return SUCCEEDED(IMFMediaType_GetUINT32(media_type, guid, &value)) ? 
value : 0; +} + /*********************************************************************** * MFCreateWaveFormatExFromMFMediaType (mfplat.@) */ HRESULT WINAPI MFCreateWaveFormatExFromMFMediaType(IMFMediaType *mediatype, WAVEFORMATEX **ret_format, UINT32 *size, UINT32 flags) { - UINT32 value, extra_size = 0, user_size; + UINT32 extra_size = 0, user_size; WAVEFORMATEX *format; - GUID major, subtype; + GUID major, subtype, basetype = MFAudioFormat_Base; void *user_data; HRESULT hr; @@ -2989,6 +3011,19 @@ HRESULT WINAPI MFCreateWaveFormatExFromMFMediaType(IMFMediaType *mediatype, WAVE user_size = 0; } + if (media_type_get_uint32(mediatype, &MF_MT_AUDIO_NUM_CHANNELS) > 2 + && SUCCEEDED(IMFMediaType_GetItem(mediatype, &MF_MT_AUDIO_CHANNEL_MASK, NULL))) + { + if (SUCCEEDED(IMFMediaType_GetItem(mediatype, &MF_MT_AUDIO_VALID_BITS_PER_SAMPLE, NULL))) + flags = MFWaveFormatExConvertFlag_ForceExtensible; + if (SUCCEEDED(IMFMediaType_GetItem(mediatype, &MF_MT_AUDIO_SAMPLES_PER_BLOCK, NULL))) + flags = MFWaveFormatExConvertFlag_ForceExtensible; + } + + basetype.Data1 = subtype.Data1; + if (subtype.Data1 >> 16 || !IsEqualGUID(&subtype, &basetype)) + flags = MFWaveFormatExConvertFlag_ForceExtensible; + if (flags == MFWaveFormatExConvertFlag_ForceExtensible) extra_size = sizeof(WAVEFORMATEXTENSIBLE) - sizeof(*format); @@ -3001,16 +3036,11 @@ HRESULT WINAPI MFCreateWaveFormatExFromMFMediaType(IMFMediaType *mediatype, WAVE format->cbSize = user_size + extra_size; user_data = format + 1; - if (SUCCEEDED(IMFMediaType_GetUINT32(mediatype, &MF_MT_AUDIO_NUM_CHANNELS, &value))) - format->nChannels = value; - if (SUCCEEDED(IMFMediaType_GetUINT32(mediatype, &MF_MT_AUDIO_SAMPLES_PER_SECOND, &value))) - format->nSamplesPerSec = value; - if (SUCCEEDED(IMFMediaType_GetUINT32(mediatype, &MF_MT_AUDIO_AVG_BYTES_PER_SECOND, &value))) - format->nAvgBytesPerSec = value; - if (SUCCEEDED(IMFMediaType_GetUINT32(mediatype, &MF_MT_AUDIO_BLOCK_ALIGNMENT, &value))) - format->nBlockAlign = value; - if 
(SUCCEEDED(IMFMediaType_GetUINT32(mediatype, &MF_MT_AUDIO_BITS_PER_SAMPLE, &value))) - format->wBitsPerSample = value; + format->nChannels = media_type_get_uint32(mediatype, &MF_MT_AUDIO_NUM_CHANNELS); + format->nSamplesPerSec = media_type_get_uint32(mediatype, &MF_MT_AUDIO_SAMPLES_PER_SECOND); + format->nAvgBytesPerSec = media_type_get_uint32(mediatype, &MF_MT_AUDIO_AVG_BYTES_PER_SECOND); + format->nBlockAlign = media_type_get_uint32(mediatype, &MF_MT_AUDIO_BLOCK_ALIGNMENT); + format->wBitsPerSample = media_type_get_uint32(mediatype, &MF_MT_AUDIO_BITS_PER_SAMPLE); if (flags == MFWaveFormatExConvertFlag_ForceExtensible) { @@ -3020,11 +3050,11 @@ HRESULT WINAPI MFCreateWaveFormatExFromMFMediaType(IMFMediaType *mediatype, WAVE format_ext->SubFormat = subtype; user_data = format_ext + 1; - if (SUCCEEDED(IMFMediaType_GetUINT32(mediatype, &MF_MT_AUDIO_VALID_BITS_PER_SAMPLE, &value))) - format_ext->Samples.wSamplesPerBlock = value; + format_ext->Samples.wValidBitsPerSample = media_type_get_uint32(mediatype, &MF_MT_AUDIO_VALID_BITS_PER_SAMPLE); + format_ext->Samples.wSamplesPerBlock = media_type_get_uint32(mediatype, &MF_MT_AUDIO_SAMPLES_PER_BLOCK); - if (SUCCEEDED(IMFMediaType_GetUINT32(mediatype, &MF_MT_AUDIO_CHANNEL_MASK, &value))) - format_ext->dwChannelMask = value; + if (SUCCEEDED(IMFMediaType_GetItem(mediatype, &MF_MT_AUDIO_CHANNEL_MASK, NULL))) + format_ext->dwChannelMask = media_type_get_uint32(mediatype, &MF_MT_AUDIO_CHANNEL_MASK); else if (format_ext->Format.nChannels < ARRAY_SIZE(default_channel_mask)) format_ext->dwChannelMask = default_channel_mask[format_ext->Format.nChannels]; } @@ -3067,6 +3097,8 @@ static void mediatype_set_blob(IMFMediaType *mediatype, const GUID *attr, const HRESULT WINAPI MFInitMediaTypeFromWaveFormatEx(IMFMediaType *mediatype, const WAVEFORMATEX *format, UINT32 size) { const WAVEFORMATEXTENSIBLE *wfex = (const WAVEFORMATEXTENSIBLE *)format; + const void *user_data; + int user_data_size; GUID subtype; HRESULT hr; @@ -3091,6 +3123,9 @@ 
HRESULT WINAPI MFInitMediaTypeFromWaveFormatEx(IMFMediaType *mediatype, const WA if (format->wBitsPerSample && wfex->Samples.wValidBitsPerSample) mediatype_set_uint32(mediatype, &MF_MT_AUDIO_VALID_BITS_PER_SAMPLE, wfex->Samples.wValidBitsPerSample, &hr); + + user_data_size = format->cbSize - sizeof(WAVEFORMATEXTENSIBLE) + sizeof(WAVEFORMATEX); + user_data = wfex + 1; } else { @@ -3098,6 +3133,8 @@ HRESULT WINAPI MFInitMediaTypeFromWaveFormatEx(IMFMediaType *mediatype, const WA subtype.Data1 = format->wFormatTag; mediatype_set_uint32(mediatype, &MF_MT_AUDIO_PREFER_WAVEFORMATEX, 1, &hr); + user_data_size = format->cbSize; + user_data = format + 1; } mediatype_set_guid(mediatype, &MF_MT_SUBTYPE, &subtype, &hr); @@ -3131,8 +3168,8 @@ HRESULT WINAPI MFInitMediaTypeFromWaveFormatEx(IMFMediaType *mediatype, const WA mediatype_set_uint32(mediatype, &MF_MT_AAC_PAYLOAD_TYPE, info->wPayloadType, &hr); } - if (format->cbSize && format->wFormatTag != WAVE_FORMAT_EXTENSIBLE) - mediatype_set_blob(mediatype, &MF_MT_USER_DATA, (const UINT8 *)(format + 1), format->cbSize, &hr); + if (user_data_size > 0) + mediatype_set_blob(mediatype, &MF_MT_USER_DATA, user_data, user_data_size, &hr); return hr; } @@ -3228,17 +3265,20 @@ HRESULT WINAPI MFCreateAMMediaTypeFromMFMediaType(IMFMediaType *media_type, GUID */ HRESULT WINAPI MFCreateMFVideoFormatFromMFMediaType(IMFMediaType *media_type, MFVIDEOFORMAT **video_format, UINT32 *size) { - UINT32 flags, palette_size = 0, value; + UINT32 palette_size = 0, user_data_size = 0; MFVIDEOFORMAT *format; INT32 stride; GUID guid; TRACE("%p, %p, %p.\n", media_type, video_format, size); - *size = sizeof(*format); - if (SUCCEEDED(IMFMediaType_GetBlobSize(media_type, &MF_MT_PALETTE, &palette_size))) - *size += palette_size; + *size = offsetof(MFVIDEOFORMAT, surfaceInfo.Palette[palette_size / sizeof(MFPaletteEntry) + 1]); + else + *size = sizeof(*format); + + if (SUCCEEDED(IMFMediaType_GetBlobSize(media_type, &MF_MT_USER_DATA, &user_data_size))) + *size += 
user_data_size; if (!(format = CoTaskMemAlloc(*size))) return E_OUTOFMEMORY; @@ -3260,40 +3300,35 @@ HRESULT WINAPI MFCreateMFVideoFormatFromMFMediaType(IMFMediaType *media_type, MF media_type_get_ratio(media_type, &MF_MT_FRAME_RATE, &format->videoInfo.FramesPerSecond.Numerator, &format->videoInfo.FramesPerSecond.Denominator); - IMFMediaType_GetUINT32(media_type, &MF_MT_VIDEO_CHROMA_SITING, &format->videoInfo.SourceChromaSubsampling); - IMFMediaType_GetUINT32(media_type, &MF_MT_INTERLACE_MODE, &format->videoInfo.InterlaceMode); - IMFMediaType_GetUINT32(media_type, &MF_MT_TRANSFER_FUNCTION, &format->videoInfo.TransferFunction); - IMFMediaType_GetUINT32(media_type, &MF_MT_VIDEO_PRIMARIES, &format->videoInfo.ColorPrimaries); - IMFMediaType_GetUINT32(media_type, &MF_MT_YUV_MATRIX, &format->videoInfo.TransferMatrix); - IMFMediaType_GetUINT32(media_type, &MF_MT_VIDEO_LIGHTING, &format->videoInfo.SourceLighting); - IMFMediaType_GetUINT32(media_type, &MF_MT_VIDEO_NOMINAL_RANGE, &format->videoInfo.NominalRange); + format->videoInfo.SourceChromaSubsampling = media_type_get_uint32(media_type, &MF_MT_VIDEO_CHROMA_SITING); + format->videoInfo.InterlaceMode = media_type_get_uint32(media_type, &MF_MT_INTERLACE_MODE); + format->videoInfo.TransferFunction = media_type_get_uint32(media_type, &MF_MT_TRANSFER_FUNCTION); + format->videoInfo.ColorPrimaries = media_type_get_uint32(media_type, &MF_MT_VIDEO_PRIMARIES); + format->videoInfo.TransferMatrix = media_type_get_uint32(media_type, &MF_MT_YUV_MATRIX); + format->videoInfo.SourceLighting = media_type_get_uint32(media_type, &MF_MT_VIDEO_LIGHTING); + format->videoInfo.NominalRange = media_type_get_uint32(media_type, &MF_MT_VIDEO_NOMINAL_RANGE); IMFMediaType_GetBlob(media_type, &MF_MT_GEOMETRIC_APERTURE, (UINT8 *)&format->videoInfo.GeometricAperture, sizeof(format->videoInfo.GeometricAperture), NULL); IMFMediaType_GetBlob(media_type, &MF_MT_MINIMUM_DISPLAY_APERTURE, (UINT8 *)&format->videoInfo.MinimumDisplayAperture, 
sizeof(format->videoInfo.MinimumDisplayAperture), NULL); /* Video flags. */ - if (SUCCEEDED(IMFMediaType_GetUINT32(media_type, &MF_MT_PAD_CONTROL_FLAGS, &flags))) - format->videoInfo.VideoFlags |= flags; - if (SUCCEEDED(IMFMediaType_GetUINT32(media_type, &MF_MT_SOURCE_CONTENT_HINT, &flags))) - format->videoInfo.VideoFlags |= flags; - if (SUCCEEDED(IMFMediaType_GetUINT32(media_type, &MF_MT_DRM_FLAGS, &flags))) - format->videoInfo.VideoFlags |= flags; - if (SUCCEEDED(IMFMediaType_GetUINT32(media_type, &MF_MT_PAN_SCAN_ENABLED, &flags)) && !!flags) + format->videoInfo.VideoFlags |= media_type_get_uint32(media_type, &MF_MT_PAD_CONTROL_FLAGS) & MFVideoFlag_PAD_TO_Mask; + format->videoInfo.VideoFlags |= (media_type_get_uint32(media_type, &MF_MT_SOURCE_CONTENT_HINT) << 2) & MFVideoFlag_SrcContentHintMask; + format->videoInfo.VideoFlags |= (media_type_get_uint32(media_type, &MF_MT_DRM_FLAGS) << 5) & (MFVideoFlag_AnalogProtected | MFVideoFlag_DigitallyProtected); + if (media_type_get_uint32(media_type, &MF_MT_PAN_SCAN_ENABLED)) { format->videoInfo.VideoFlags |= MFVideoFlag_PanScanEnabled; IMFMediaType_GetBlob(media_type, &MF_MT_PAN_SCAN_APERTURE, (UINT8 *)&format->videoInfo.PanScanAperture, sizeof(format->videoInfo.PanScanAperture), NULL); } - if (SUCCEEDED(IMFMediaType_GetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, (UINT32 *)&stride)) && stride < 0) + stride = media_type_get_uint32(media_type, &MF_MT_DEFAULT_STRIDE); + if (stride < 0) format->videoInfo.VideoFlags |= MFVideoFlag_BottomUpLinearRep; - if (SUCCEEDED(IMFMediaType_GetUINT32(media_type, &MF_MT_AVG_BITRATE, &value))) - format->compressedInfo.AvgBitrate = value; - if (SUCCEEDED(IMFMediaType_GetUINT32(media_type, &MF_MT_AVG_BIT_ERROR_RATE, &value))) - format->compressedInfo.AvgBitErrorRate = value; - if (SUCCEEDED(IMFMediaType_GetUINT32(media_type, &MF_MT_MAX_KEYFRAME_SPACING, &value))) - format->compressedInfo.MaxKeyFrameSpacing = value; + format->compressedInfo.AvgBitrate = media_type_get_uint32(media_type, 
&MF_MT_AVG_BITRATE); + format->compressedInfo.AvgBitErrorRate = media_type_get_uint32(media_type, &MF_MT_AVG_BIT_ERROR_RATE); + format->compressedInfo.MaxKeyFrameSpacing = media_type_get_uint32(media_type, &MF_MT_MAX_KEYFRAME_SPACING); /* Palette. */ if (palette_size) @@ -3302,6 +3337,11 @@ HRESULT WINAPI MFCreateMFVideoFormatFromMFMediaType(IMFMediaType *media_type, MF IMFMediaType_GetBlob(media_type, &MF_MT_PALETTE, (UINT8 *)format->surfaceInfo.Palette, palette_size, NULL); } + if (user_data_size) + { + IMFMediaType_GetBlob(media_type, &MF_MT_USER_DATA, (UINT8 *)format + *size - user_data_size, user_data_size, NULL); + } + return S_OK; } @@ -3340,15 +3380,45 @@ HRESULT WINAPI MFConvertColorInfoToDXVA(DWORD *dxva_info, const MFVIDEOFORMAT *f struct frame_rate { - UINT64 key; - UINT64 value; + UINT64 time; + UINT64 rate; +}; + +static const struct frame_rate known_rates[] = +{ +#define KNOWN_RATE(ft,n,d) { ft, ((UINT64)n << 32) | d } + KNOWN_RATE(417188, 24000, 1001), + KNOWN_RATE(416667, 24, 1), + KNOWN_RATE(400000, 25, 1), + KNOWN_RATE(333667, 30000, 1001), + KNOWN_RATE(333333, 30, 1), + KNOWN_RATE(200000, 50, 1), + KNOWN_RATE(166833, 60000, 1001), + KNOWN_RATE(166667, 60, 1), +#undef KNOWN_RATE }; -static int __cdecl frame_rate_compare(const void *a, const void *b) +static const struct frame_rate *known_rate_from_rate(UINT64 rate) +{ + UINT i; + for (i = 0; i < ARRAY_SIZE(known_rates); i++) + { + if (rate == known_rates[i].rate) + return known_rates + i; + } + return NULL; +} + +static const struct frame_rate *known_rate_from_time(UINT64 time) { - const UINT64 *key = a; - const struct frame_rate *known_rate = b; - return *key == known_rate->key ? 0 : ( *key < known_rate->key ? 
1 : -1 ); + UINT i; + for (i = 0; i < ARRAY_SIZE(known_rates); i++) + { + if (time >= known_rates[i].time - 30 + && time <= known_rates[i].time + 30) + return known_rates + i; + } + return NULL; } /*********************************************************************** @@ -3356,29 +3426,13 @@ static int __cdecl frame_rate_compare(const void *a, const void *b) */ HRESULT WINAPI MFFrameRateToAverageTimePerFrame(UINT32 numerator, UINT32 denominator, UINT64 *avgframetime) { - static const struct frame_rate known_rates[] = - { -#define KNOWN_RATE(n,d,ft) { ((UINT64)n << 32) | d, ft } - KNOWN_RATE(60000, 1001, 166833), - KNOWN_RATE(30000, 1001, 333667), - KNOWN_RATE(24000, 1001, 417188), - KNOWN_RATE(60, 1, 166667), - KNOWN_RATE(50, 1, 200000), - KNOWN_RATE(30, 1, 333333), - KNOWN_RATE(25, 1, 400000), - KNOWN_RATE(24, 1, 416667), -#undef KNOWN_RATE - }; UINT64 rate = ((UINT64)numerator << 32) | denominator; const struct frame_rate *entry; TRACE("%u, %u, %p.\n", numerator, denominator, avgframetime); - if ((entry = bsearch(&rate, known_rates, ARRAY_SIZE(known_rates), sizeof(*known_rates), - frame_rate_compare))) - { - *avgframetime = entry->value; - } + if ((entry = known_rate_from_rate(rate))) + *avgframetime = entry->time; else *avgframetime = numerator ? 
denominator * (UINT64)10000000 / numerator : 0; @@ -3404,29 +3458,15 @@ static unsigned int get_gcd(unsigned int a, unsigned int b) */ HRESULT WINAPI MFAverageTimePerFrameToFrameRate(UINT64 avgtime, UINT32 *numerator, UINT32 *denominator) { - static const struct frame_rate known_rates[] = - { -#define KNOWN_RATE(ft,n,d) { ft, ((UINT64)n << 32) | d } - KNOWN_RATE(417188, 24000, 1001), - KNOWN_RATE(416667, 24, 1), - KNOWN_RATE(400000, 25, 1), - KNOWN_RATE(333667, 30000, 1001), - KNOWN_RATE(333333, 30, 1), - KNOWN_RATE(200000, 50, 1), - KNOWN_RATE(166833, 60000, 1001), - KNOWN_RATE(166667, 60, 1), -#undef KNOWN_RATE - }; const struct frame_rate *entry; unsigned int gcd; TRACE("%s, %p, %p.\n", wine_dbgstr_longlong(avgtime), numerator, denominator); - if ((entry = bsearch(&avgtime, known_rates, ARRAY_SIZE(known_rates), sizeof(*known_rates), - frame_rate_compare))) + if ((entry = known_rate_from_time(avgtime))) { - *numerator = entry->value >> 32; - *denominator = entry->value; + *numerator = entry->rate >> 32; + *denominator = entry->rate; } else if (avgtime) { @@ -3594,6 +3634,8 @@ DXGI_FORMAT WINAPI MFMapDX9FormatToDXGIFormat(DWORD format) return DXGI_FORMAT_B8G8R8A8_UNORM; case D3DFMT_X8R8G8B8: return DXGI_FORMAT_B8G8R8X8_UNORM; + case D3DFMT_A8B8G8R8: + return DXGI_FORMAT_R8G8B8A8_UNORM; case MAKEFOURCC('A','Y','U','V'): return DXGI_FORMAT_AYUV; case MAKEFOURCC('Y','4','1','0'): @@ -3624,8 +3666,6 @@ DXGI_FORMAT WINAPI MFMapDX9FormatToDXGIFormat(DWORD format) return DXGI_FORMAT_P8; case D3DFMT_A8P8: return DXGI_FORMAT_A8P8; - case D3DFMT_A8B8G8R8: - return DXGI_FORMAT_R8G8B8A8_UNORM; default: return DXGI_FORMAT_UNKNOWN; } @@ -3723,6 +3763,30 @@ static const GUID * get_mf_subtype_for_am_subtype(const GUID *subtype) return subtype; } +HRESULT WINAPI MFCreateVideoMediaType(const MFVIDEOFORMAT *format, IMFVideoMediaType **media_type) +{ + struct media_type *object; + HRESULT hr; + + TRACE("%p, %p.\n", format, media_type); + + if (!media_type) + return E_INVALIDARG; + + 
if (FAILED(hr = create_media_type(&object))) + return hr; + + if (FAILED(hr = MFInitMediaTypeFromMFVideoFormat(&object->IMFMediaType_iface, format, format->dwSize))) + { + IMFMediaType_Release(&object->IMFMediaType_iface); + return hr; + } + + *media_type = &object->IMFVideoMediaType_iface; + + return hr; +} + /*********************************************************************** * MFCreateVideoMediaTypeFromVideoInfoHeader (mfplat.@) */ @@ -3737,23 +3801,131 @@ HRESULT WINAPI MFCreateVideoMediaTypeFromVideoInfoHeader(const KS_VIDEOINFOHEADE } /*********************************************************************** - * MFInitMediaTypeFromVideoInfoHeader (mfplat.@) + * MFInitMediaTypeFromMFVideoFormat (mfplat.@) */ -HRESULT WINAPI MFInitMediaTypeFromVideoInfoHeader(IMFMediaType *media_type, const VIDEOINFOHEADER *vih, UINT32 size, +HRESULT WINAPI MFInitMediaTypeFromMFVideoFormat(IMFMediaType *media_type, const MFVIDEOFORMAT *format, UINT32 size) +{ + UINT32 stride, sample_size, palette_size, user_data_size, value; + struct uncompressed_video_format *video_format; + const void *user_data; + HRESULT hr = S_OK; + + TRACE("%p, %p, %u\n", media_type, format, size); + + if (!format || size < sizeof(*format) || format->dwSize != size) + return E_INVALIDARG; + if (size < offsetof(MFVIDEOFORMAT, surfaceInfo.Palette[format->surfaceInfo.PaletteEntries + 1])) + return E_INVALIDARG; + + mediatype_set_guid(media_type, &MF_MT_MAJOR_TYPE, &MFMediaType_Video, &hr); + if (!IsEqualGUID(&format->guidFormat, &GUID_NULL)) + mediatype_set_guid(media_type, &MF_MT_SUBTYPE, &format->guidFormat, &hr); + if ((video_format = mf_get_video_format(&format->guidFormat))) + { + mediatype_set_uint32(media_type, &MF_MT_FIXED_SIZE_SAMPLES, 1, &hr); + mediatype_set_uint32(media_type, &MF_MT_ALL_SAMPLES_INDEPENDENT, 1, &hr); + } + + if (format->videoInfo.dwWidth && format->videoInfo.dwHeight) + { + mediatype_set_uint64(media_type, &MF_MT_FRAME_SIZE, format->videoInfo.dwWidth, format->videoInfo.dwHeight, 
&hr); + + if (video_format && (stride = mf_get_stride_for_format(video_format, format->videoInfo.dwWidth))) + { + if (!video_format->yuv && (format->videoInfo.VideoFlags & MFVideoFlag_BottomUpLinearRep)) + stride = -stride; + mediatype_set_uint32(media_type, &MF_MT_DEFAULT_STRIDE, stride, &hr); + } + + if (SUCCEEDED(MFCalculateImageSize(&format->guidFormat, format->videoInfo.dwWidth, format->videoInfo.dwHeight, &sample_size))) + mediatype_set_uint32(media_type, &MF_MT_SAMPLE_SIZE, sample_size, &hr); + } + + if (format->videoInfo.PixelAspectRatio.Denominator) + mediatype_set_uint64(media_type, &MF_MT_PIXEL_ASPECT_RATIO, format->videoInfo.PixelAspectRatio.Numerator, + format->videoInfo.PixelAspectRatio.Denominator, &hr); + if (format->videoInfo.SourceChromaSubsampling) + mediatype_set_uint32(media_type, &MF_MT_VIDEO_CHROMA_SITING, format->videoInfo.SourceChromaSubsampling, &hr); + if (format->videoInfo.InterlaceMode) + mediatype_set_uint32(media_type, &MF_MT_INTERLACE_MODE, format->videoInfo.InterlaceMode, &hr); + if (format->videoInfo.TransferFunction) + mediatype_set_uint32(media_type, &MF_MT_TRANSFER_FUNCTION, format->videoInfo.TransferFunction, &hr); + if (format->videoInfo.ColorPrimaries) + mediatype_set_uint32(media_type, &MF_MT_VIDEO_PRIMARIES, format->videoInfo.ColorPrimaries, &hr); + if (format->videoInfo.TransferMatrix) + mediatype_set_uint32(media_type, &MF_MT_YUV_MATRIX, format->videoInfo.TransferMatrix, &hr); + if (format->videoInfo.SourceLighting) + mediatype_set_uint32(media_type, &MF_MT_VIDEO_LIGHTING, format->videoInfo.SourceLighting, &hr); + if (format->videoInfo.FramesPerSecond.Denominator) + mediatype_set_uint64(media_type, &MF_MT_FRAME_RATE, format->videoInfo.FramesPerSecond.Numerator, + format->videoInfo.FramesPerSecond.Denominator, &hr); + if (format->videoInfo.NominalRange) + mediatype_set_uint32(media_type, &MF_MT_VIDEO_NOMINAL_RANGE, format->videoInfo.NominalRange, &hr); + if (format->videoInfo.GeometricAperture.Area.cx && 
format->videoInfo.GeometricAperture.Area.cy) + mediatype_set_blob(media_type, &MF_MT_GEOMETRIC_APERTURE, (BYTE *)&format->videoInfo.GeometricAperture, + sizeof(format->videoInfo.GeometricAperture), &hr); + if (format->videoInfo.MinimumDisplayAperture.Area.cx && format->videoInfo.MinimumDisplayAperture.Area.cy) + mediatype_set_blob(media_type, &MF_MT_MINIMUM_DISPLAY_APERTURE, (BYTE *)&format->videoInfo.MinimumDisplayAperture, + sizeof(format->videoInfo.MinimumDisplayAperture), &hr); + if (format->videoInfo.PanScanAperture.Area.cx && format->videoInfo.PanScanAperture.Area.cy) + mediatype_set_blob(media_type, &MF_MT_PAN_SCAN_APERTURE, (BYTE *)&format->videoInfo.PanScanAperture, + sizeof(format->videoInfo.PanScanAperture), &hr); + if ((value = !!(format->videoInfo.VideoFlags & MFVideoFlag_PanScanEnabled))) + mediatype_set_uint32(media_type, &MF_MT_PAN_SCAN_ENABLED, value, &hr); + if ((value = format->videoInfo.VideoFlags & MFVideoFlag_PAD_TO_Mask)) + mediatype_set_uint32(media_type, &MF_MT_PAD_CONTROL_FLAGS, value, &hr); + if ((value = format->videoInfo.VideoFlags & MFVideoFlag_SrcContentHintMask)) + mediatype_set_uint32(media_type, &MF_MT_SOURCE_CONTENT_HINT, value >> 2, &hr); + if ((value = format->videoInfo.VideoFlags & (MFVideoFlag_AnalogProtected | MFVideoFlag_DigitallyProtected))) + mediatype_set_uint32(media_type, &MF_MT_DRM_FLAGS, value >> 5, &hr); + + if (format->compressedInfo.AvgBitrate) + mediatype_set_uint32(media_type, &MF_MT_AVG_BITRATE, format->compressedInfo.AvgBitrate, &hr); + if (format->compressedInfo.AvgBitErrorRate) + mediatype_set_uint32(media_type, &MF_MT_AVG_BIT_ERROR_RATE, format->compressedInfo.AvgBitErrorRate, &hr); + if (format->compressedInfo.MaxKeyFrameSpacing) + mediatype_set_uint32(media_type, &MF_MT_MAX_KEYFRAME_SPACING, format->compressedInfo.MaxKeyFrameSpacing, &hr); + + if (!(palette_size = format->surfaceInfo.PaletteEntries * sizeof(*format->surfaceInfo.Palette))) + user_data = format + 1; + else + { + 
mediatype_set_blob(media_type, &MF_MT_PALETTE, (BYTE *)format->surfaceInfo.Palette, palette_size, &hr); + user_data = &format->surfaceInfo.Palette[format->surfaceInfo.PaletteEntries + 1]; + } + + if ((user_data_size = (BYTE *)format + format->dwSize - (BYTE *)user_data)) + mediatype_set_blob(media_type, &MF_MT_USER_DATA, user_data, user_data_size, &hr); + + return hr; +} + +/*********************************************************************** + * MFInitMediaTypeFromVideoInfoHeader2 (mfplat.@) + */ +HRESULT WINAPI MFInitMediaTypeFromVideoInfoHeader2(IMFMediaType *media_type, const VIDEOINFOHEADER2 *vih, UINT32 size, const GUID *subtype) { HRESULT hr = S_OK; DWORD height; LONG stride; - FIXME("%p, %p, %u, %s.\n", media_type, vih, size, debugstr_guid(subtype)); + TRACE("%p, %p, %u, %s.\n", media_type, vih, size, debugstr_guid(subtype)); IMFMediaType_DeleteAllItems(media_type); if (!subtype) { - FIXME("Implicit subtype is not supported.\n"); - return E_NOTIMPL; + switch (vih->bmiHeader.biBitCount) + { + case 1: subtype = &MFVideoFormat_RGB1; break; + case 4: subtype = &MFVideoFormat_RGB4; break; + case 8: subtype = &MFVideoFormat_RGB8; break; + case 16: subtype = &MFVideoFormat_RGB555; break; + case 24: subtype = &MFVideoFormat_RGB24; break; + case 32: subtype = &MFVideoFormat_RGB32; break; + default: return E_INVALIDARG; + } } height = abs(vih->bmiHeader.biHeight); @@ -3761,7 +3933,6 @@ HRESULT WINAPI MFInitMediaTypeFromVideoInfoHeader(IMFMediaType *media_type, cons mediatype_set_guid(media_type, &MF_MT_MAJOR_TYPE, &MFMediaType_Video, &hr); mediatype_set_guid(media_type, &MF_MT_SUBTYPE, subtype, &hr); mediatype_set_uint64(media_type, &MF_MT_PIXEL_ASPECT_RATIO, 1, 1, &hr); - mediatype_set_uint32(media_type, &MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive, &hr); mediatype_set_uint64(media_type, &MF_MT_FRAME_SIZE, vih->bmiHeader.biWidth, height, &hr); if (SUCCEEDED(mf_get_stride_for_bitmap_info_header(subtype->Data1, &vih->bmiHeader, &stride))) @@ -3772,12 
+3943,130 @@ HRESULT WINAPI MFInitMediaTypeFromVideoInfoHeader(IMFMediaType *media_type, cons mediatype_set_uint32(media_type, &MF_MT_ALL_SAMPLES_INDEPENDENT, 1, &hr); } + if (vih->bmiHeader.biSizeImage) + mediatype_set_uint32(media_type, &MF_MT_SAMPLE_SIZE, vih->bmiHeader.biSizeImage, &hr); + + if (vih->rcSource.left || vih->rcSource.top || vih->rcSource.right || vih->rcSource.bottom) + { + MFVideoArea aperture = {{0}}; + + aperture.OffsetX.value = vih->rcSource.left; + aperture.OffsetY.value = vih->rcSource.top; + aperture.Area.cx = vih->rcSource.right - vih->rcSource.left; + aperture.Area.cy = vih->rcSource.bottom - vih->rcSource.top; + + mediatype_set_uint32(media_type, &MF_MT_PAN_SCAN_ENABLED, 1, &hr); + mediatype_set_blob(media_type, &MF_MT_PAN_SCAN_APERTURE, (BYTE *)&aperture, sizeof(aperture), &hr); + mediatype_set_blob(media_type, &MF_MT_MINIMUM_DISPLAY_APERTURE, (BYTE *)&aperture, sizeof(aperture), &hr); + } + + if (SUCCEEDED(hr) && vih->AvgTimePerFrame) + { + UINT32 num, den; + if (SUCCEEDED(hr = MFAverageTimePerFrameToFrameRate(vih->AvgTimePerFrame, &num, &den))) + mediatype_set_uint64(media_type, &MF_MT_FRAME_RATE, num, den, &hr); + } + + if (vih->dwControlFlags & AMCONTROL_COLORINFO_PRESENT) + { + DXVA_ExtendedFormat *format = (DXVA_ExtendedFormat *)&vih->dwControlFlags; + + if (format->VideoChromaSubsampling) + mediatype_set_uint32(media_type, &MF_MT_VIDEO_CHROMA_SITING, format->VideoChromaSubsampling, &hr); + if (format->NominalRange) + mediatype_set_uint32(media_type, &MF_MT_VIDEO_NOMINAL_RANGE, format->NominalRange, &hr); + if (format->VideoTransferMatrix) + mediatype_set_uint32(media_type, &MF_MT_YUV_MATRIX, format->VideoTransferMatrix, &hr); + if (format->VideoLighting) + mediatype_set_uint32(media_type, &MF_MT_VIDEO_LIGHTING, format->VideoLighting, &hr); + if (format->VideoPrimaries) + mediatype_set_uint32(media_type, &MF_MT_VIDEO_PRIMARIES, format->VideoPrimaries, &hr); + if (format->VideoTransferFunction) + mediatype_set_uint32(media_type, 
&MF_MT_TRANSFER_FUNCTION, format->VideoTransferFunction, &hr); + } + + if (!(vih->dwInterlaceFlags & AMINTERLACE_IsInterlaced)) + mediatype_set_uint32(media_type, &MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive, &hr); + else if (vih->dwInterlaceFlags & AMINTERLACE_DisplayModeBobOrWeave) + mediatype_set_uint32(media_type, &MF_MT_INTERLACE_MODE, MFVideoInterlace_MixedInterlaceOrProgressive, &hr); + else + FIXME("dwInterlaceFlags %#lx not implemented\n", vih->dwInterlaceFlags); + return hr; } -static HRESULT init_am_media_type_audio_format(AM_MEDIA_TYPE *am_type, UINT32 user_size, IMFMediaType *media_type) +/*********************************************************************** + * MFInitMediaTypeFromVideoInfoHeader (mfplat.@) + */ +HRESULT WINAPI MFInitMediaTypeFromVideoInfoHeader(IMFMediaType *media_type, const VIDEOINFOHEADER *vih, UINT32 size, + const GUID *subtype) +{ + VIDEOINFOHEADER2 vih2 = + { + .rcSource = vih->rcSource, + .rcTarget = vih->rcTarget, + .dwBitRate = vih->dwBitRate, + .dwBitErrorRate = vih->dwBitErrorRate, + .AvgTimePerFrame = vih->AvgTimePerFrame, + .bmiHeader = vih->bmiHeader, + }; + + TRACE("%p, %p, %u, %s.\n", media_type, vih, size, debugstr_guid(subtype)); + + return MFInitMediaTypeFromVideoInfoHeader2(media_type, &vih2, sizeof(vih2), subtype); +} + +/*********************************************************************** + * MFInitMediaTypeFromMPEG1VideoInfo (mfplat.@) + */ +HRESULT WINAPI MFInitMediaTypeFromMPEG1VideoInfo(IMFMediaType *media_type, const MPEG1VIDEOINFO *vih, UINT32 size, + const GUID *subtype) +{ + HRESULT hr; + + TRACE("%p, %p, %u, %s.\n", media_type, vih, size, debugstr_guid(subtype)); + + if (FAILED(hr = MFInitMediaTypeFromVideoInfoHeader(media_type, &vih->hdr, sizeof(vih->hdr), subtype))) + return hr; + + if (vih->dwStartTimeCode) + mediatype_set_uint32(media_type, &MF_MT_MPEG_START_TIME_CODE, vih->dwStartTimeCode, &hr); + if (vih->cbSequenceHeader) + mediatype_set_blob(media_type, &MF_MT_MPEG_SEQUENCE_HEADER, 
vih->bSequenceHeader, vih->cbSequenceHeader, &hr); + + return hr; +} + +/*********************************************************************** + * MFInitMediaTypeFromMPEG2VideoInfo (mfplat.@) + */ +HRESULT WINAPI MFInitMediaTypeFromMPEG2VideoInfo(IMFMediaType *media_type, const MPEG2VIDEOINFO *vih, UINT32 size, + const GUID *subtype) +{ + HRESULT hr; + + TRACE("%p, %p, %u, %s.\n", media_type, vih, size, debugstr_guid(subtype)); + + if (FAILED(hr = MFInitMediaTypeFromVideoInfoHeader2(media_type, &vih->hdr, sizeof(vih->hdr), subtype))) + return hr; + + if (vih->dwStartTimeCode) + mediatype_set_uint32(media_type, &MF_MT_MPEG_START_TIME_CODE, vih->dwStartTimeCode, &hr); + if (vih->cbSequenceHeader) + mediatype_set_blob(media_type, &MF_MT_MPEG_SEQUENCE_HEADER, (BYTE *)vih->dwSequenceHeader, vih->cbSequenceHeader, &hr); + + if (vih->dwProfile) + mediatype_set_uint32(media_type, &MF_MT_MPEG2_PROFILE, vih->dwProfile, &hr); + if (vih->dwLevel) + mediatype_set_uint32(media_type, &MF_MT_MPEG2_LEVEL, vih->dwLevel, &hr); + if (vih->dwFlags) + mediatype_set_uint32(media_type, &MF_MT_MPEG2_FLAGS, vih->dwFlags, &hr); + + return hr; +} + +static HRESULT init_am_media_type_audio_format(AM_MEDIA_TYPE *am_type, IMFMediaType *media_type) { - UINT32 num_channels, value; HRESULT hr; if (IsEqualGUID(&am_type->formattype, &FORMAT_VideoInfo) @@ -3788,68 +4077,17 @@ static HRESULT init_am_media_type_audio_format(AM_MEDIA_TYPE *am_type, UINT32 us if (IsEqualGUID(&am_type->formattype, &FORMAT_WaveFormatEx) || IsEqualGUID(&am_type->formattype, &GUID_NULL)) { - WAVEFORMATEX *format; - - if (FAILED(IMFMediaType_GetUINT32(media_type, &MF_MT_AUDIO_NUM_CHANNELS, &num_channels))) - num_channels = 0; + UINT32 flags = 0, num_channels = media_type_get_uint32(media_type, &MF_MT_AUDIO_NUM_CHANNELS); if (SUCCEEDED(IMFMediaType_GetItem(media_type, &MF_MT_AUDIO_CHANNEL_MASK, NULL)) || SUCCEEDED(IMFMediaType_GetItem(media_type, &MF_MT_AUDIO_VALID_BITS_PER_SAMPLE, NULL)) || 
SUCCEEDED(IMFMediaType_GetItem(media_type, &MF_MT_AUDIO_SAMPLES_PER_BLOCK, NULL)) || num_channels > 2) - { - WAVEFORMATEXTENSIBLE *format_ext; - - am_type->cbFormat = sizeof(*format_ext) + user_size; - if (!(am_type->pbFormat = CoTaskMemAlloc(am_type->cbFormat))) - return E_OUTOFMEMORY; - format_ext = (WAVEFORMATEXTENSIBLE *)am_type->pbFormat; - memset(format_ext, 0, sizeof(*format_ext)); - - if (SUCCEEDED(IMFMediaType_GetUINT32(media_type, &MF_MT_AUDIO_CHANNEL_MASK, &value))) - format_ext->dwChannelMask = value; - else if (num_channels < ARRAY_SIZE(default_channel_mask)) - format_ext->dwChannelMask = default_channel_mask[num_channels]; - - if (SUCCEEDED(IMFMediaType_GetUINT32(media_type, &MF_MT_AUDIO_VALID_BITS_PER_SAMPLE, &value))) - format_ext->Samples.wValidBitsPerSample = value; - if (SUCCEEDED(IMFMediaType_GetUINT32(media_type, &MF_MT_AUDIO_SAMPLES_PER_BLOCK, &value))) - format_ext->Samples.wSamplesPerBlock = value; - format_ext->SubFormat = am_type->subtype; - - format = &format_ext->Format; - format->wFormatTag = WAVE_FORMAT_EXTENSIBLE; - format->cbSize = sizeof(*format_ext) - sizeof(*format) + user_size; - - if (user_size && FAILED(hr = IMFMediaType_GetBlob(media_type, &MF_MT_USER_DATA, - (BYTE *)(format_ext + 1), user_size, NULL))) - return hr; - } - else - { - am_type->cbFormat = sizeof(*format) + user_size; - if (!(am_type->pbFormat = CoTaskMemAlloc(am_type->cbFormat))) - return E_OUTOFMEMORY; - format = (WAVEFORMATEX *)am_type->pbFormat; - memset(format, 0, sizeof(*format)); - - format->wFormatTag = am_type->subtype.Data1; - format->cbSize = user_size; - - if (user_size && FAILED(hr = IMFMediaType_GetBlob(media_type, &MF_MT_USER_DATA, - (BYTE *)(format + 1), user_size, NULL))) - return hr; - } + flags = MFWaveFormatExConvertFlag_ForceExtensible; - format->nChannels = num_channels; - if (SUCCEEDED(IMFMediaType_GetUINT32(media_type, &MF_MT_AUDIO_SAMPLES_PER_SECOND, &value))) - format->nSamplesPerSec = value; - if 
(SUCCEEDED(IMFMediaType_GetUINT32(media_type, &MF_MT_AUDIO_AVG_BYTES_PER_SECOND, &value))) - format->nAvgBytesPerSec = value; - if (SUCCEEDED(IMFMediaType_GetUINT32(media_type, &MF_MT_AUDIO_BLOCK_ALIGNMENT, &value))) - format->nBlockAlign = value; - if (SUCCEEDED(IMFMediaType_GetUINT32(media_type, &MF_MT_AUDIO_BITS_PER_SAMPLE, &value))) - format->wBitsPerSample = value; + if (FAILED(hr = MFCreateWaveFormatExFromMFMediaType(media_type, (WAVEFORMATEX **)&am_type->pbFormat, + (UINT32 *)&am_type->cbFormat, flags))) + return hr; am_type->subtype = get_am_subtype_for_mf_subtype(am_type->subtype); am_type->formattype = FORMAT_WaveFormatEx; @@ -3863,11 +4101,133 @@ static HRESULT init_am_media_type_audio_format(AM_MEDIA_TYPE *am_type, UINT32 us return S_OK; } -static HRESULT init_am_media_type_video_format(AM_MEDIA_TYPE *am_type, UINT32 user_size, IMFMediaType *media_type) +static void init_video_info_header2(VIDEOINFOHEADER2 *vih, const GUID *subtype, IMFMediaType *media_type) { - UINT32 image_size, bitrate, sample_size; + struct uncompressed_video_format *video_format = mf_get_video_format(subtype); + DXVA_ExtendedFormat *format = (DXVA_ExtendedFormat *)&vih->dwControlFlags; + UINT32 image_size, width, height, value; UINT64 frame_size, frame_rate; - INT32 width, height; + + vih->bmiHeader.biSize = sizeof(vih->bmiHeader); + vih->bmiHeader.biPlanes = 1; + vih->bmiHeader.biBitCount = video_format ? video_format->bpp : 0; + + if (video_format && video_format->compression != -1) + vih->bmiHeader.biCompression = video_format->compression; + else + vih->bmiHeader.biCompression = subtype->Data1; + + vih->dwBitRate = media_type_get_uint32(media_type, &MF_MT_AVG_BITRATE); + vih->dwBitErrorRate = media_type_get_uint32(media_type, &MF_MT_AVG_BIT_ERROR_RATE); + if (SUCCEEDED(IMFMediaType_GetUINT64(media_type, &MF_MT_FRAME_RATE, &frame_rate)) && (frame_rate >> 32)) + vih->AvgTimePerFrame = round(10000000. 
* (UINT32)frame_rate / (frame_rate >> 32)); + vih->bmiHeader.biSizeImage = media_type_get_uint32(media_type, &MF_MT_SAMPLE_SIZE); + + if (SUCCEEDED(IMFMediaType_GetUINT64(media_type, &MF_MT_FRAME_SIZE, &frame_size))) + { + BOOL bottom_up = vih->bmiHeader.biCompression == BI_RGB || vih->bmiHeader.biCompression == BI_BITFIELDS; + INT32 stride; + + width = frame_size >> 32; + if (!(stride = media_type_get_uint32(media_type, &MF_MT_DEFAULT_STRIDE))) + stride = width * (bottom_up ? -1 : 1); + else if (video_format) + stride /= video_format->bpp / 8; + height = (UINT32)frame_size; + + vih->bmiHeader.biWidth = abs(stride); + vih->bmiHeader.biHeight = height * (bottom_up && stride >= 0 ? -1 : 1); + + if (SUCCEEDED(MFCalculateImageSize(subtype, abs(stride), height, &image_size))) + vih->bmiHeader.biSizeImage = image_size; + + if (vih->bmiHeader.biWidth > width) + { + vih->rcSource.right = vih->rcTarget.right = width; + vih->rcSource.bottom = vih->rcTarget.bottom = height; + } + } + + format->VideoChromaSubsampling = media_type_get_uint32(media_type, &MF_MT_VIDEO_CHROMA_SITING); + format->NominalRange = media_type_get_uint32(media_type, &MF_MT_VIDEO_NOMINAL_RANGE); + format->VideoTransferMatrix = media_type_get_uint32(media_type, &MF_MT_YUV_MATRIX); + format->VideoLighting = media_type_get_uint32(media_type, &MF_MT_VIDEO_LIGHTING); + format->VideoPrimaries = media_type_get_uint32(media_type, &MF_MT_VIDEO_PRIMARIES); + format->VideoTransferFunction = media_type_get_uint32(media_type, &MF_MT_TRANSFER_FUNCTION); + + if (format->VideoChromaSubsampling || format->NominalRange || format->VideoTransferMatrix + || format->VideoLighting || format->VideoPrimaries || format->VideoTransferFunction) + format->SampleFormat = AMCONTROL_COLORINFO_PRESENT; + + switch ((value = media_type_get_uint32(media_type, &MF_MT_INTERLACE_MODE))) + { + case MFVideoInterlace_Unknown: + case MFVideoInterlace_Progressive: + break; + case MFVideoInterlace_MixedInterlaceOrProgressive: + vih->dwInterlaceFlags 
= AMINTERLACE_DisplayModeBobOrWeave | AMINTERLACE_IsInterlaced; + break; + default: + FIXME("MF_MT_INTERLACE_MODE %u not implemented!\n", value); + vih->dwInterlaceFlags = AMINTERLACE_IsInterlaced; + break; + } +} + +static void init_video_info_header(VIDEOINFOHEADER *vih, const GUID *subtype, IMFMediaType *media_type) +{ + VIDEOINFOHEADER2 vih2 = {{0}}; + + init_video_info_header2(&vih2, subtype, media_type); + + vih->rcSource = vih2.rcSource; + vih->rcTarget = vih2.rcTarget; + vih->dwBitRate = vih2.dwBitRate; + vih->dwBitErrorRate = vih2.dwBitErrorRate; + vih->AvgTimePerFrame = vih2.AvgTimePerFrame; + vih->bmiHeader = vih2.bmiHeader; +} + +static UINT32 get_am_media_type_video_format_size(const GUID *format_type, IMFMediaType *media_type) +{ + if (IsEqualGUID(format_type, &FORMAT_VideoInfo)) + { + UINT32 size = sizeof(VIDEOINFOHEADER), user_size; + if (SUCCEEDED(IMFMediaType_GetBlobSize(media_type, &MF_MT_USER_DATA, &user_size))) + size += user_size; + return size; + } + + if (IsEqualGUID(format_type, &FORMAT_VideoInfo2)) + { + UINT32 size = sizeof(VIDEOINFOHEADER2), user_size; + if (SUCCEEDED(IMFMediaType_GetBlobSize(media_type, &MF_MT_USER_DATA, &user_size))) + size += user_size; + return size; + } + + if (IsEqualGUID(format_type, &FORMAT_MPEGVideo)) + { + UINT32 size = sizeof(MPEG1VIDEOINFO), sequence_size; + if (SUCCEEDED(IMFMediaType_GetBlobSize(media_type, &MF_MT_MPEG_SEQUENCE_HEADER, &sequence_size))) + size += sequence_size; + return size; + } + + if (IsEqualGUID(format_type, &FORMAT_MPEG2Video)) + { + UINT32 size = sizeof(MPEG2VIDEOINFO), sequence_size; + if (SUCCEEDED(IMFMediaType_GetBlobSize(media_type, &MF_MT_MPEG_SEQUENCE_HEADER, &sequence_size))) + size += sequence_size; + return size; + } + + return 0; +} + +static HRESULT init_am_media_type_video_format(AM_MEDIA_TYPE *am_type, IMFMediaType *media_type) +{ + struct uncompressed_video_format *video_format = mf_get_video_format(&am_type->subtype); HRESULT hr; if (IsEqualGUID(&am_type->formattype, 
&FORMAT_WaveFormatEx)) @@ -3877,65 +4237,87 @@ static HRESULT init_am_media_type_video_format(AM_MEDIA_TYPE *am_type, UINT32 us return MFCreateMFVideoFormatFromMFMediaType(media_type, (MFVIDEOFORMAT **)&am_type->pbFormat, (UINT32 *)&am_type->cbFormat); - if (IsEqualGUID(&am_type->formattype, &FORMAT_VideoInfo) - || IsEqualGUID(&am_type->formattype, &GUID_NULL)) + if (IsEqualGUID(&am_type->formattype, &GUID_NULL)) { - struct uncompressed_video_format *video_format = mf_get_video_format(&am_type->subtype); - VIDEOINFOHEADER *format; + if (IsEqualGUID(&am_type->subtype, &MEDIASUBTYPE_MPEG1Payload) + || IsEqualGUID(&am_type->subtype, &MEDIASUBTYPE_MPEG1Packet)) + am_type->formattype = FORMAT_MPEGVideo; + else if (IsEqualGUID(&am_type->subtype, &MEDIASUBTYPE_MPEG2_VIDEO)) + am_type->formattype = FORMAT_MPEG2Video; + else + am_type->formattype = FORMAT_VideoInfo; + } - am_type->cbFormat = sizeof(*format) + user_size; - if (!(am_type->pbFormat = CoTaskMemAlloc(am_type->cbFormat))) - return E_OUTOFMEMORY; - format = (VIDEOINFOHEADER *)am_type->pbFormat; - memset(format, 0, sizeof(*format)); + am_type->cbFormat = get_am_media_type_video_format_size(&am_type->formattype, media_type); + if (!(am_type->pbFormat = CoTaskMemAlloc(am_type->cbFormat))) + return E_OUTOFMEMORY; + memset(am_type->pbFormat, 0, am_type->cbFormat); - format->bmiHeader.biSize = sizeof(format->bmiHeader) + user_size; - format->bmiHeader.biPlanes = 1; - format->bmiHeader.biBitCount = video_format ? 
video_format->bpp : 0; + if (IsEqualGUID(&am_type->formattype, &FORMAT_VideoInfo)) + { + VIDEOINFOHEADER *format = (VIDEOINFOHEADER *)am_type->pbFormat; + init_video_info_header(format, &am_type->subtype, media_type); - if (video_format && video_format->compression != -1) - format->bmiHeader.biCompression = video_format->compression; - else - format->bmiHeader.biCompression = am_type->subtype.Data1; - - if (SUCCEEDED(IMFMediaType_GetUINT32(media_type, &MF_MT_AVG_BITRATE, &bitrate))) - format->dwBitRate = bitrate; - if (SUCCEEDED(IMFMediaType_GetUINT32(media_type, &MF_MT_AVG_BIT_ERROR_RATE, &bitrate))) - format->dwBitErrorRate = bitrate; - if (SUCCEEDED(IMFMediaType_GetUINT64(media_type, &MF_MT_FRAME_RATE, &frame_rate)) && (frame_rate >> 32)) - format->AvgTimePerFrame = round(10000000. * (UINT32)frame_rate / (frame_rate >> 32)); - if (SUCCEEDED(IMFMediaType_GetUINT32(media_type, &MF_MT_SAMPLE_SIZE, &sample_size))) - format->bmiHeader.biSizeImage = sample_size; - - if (SUCCEEDED(IMFMediaType_GetUINT64(media_type, &MF_MT_FRAME_SIZE, &frame_size))) - { - BOOL bottom_up = format->bmiHeader.biCompression == BI_RGB || format->bmiHeader.biCompression == BI_BITFIELDS; + if (am_type->cbFormat > sizeof(*format) && FAILED(hr = IMFMediaType_GetBlob(media_type, &MF_MT_USER_DATA, + (BYTE *)(format + 1), am_type->cbFormat - sizeof(*format), NULL))) + return hr; + format->bmiHeader.biSize += am_type->cbFormat - sizeof(*format); - if (FAILED(IMFMediaType_GetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, (UINT32 *)&width))) - width = (frame_size >> 32) * (bottom_up ? 
-1 : 1); - else if (video_format) - width /= video_format->bpp / 8; - height = (UINT32)frame_size; + am_type->subtype = get_am_subtype_for_mf_subtype(am_type->subtype); + am_type->bFixedSizeSamples = !!video_format; + am_type->bTemporalCompression = !video_format; + } + else if (IsEqualGUID(&am_type->formattype, &FORMAT_VideoInfo2)) + { + VIDEOINFOHEADER2 *format = (VIDEOINFOHEADER2 *)am_type->pbFormat; + init_video_info_header2(format, &am_type->subtype, media_type); - format->bmiHeader.biWidth = abs(width); - format->bmiHeader.biHeight = height * (bottom_up && width >= 0 ? -1 : 1); + if (am_type->cbFormat > sizeof(*format) && FAILED(hr = IMFMediaType_GetBlob(media_type, &MF_MT_USER_DATA, + (BYTE *)(format + 1), am_type->cbFormat - sizeof(*format), NULL))) + return hr; + format->bmiHeader.biSize += am_type->cbFormat - sizeof(*format); - if (SUCCEEDED(MFCalculateImageSize(&am_type->subtype, abs(width), height, &image_size))) - format->bmiHeader.biSizeImage = image_size; - } + am_type->subtype = get_am_subtype_for_mf_subtype(am_type->subtype); + am_type->bFixedSizeSamples = !!video_format; + am_type->bTemporalCompression = !video_format; + } + else if (IsEqualGUID(&am_type->formattype, &FORMAT_MPEGVideo)) + { + MPEG1VIDEOINFO *format = (MPEG1VIDEOINFO *)am_type->pbFormat; - if (user_size && FAILED(hr = IMFMediaType_GetBlob(media_type, &MF_MT_USER_DATA, - (BYTE *)(format + 1), user_size, NULL))) + init_video_info_header(&format->hdr, &am_type->subtype, media_type); + format->hdr.bmiHeader.biSize = 0; + + format->dwStartTimeCode = media_type_get_uint32(media_type, &MF_MT_MPEG_START_TIME_CODE); + + if (am_type->cbFormat > sizeof(*format) && FAILED(hr = IMFMediaType_GetBlob(media_type, &MF_MT_MPEG_SEQUENCE_HEADER, + format->bSequenceHeader, am_type->cbFormat - sizeof(*format), NULL))) return hr; + format->cbSequenceHeader = am_type->cbFormat - sizeof(*format); - am_type->formattype = FORMAT_VideoInfo; am_type->subtype = get_am_subtype_for_mf_subtype(am_type->subtype); + 
am_type->bFixedSizeSamples = !!video_format; + am_type->bTemporalCompression = !video_format; } - else if (IsEqualGUID(&am_type->formattype, &FORMAT_VideoInfo2)) + else if (IsEqualGUID(&am_type->formattype, &FORMAT_MPEG2Video)) { - FIXME("Not implemented!\n"); - am_type->formattype = GUID_NULL; - return E_NOTIMPL; + MPEG2VIDEOINFO *format = (MPEG2VIDEOINFO *)am_type->pbFormat; + + init_video_info_header2(&format->hdr, &am_type->subtype, media_type); + + format->dwStartTimeCode = media_type_get_uint32(media_type, &MF_MT_MPEG_START_TIME_CODE); + format->dwProfile = media_type_get_uint32(media_type, &MF_MT_MPEG2_PROFILE); + format->dwLevel = media_type_get_uint32(media_type, &MF_MT_MPEG2_LEVEL); + format->dwFlags = media_type_get_uint32(media_type, &MF_MT_MPEG2_FLAGS); + + if (am_type->cbFormat > sizeof(*format) && FAILED(hr = IMFMediaType_GetBlob(media_type, &MF_MT_MPEG_SEQUENCE_HEADER, + (BYTE *)format->dwSequenceHeader, am_type->cbFormat - sizeof(*format), NULL))) + return hr; + format->cbSequenceHeader = am_type->cbFormat - sizeof(*format); + + am_type->subtype = get_am_subtype_for_mf_subtype(am_type->subtype); + am_type->bFixedSizeSamples = !!video_format; + am_type->bTemporalCompression = !video_format; } else { @@ -3951,7 +4333,6 @@ static HRESULT init_am_media_type_video_format(AM_MEDIA_TYPE *am_type, UINT32 us */ HRESULT WINAPI MFInitAMMediaTypeFromMFMediaType(IMFMediaType *media_type, GUID format, AM_MEDIA_TYPE *am_type) { - UINT32 value, user_size; HRESULT hr; TRACE("%p, %s, %p.\n", media_type, debugstr_mf_guid(&format), am_type); @@ -3963,19 +4344,14 @@ HRESULT WINAPI MFInitAMMediaTypeFromMFMediaType(IMFMediaType *media_type, GUID f || FAILED(hr = IMFMediaType_GetGUID(media_type, &MF_MT_SUBTYPE, &am_type->subtype))) goto done; - if (SUCCEEDED(IMFMediaType_GetUINT32(media_type, &MF_MT_FIXED_SIZE_SAMPLES, &value))) - am_type->bFixedSizeSamples = value; - if (SUCCEEDED(IMFMediaType_GetUINT32(media_type, &MF_MT_SAMPLE_SIZE, &value))) - am_type->lSampleSize = 
value; - - if (FAILED(hr = IMFMediaType_GetBlob(media_type, &MF_MT_USER_DATA, NULL, 0, &user_size)) - && hr != E_NOT_SUFFICIENT_BUFFER) - user_size = 0; + am_type->bTemporalCompression = !media_type_get_uint32(media_type, &MF_MT_ALL_SAMPLES_INDEPENDENT); + am_type->bFixedSizeSamples = media_type_get_uint32(media_type, &MF_MT_FIXED_SIZE_SAMPLES); + am_type->lSampleSize = media_type_get_uint32(media_type, &MF_MT_SAMPLE_SIZE); if (IsEqualGUID(&am_type->majortype, &MFMediaType_Audio)) - hr = init_am_media_type_audio_format(am_type, user_size, media_type); + hr = init_am_media_type_audio_format(am_type, media_type); else if (IsEqualGUID(&am_type->majortype, &MFMediaType_Video)) - hr = init_am_media_type_video_format(am_type, user_size, media_type); + hr = init_am_media_type_video_format(am_type, media_type); else { FIXME("Not implemented!\n"); @@ -4006,48 +4382,55 @@ HRESULT WINAPI MFInitMediaTypeFromAMMediaType(IMFMediaType *media_type, const AM if (IsEqualGUID(&am_type->majortype, &MEDIATYPE_Video)) { - if (IsEqualGUID(&am_type->formattype, &FORMAT_VideoInfo)) + const GUID *subtype = get_mf_subtype_for_am_subtype(&am_type->subtype); + + if (am_type->cbFormat && !am_type->pbFormat) + hr = E_INVALIDARG; + else if (IsEqualGUID(&am_type->formattype, &FORMAT_VideoInfo) + && am_type->cbFormat >= sizeof(VIDEOINFOHEADER)) + hr = MFInitMediaTypeFromVideoInfoHeader(media_type, (VIDEOINFOHEADER *)am_type->pbFormat, am_type->cbFormat, subtype); + else if (IsEqualGUID(&am_type->formattype, &FORMAT_VideoInfo2) + && am_type->cbFormat >= sizeof(VIDEOINFOHEADER2)) + hr = MFInitMediaTypeFromVideoInfoHeader2(media_type, (VIDEOINFOHEADER2 *)am_type->pbFormat, am_type->cbFormat, subtype); + else if (IsEqualGUID(&am_type->formattype, &FORMAT_MPEGVideo) + && am_type->cbFormat >= sizeof(MPEG1VIDEOINFO)) + hr = MFInitMediaTypeFromMPEG1VideoInfo(media_type, (MPEG1VIDEOINFO *)am_type->pbFormat, am_type->cbFormat, subtype); + else if (IsEqualGUID(&am_type->formattype, &FORMAT_MPEG2Video) + && 
am_type->cbFormat >= sizeof(MPEG2VIDEOINFO)) + hr = MFInitMediaTypeFromMPEG2VideoInfo(media_type, (MPEG2VIDEOINFO *)am_type->pbFormat, am_type->cbFormat, subtype); + else { - const VIDEOINFOHEADER *vih = (const VIDEOINFOHEADER *)am_type->pbFormat; - const GUID *subtype; - DWORD height; - LONG stride; - - subtype = get_mf_subtype_for_am_subtype(&am_type->subtype); - height = abs(vih->bmiHeader.biHeight); - - mediatype_set_guid(media_type, &MF_MT_MAJOR_TYPE, &MFMediaType_Video, &hr); - mediatype_set_guid(media_type, &MF_MT_SUBTYPE, subtype, &hr); - mediatype_set_uint64(media_type, &MF_MT_PIXEL_ASPECT_RATIO, 1, 1, &hr); - mediatype_set_uint32(media_type, &MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive, &hr); - mediatype_set_uint64(media_type, &MF_MT_FRAME_SIZE, vih->bmiHeader.biWidth, height, &hr); - mediatype_set_uint32(media_type, &MF_MT_ALL_SAMPLES_INDEPENDENT, 1, &hr); - - if (SUCCEEDED(mf_get_stride_for_bitmap_info_header(subtype->Data1, &vih->bmiHeader, &stride))) - { - mediatype_set_uint32(media_type, &MF_MT_DEFAULT_STRIDE, stride, &hr); - mediatype_set_uint32(media_type, &MF_MT_SAMPLE_SIZE, abs(stride) * height, &hr); - mediatype_set_uint32(media_type, &MF_MT_FIXED_SIZE_SAMPLES, 1, &hr); - } - else - { - if (am_type->bFixedSizeSamples) - mediatype_set_uint32(media_type, &MF_MT_FIXED_SIZE_SAMPLES, 1, &hr); - if (am_type->lSampleSize) - mediatype_set_uint32(media_type, &MF_MT_SAMPLE_SIZE, am_type->lSampleSize, &hr); - } - - return hr; + FIXME("Unsupported format type %s / size %ld.\n", debugstr_guid(&am_type->formattype), am_type->cbFormat); + return E_NOTIMPL; } + } + else if (IsEqualGUID(&am_type->majortype, &MEDIATYPE_Audio)) + { + if (am_type->cbFormat && !am_type->pbFormat) + hr = E_INVALIDARG; + else if (IsEqualGUID(&am_type->formattype, &FORMAT_WaveFormatEx) + && am_type->cbFormat >= sizeof(WAVEFORMATEX)) + hr = MFInitMediaTypeFromWaveFormatEx(media_type, (WAVEFORMATEX *)am_type->pbFormat, am_type->cbFormat); else { - FIXME("Unsupported format type 
%s.\n", debugstr_guid(&am_type->formattype)); + FIXME("Unsupported format type %s / size %ld.\n", debugstr_guid(&am_type->formattype), am_type->cbFormat); + return E_NOTIMPL; } } else + { FIXME("Unsupported major type %s.\n", debugstr_guid(&am_type->majortype)); + return E_NOTIMPL; + } - return E_NOTIMPL; + if (!am_type->bTemporalCompression && FAILED(IMFMediaType_GetItem(media_type, &MF_MT_ALL_SAMPLES_INDEPENDENT, NULL))) + mediatype_set_uint32(media_type, &MF_MT_ALL_SAMPLES_INDEPENDENT, 1, &hr); + if (am_type->bFixedSizeSamples && FAILED(IMFMediaType_GetItem(media_type, &MF_MT_FIXED_SIZE_SAMPLES, NULL))) + mediatype_set_uint32(media_type, &MF_MT_FIXED_SIZE_SAMPLES, 1, &hr); + if (am_type->lSampleSize && FAILED(IMFMediaType_GetItem(media_type, &MF_MT_SAMPLE_SIZE, NULL))) + mediatype_set_uint32(media_type, &MF_MT_SAMPLE_SIZE, am_type->lSampleSize, &hr); + + return hr; } /*********************************************************************** diff --git a/dlls/mfplat/mfplat.spec b/dlls/mfplat/mfplat.spec index 8a9750a7eb8..d61e18491d3 100644 --- a/dlls/mfplat/mfplat.spec +++ b/dlls/mfplat/mfplat.spec @@ -76,7 +76,7 @@ @ stdcall MFCreateTransformActivate(ptr) @ stub MFCreateURLFromPath @ stub MFCreateUdpSockets -@ stub MFCreateVideoMediaType +@ stdcall MFCreateVideoMediaType(ptr ptr) @ stub MFCreateVideoMediaTypeFromBitMapInfoHeader @ stub MFCreateVideoMediaTypeFromBitMapInfoHeaderEx @ stdcall MFCreateVideoMediaTypeFromSubtype(ptr ptr) @@ -120,10 +120,10 @@ @ stdcall MFInitAMMediaTypeFromMFMediaType(ptr int128 ptr) @ stdcall MFInitAttributesFromBlob(ptr ptr long) @ stdcall MFInitMediaTypeFromAMMediaType(ptr ptr) -@ stub MFInitMediaTypeFromMFVideoFormat -@ stub MFInitMediaTypeFromMPEG1VideoInfo -@ stub MFInitMediaTypeFromMPEG2VideoInfo -@ stub MFInitMediaTypeFromVideoInfoHeader2 +@ stdcall MFInitMediaTypeFromMFVideoFormat(ptr ptr long) +@ stdcall MFInitMediaTypeFromMPEG1VideoInfo(ptr ptr long ptr) +@ stdcall MFInitMediaTypeFromMPEG2VideoInfo(ptr ptr long ptr) +@ 
stdcall MFInitMediaTypeFromVideoInfoHeader2(ptr ptr long ptr) @ stdcall MFInitMediaTypeFromVideoInfoHeader(ptr ptr long ptr) @ stdcall MFInitMediaTypeFromWaveFormatEx(ptr ptr long) @ stub MFInitVideoFormat diff --git a/dlls/mfplat/sample.c b/dlls/mfplat/sample.c index 8e489d22acd..4b9151b56b2 100644 --- a/dlls/mfplat/sample.c +++ b/dlls/mfplat/sample.c @@ -1488,7 +1488,7 @@ static HRESULT sample_allocator_initialize(struct sample_allocator *allocator, u unsigned int i, value; GUID major, subtype; UINT64 frame_size; - D3D11_USAGE usage; + UINT32 usage; HRESULT hr; if (FAILED(hr = IMFMediaType_GetMajorType(media_type, &major))) diff --git a/dlls/mfplat/tests/mfplat.c b/dlls/mfplat/tests/mfplat.c index 01290763003..75fe480efc9 100644 --- a/dlls/mfplat/tests/mfplat.c +++ b/dlls/mfplat/tests/mfplat.c @@ -67,13 +67,16 @@ DEFINE_GUID(DUMMY_GUID3, 0x12345678,0x1234,0x1234,0x23,0x23,0x23,0x23,0x23,0x23, extern const CLSID CLSID_FileSchemePlugin; DEFINE_MEDIATYPE_GUID(MEDIASUBTYPE_Base,0); +DEFINE_GUID(MEDIASUBTYPE_ABGR32,D3DFMT_A8B8G8R8,0x524f,0x11ce,0x9f,0x53,0x00,0x20,0xaf,0x0b,0xa7,0x70); DEFINE_MEDIATYPE_GUID(MFVideoFormat_RGB1, D3DFMT_A1); DEFINE_MEDIATYPE_GUID(MFVideoFormat_RGB4, MAKEFOURCC('4','P','x','x')); +DEFINE_MEDIATYPE_GUID(MFVideoFormat_ABGR32, D3DFMT_A8B8G8R8); DEFINE_MEDIATYPE_GUID(MFVideoFormat_ARGB1555, D3DFMT_A1R5G5B5); DEFINE_MEDIATYPE_GUID(MFVideoFormat_ARGB4444, D3DFMT_A4R4G4B4); /* SDK MFVideoFormat_A2R10G10B10 uses D3DFMT_A2B10G10R10, let's name it the other way */ DEFINE_MEDIATYPE_GUID(MFVideoFormat_A2B10G10R10, D3DFMT_A2R10G10B10); +DEFINE_MEDIATYPE_GUID(MFAudioFormat_RAW_AAC,WAVE_FORMAT_RAW_AAC1); DEFINE_MEDIATYPE_GUID(MEDIASUBTYPE_h264,MAKEFOURCC('h','2','6','4')); DEFINE_MEDIATYPE_GUID(MEDIASUBTYPE_MP3,WAVE_FORMAT_MPEGLAYER3); @@ -4736,6 +4739,9 @@ image_size_tests[] = { &MFVideoFormat_ARGB32, 3, 5, 60, 0, 320, 60, 64 }, { &MFVideoFormat_ARGB32, 1, 1, 4, 0, 64, 4, 64 }, { &MFVideoFormat_ARGB32, 320, 240, 307200, 0, 307200, 307200, 1280 }, + { 
&MFVideoFormat_ABGR32, 3, 5, 60, 0, 320, 60, 64 }, + { &MFVideoFormat_ABGR32, 1, 1, 4, 0, 64, 4, 64 }, + { &MFVideoFormat_ABGR32, 320, 240, 307200, 0, 307200, 307200, 1280 }, { &MFVideoFormat_A2R10G10B10, 3, 5, 60, 0, 320, 60, 64 }, { &MFVideoFormat_A2R10G10B10, 1, 1, 4, 0, 64, 4, 64 }, { &MFVideoFormat_A2R10G10B10, 320, 240, 307200, 0, 307200, 307200, 1280 }, @@ -4898,12 +4904,16 @@ static void test_MFCalculateImageSize(void) /* Those are supported since Win10. */ BOOL is_broken = IsEqualGUID(ptr->subtype, &MFVideoFormat_A16B16G16R16F) || - IsEqualGUID(ptr->subtype, &MFVideoFormat_A2R10G10B10); + IsEqualGUID(ptr->subtype, &MFVideoFormat_A2R10G10B10) || + IsEqualGUID(ptr->subtype, &MFVideoFormat_ABGR32); hr = MFCalculateImageSize(ptr->subtype, ptr->width, ptr->height, &size); - ok(hr == S_OK || (is_broken && hr == E_INVALIDARG), "%u: failed to calculate image size, hr %#lx.\n", i, hr); - ok(size == ptr->size, "%u: unexpected image size %u, expected %u. Size %u x %u, format %s.\n", i, size, ptr->size, - ptr->width, ptr->height, wine_dbgstr_an((char *)&ptr->subtype->Data1, 4)); + ok(hr == S_OK || broken(is_broken && hr == E_INVALIDARG), "%u: failed to calculate image size, hr %#lx.\n", i, hr); + if (hr == S_OK) + { + ok(size == ptr->size, "%u: unexpected image size %u, expected %u. 
Size %u x %u, format %s.\n", i, size, ptr->size, + ptr->width, ptr->height, wine_dbgstr_an((char *)&ptr->subtype->Data1, 4)); + } } } @@ -5075,7 +5085,8 @@ static void test_attributes_serialization(void) static void test_wrapped_media_type(void) { IMFMediaType *mediatype, *mediatype2; - UINT32 count, type; + MF_ATTRIBUTE_TYPE type; + UINT32 count; HRESULT hr; GUID guid; @@ -6206,6 +6217,8 @@ static void test_MFGetStrideForBitmapInfoHeader(void) { &MFVideoFormat_RGB32, 1, -4 }, { &MFVideoFormat_ARGB32, 3, -12 }, { &MFVideoFormat_ARGB32, 1, -4 }, + { &MFVideoFormat_ABGR32, 3, -12 }, + { &MFVideoFormat_ABGR32, 1, -4 }, { &MFVideoFormat_A2R10G10B10, 3, -12 }, { &MFVideoFormat_A2R10G10B10, 1, -4 }, { &MFVideoFormat_A2B10G10R10, 3, -12 }, @@ -6785,8 +6798,10 @@ static void test_MFCreateMediaBufferFromMediaType(void) IMFMediaType *media_type, *media_type2; unsigned int i, alignment; IMFMediaBuffer *buffer; + IMF2DBuffer *buffer_2d; DWORD length, max; BYTE *data; + LONG pitch; HRESULT hr; if (!pMFCreateMediaBufferFromMediaType) @@ -6801,6 +6816,19 @@ static void test_MFCreateMediaBufferFromMediaType(void) hr = MFCreateMediaType(&media_type); ok(hr == S_OK, "Failed to create media type, hr %#lx.\n", hr); + hr = pMFCreateMediaBufferFromMediaType(media_type, 0, 0, 0, &buffer); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetGUID(media_type, &MF_MT_MAJOR_TYPE, &GUID_NULL); + ok(hr == S_OK, "Failed to set attribute, hr %#lx.\n", hr); + hr = pMFCreateMediaBufferFromMediaType(media_type, 0, 0, 0, &buffer); + ok(hr == E_INVALIDARG, "Unexpected hr %#lx.\n", hr); + hr = pMFCreateMediaBufferFromMediaType(media_type, 0, 16, 0, &buffer); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaBuffer_GetMaxLength(buffer, &length); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(length == 16, "Got length %#lx.\n", length); + IMFMediaBuffer_Release(buffer); + hr = MFCreateMediaType(&media_type2); ok(hr == S_OK, "Failed to create media 
type, hr %#lx.\n", hr); @@ -6861,8 +6889,139 @@ static void test_MFCreateMediaBufferFromMediaType(void) IMFMediaBuffer_Release(buffer); } - IMFMediaType_Release(media_type); IMFMediaType_Release(media_type2); + + + hr = IMFMediaType_DeleteAllItems(media_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + + /* MF_MT_SUBTYPE is required unless min length is provided */ + hr = IMFMediaType_SetGUID(media_type, &MF_MT_MAJOR_TYPE, &MFMediaType_Video); + ok(hr == S_OK, "Failed to set attribute, hr %#lx.\n", hr); + hr = pMFCreateMediaBufferFromMediaType(media_type, 0, 0, 0, &buffer); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + hr = pMFCreateMediaBufferFromMediaType(media_type, 0, 16, 0, &buffer); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaBuffer_Lock(buffer, &data, &max, &length); + ok(hr == S_OK, "Failed to lock, hr %#lx.\n", hr); + ok(max == 16, "Unexpected max length.\n"); + ok(length == 0, "Unexpected length.\n"); + ok(!((uintptr_t)data & 0xf), "%u: data at %p is misaligned.\n", i, data); + hr = IMFMediaBuffer_Unlock(buffer); + ok(hr == S_OK, "Failed to unlock, hr %#lx.\n", hr); + IMFMediaBuffer_Release(buffer); + + /* MF_MT_FRAME_SIZE is required unless min length is provided */ + hr = IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &MFVideoFormat_RGB32); + ok(hr == S_OK, "Failed to set attribute, hr %#lx.\n", hr); + hr = pMFCreateMediaBufferFromMediaType(media_type, 0, 0, 0, &buffer); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + hr = pMFCreateMediaBufferFromMediaType(media_type, 0, 16, 0, &buffer); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaBuffer_Lock(buffer, &data, &max, &length); + ok(hr == S_OK, "Failed to lock, hr %#lx.\n", hr); + ok(max == 16, "Unexpected max length.\n"); + ok(length == 0, "Unexpected length.\n"); + ok(!((uintptr_t)data & 0xf), "%u: data at %p is misaligned.\n", i, data); + hr = IMFMediaBuffer_Unlock(buffer); + ok(hr == S_OK, "Failed to unlock, hr %#lx.\n", 
hr); + IMFMediaBuffer_Release(buffer); + + /* MF_MT_SAMPLE_SIZE / MF_MT_FIXED_SIZE_SAMPLES / MF_MT_COMPRESSED don't have any effect */ + hr = IMFMediaType_SetUINT32(media_type, &MF_MT_SAMPLE_SIZE, 1024); + ok(hr == S_OK, "Failed to set attribute, hr %#lx.\n", hr); + hr = pMFCreateMediaBufferFromMediaType(media_type, 0, 0, 0, &buffer); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetUINT32(media_type, &MF_MT_FIXED_SIZE_SAMPLES, 1); + ok(hr == S_OK, "Failed to set attribute, hr %#lx.\n", hr); + hr = pMFCreateMediaBufferFromMediaType(media_type, 0, 0, 0, &buffer); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetUINT32(media_type, &MF_MT_COMPRESSED, 0); + ok(hr == S_OK, "Failed to set attribute, hr %#lx.\n", hr); + hr = pMFCreateMediaBufferFromMediaType(media_type, 0, 0, 0, &buffer); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + + /* MF_MT_FRAME_SIZE forces the buffer size, regardless of min length */ + hr = IMFMediaType_SetUINT64(media_type, &MF_MT_FRAME_SIZE, (UINT64)16 << 32 | 32); + ok(hr == S_OK, "Failed to set attribute, hr %#lx.\n", hr); + hr = pMFCreateMediaBufferFromMediaType(media_type, 0, 0, 0, &buffer); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaBuffer_Lock(buffer, &data, &max, &length); + ok(hr == S_OK, "Failed to lock, hr %#lx.\n", hr); + ok(max == 2048, "Unexpected max length.\n"); + ok(length == 2048, "Unexpected length.\n"); + ok(!((uintptr_t)data & 0xf), "%u: data at %p is misaligned.\n", i, data); + hr = IMFMediaBuffer_Unlock(buffer); + ok(hr == S_OK, "Failed to unlock, hr %#lx.\n", hr); + + hr = IMFMediaBuffer_QueryInterface(buffer, &IID_IMF2DBuffer, (void **)&buffer_2d); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMF2DBuffer_Lock2D(buffer_2d, &data, &pitch); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(pitch == 64, "got pitch %ld.\n", pitch); + hr = IMF2DBuffer_Unlock2D(buffer_2d); + ok(hr == S_OK, "Unexpected hr 
%#lx.\n", hr); + IMF2DBuffer_Release(buffer_2d); + + IMFMediaBuffer_Release(buffer); + + hr = pMFCreateMediaBufferFromMediaType(media_type, 0, 4096, 0, &buffer); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaBuffer_Lock(buffer, &data, &max, &length); + ok(hr == S_OK, "Failed to lock, hr %#lx.\n", hr); + ok(max == 2048, "Unexpected max length.\n"); + ok(length == 2048, "Unexpected length.\n"); + ok(!((uintptr_t)data & 0xf), "%u: data at %p is misaligned.\n", i, data); + hr = IMFMediaBuffer_Unlock(buffer); + ok(hr == S_OK, "Failed to unlock, hr %#lx.\n", hr); + + hr = IMFMediaBuffer_QueryInterface(buffer, &IID_IMF2DBuffer, (void **)&buffer_2d); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMF2DBuffer_Lock2D(buffer_2d, &data, &pitch); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(pitch == 64, "got pitch %ld.\n", pitch); + hr = IMF2DBuffer_Unlock2D(buffer_2d); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + IMF2DBuffer_Release(buffer_2d); + + IMFMediaBuffer_Release(buffer); + + /* MF_MT_DEFAULT_STRIDE is ignored as well */ + hr = IMFMediaType_SetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, -256); + ok(hr == S_OK, "Failed to set attribute, hr %#lx.\n", hr); + hr = pMFCreateMediaBufferFromMediaType(media_type, 0, 0, 0, &buffer); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaBuffer_Lock(buffer, &data, &max, &length); + ok(hr == S_OK, "Failed to lock, hr %#lx.\n", hr); + ok(max == 2048, "Unexpected max length.\n"); + ok(length == 2048, "Unexpected length.\n"); + ok(!((uintptr_t)data & 0xf), "%u: data at %p is misaligned.\n", i, data); + hr = IMFMediaBuffer_Unlock(buffer); + ok(hr == S_OK, "Failed to unlock, hr %#lx.\n", hr); + + hr = IMFMediaBuffer_QueryInterface(buffer, &IID_IMF2DBuffer, (void **)&buffer_2d); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMF2DBuffer_Lock2D(buffer_2d, &data, &pitch); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(pitch == -64, "got pitch %ld.\n", pitch); + hr = 
IMF2DBuffer_Unlock2D(buffer_2d); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + IMF2DBuffer_Release(buffer_2d); + + IMFMediaBuffer_Release(buffer); + + hr = IMFMediaType_DeleteItem(media_type, &MF_MT_FRAME_SIZE); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + + /* MF_MT_FRAME_SIZE doesn't work with compressed formats */ + hr = IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &MFVideoFormat_H264); + ok(hr == S_OK, "Failed to set attribute, hr %#lx.\n", hr); + hr = IMFMediaType_SetUINT64(media_type, &MF_MT_FRAME_SIZE, (UINT64)16 << 32 | 32); + ok(hr == S_OK, "Failed to set attribute, hr %#lx.\n", hr); + hr = pMFCreateMediaBufferFromMediaType(media_type, 0, 0, 0, &buffer); + ok(hr == E_INVALIDARG, "Unexpected hr %#lx.\n", hr); + + IMFMediaType_Release(media_type); } static void validate_media_type(IMFMediaType *mediatype, const WAVEFORMATEX *format) @@ -6991,6 +7150,15 @@ static void test_MFInitMediaTypeFromWaveFormatEx(void) { WAVE_FORMAT_WMASPDIF }, }; + static const BYTE aac_codec_data[] = + { + 0x12, 0x00, + 0x34, 0x00, + 0x00, 0x00, + 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, + 0x12, 0x08, + }; UINT8 buff[1024]; WAVEFORMATEXTENSIBLE waveformatext; MPEGLAYER3WAVEFORMAT mp3format; @@ -6998,6 +7166,7 @@ static void test_MFInitMediaTypeFromWaveFormatEx(void) HEAACWAVEFORMAT aacformat; IMFMediaType *mediatype; unsigned int i, size; + WAVEFORMATEX *wfx; UINT32 value; HRESULT hr; @@ -7087,6 +7256,15 @@ static void test_MFInitMediaTypeFromWaveFormatEx(void) ok(size == aacformat.wfInfo.wfx.cbSize, "Unexpected size %u.\n", size); ok(!memcmp(buff, (WAVEFORMATEX *)&aacformat + 1, size), "Unexpected user data.\n"); + /* check that we get an HEAACWAVEFORMAT by default */ + hr = MFCreateWaveFormatExFromMFMediaType(mediatype, (WAVEFORMATEX **)&wfx, &size, 0); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(wfx->wFormatTag == WAVE_FORMAT_MPEG_HEAAC, "got wFormatTag %#x\n", wfx->wFormatTag); + ok(wfx->cbSize == sizeof(HEAACWAVEFORMAT) - sizeof(WAVEFORMATEX), "got cbSize %u\n", 
wfx->cbSize); + ok(!memcmp(wfx + 1, &aacformat.wfInfo.wfx + 1, aacformat.wfInfo.wfx.cbSize), "Unexpected user data.\n"); + CoTaskMemFree(wfx); + + /* MFWaveFormatExConvertFlag_ForceExtensible can force a WAVEFORMATEXTENSIBLE */ hr = MFCreateWaveFormatExFromMFMediaType(mediatype, (WAVEFORMATEX **)&format, &size, MFWaveFormatExConvertFlag_ForceExtensible); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); ok(format->Format.wFormatTag == WAVE_FORMAT_EXTENSIBLE, "got wFormatTag %#x\n", format->Format.wFormatTag); @@ -7098,6 +7276,56 @@ static void test_MFInitMediaTypeFromWaveFormatEx(void) ok(!memcmp(format + 1, &aacformat.wfInfo.wfx + 1, aacformat.wfInfo.wfx.cbSize), "Unexpected user data.\n"); CoTaskMemFree(format); + /* adding more channels has no immediate effect */ + hr = IMFMediaType_SetUINT32(mediatype, &MF_MT_AUDIO_NUM_CHANNELS, 6); + ok(hr == S_OK, "Failed to get attribute, hr %#lx.\n", hr); + hr = IMFMediaType_SetUINT32(mediatype, &MF_MT_AUDIO_CHANNEL_MASK, 63); + ok(hr == S_OK, "Failed to get attribute, hr %#lx.\n", hr); + hr = MFCreateWaveFormatExFromMFMediaType(mediatype, (WAVEFORMATEX **)&wfx, &size, 0); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(wfx->wFormatTag == WAVE_FORMAT_MPEG_HEAAC, "got wFormatTag %#x\n", wfx->wFormatTag); + ok(wfx->cbSize == sizeof(HEAACWAVEFORMAT) - sizeof(WAVEFORMATEX), "got cbSize %u\n", wfx->cbSize); + ok(!memcmp(wfx + 1, &aacformat.wfInfo.wfx + 1, aacformat.wfInfo.wfx.cbSize), "Unexpected user data.\n"); + CoTaskMemFree(wfx); + + /* but adding MF_MT_AUDIO_SAMPLES_PER_BLOCK as well forces the WAVEFORMATEXTENSIBLE format */ + hr = IMFMediaType_SetUINT32(mediatype, &MF_MT_AUDIO_SAMPLES_PER_BLOCK, 4); + ok(hr == S_OK, "Failed to get attribute, hr %#lx.\n", hr); + hr = MFCreateWaveFormatExFromMFMediaType(mediatype, (WAVEFORMATEX **)&format, &size, 0); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(format->Format.wFormatTag == WAVE_FORMAT_EXTENSIBLE, "got wFormatTag %#x\n", format->Format.wFormatTag); + 
ok(format->Format.cbSize == aacformat.wfInfo.wfx.cbSize + sizeof(WAVEFORMATEXTENSIBLE) - sizeof(WAVEFORMATEX), + "got cbSize %u\n", format->Format.cbSize); + ok(IsEqualGUID(&format->SubFormat, &MFAudioFormat_AAC), "got SubFormat %s\n", debugstr_guid(&format->SubFormat)); + ok(format->dwChannelMask == 63, "got dwChannelMask %#lx\n", format->dwChannelMask); + ok(format->Samples.wSamplesPerBlock == 4, "got wSamplesPerBlock %u\n", format->Samples.wSamplesPerBlock); + ok(!memcmp(format + 1, &aacformat.wfInfo.wfx + 1, aacformat.wfInfo.wfx.cbSize), "Unexpected user data.\n"); + + /* test initializing media type from an WAVE_FORMAT_EXTENSIBLE AAC format */ + IMFMediaType_DeleteAllItems(mediatype); + hr = MFInitMediaTypeFromWaveFormatEx(mediatype, (WAVEFORMATEX *)format, sizeof(WAVEFORMATEX) + format->Format.cbSize); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + CoTaskMemFree(format); + + value = 0xdeadbeef; + hr = IMFMediaType_GetUINT32(mediatype, &MF_MT_AAC_AUDIO_PROFILE_LEVEL_INDICATION, &value); + todo_wine + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Failed to get attribute, hr %#lx.\n", hr); + todo_wine + ok(value == 0xdeadbeef, "Unexpected AAC_AUDIO_PROFILE_LEVEL_INDICATION %u.\n", value); + value = 0xdeadbeef; + hr = IMFMediaType_GetUINT32(mediatype, &MF_MT_AAC_PAYLOAD_TYPE, &value); + todo_wine + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Failed to get attribute, hr %#lx.\n", hr); + todo_wine + ok(value == 0xdeadbeef, "Unexpected AAC_PAYLOAD_TYPE %u.\n", value); + + hr = IMFMediaType_GetBlob(mediatype, &MF_MT_USER_DATA, buff, sizeof(buff), &size); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(size == aacformat.wfInfo.wfx.cbSize, "Unexpected size %u.\n", size); + ok(!memcmp(buff, (WAVEFORMATEX *)&aacformat + 1, size), "Unexpected user data.\n"); + + /* test with invalid format size */ aacformat.wfInfo.wfx.cbSize = 1; hr = IMFMediaType_SetBlob(mediatype, &MF_MT_USER_DATA, buff, aacformat.wfInfo.wfx.cbSize); @@ -7108,11 +7336,90 @@ static void 
test_MFInitMediaTypeFromWaveFormatEx(void) ok(format->Format.cbSize == aacformat.wfInfo.wfx.cbSize + sizeof(WAVEFORMATEXTENSIBLE) - sizeof(WAVEFORMATEX), "got cbSize %u\n", format->Format.cbSize); ok(IsEqualGUID(&format->SubFormat, &MFAudioFormat_AAC), "got SubFormat %s\n", debugstr_guid(&format->SubFormat)); - ok(format->dwChannelMask == 3, "got dwChannelMask %#lx\n", format->dwChannelMask); - ok(format->Samples.wSamplesPerBlock == 0, "got wSamplesPerBlock %u\n", format->Samples.wSamplesPerBlock); + ok(format->dwChannelMask == 63, "got dwChannelMask %#lx\n", format->dwChannelMask); + todo_wine + ok(format->Samples.wSamplesPerBlock == 4, "got wSamplesPerBlock %u\n", format->Samples.wSamplesPerBlock); ok(!memcmp(format + 1, &aacformat.wfInfo.wfx + 1, aacformat.wfInfo.wfx.cbSize), "Unexpected user data.\n"); CoTaskMemFree(format); + IMFMediaType_DeleteAllItems(mediatype); + + + /* check that HEAACWAVEFORMAT extra fields are copied directly from MF_MT_USER_DATA */ + aacformat.wfInfo.wfx.cbSize = sizeof(aacformat) - sizeof(WAVEFORMATEX); + hr = MFInitMediaTypeFromWaveFormatEx(mediatype, (WAVEFORMATEX *)&aacformat, sizeof(aacformat)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + + hr = IMFMediaType_DeleteItem(mediatype, &MF_MT_USER_DATA); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = MFCreateWaveFormatExFromMFMediaType(mediatype, (WAVEFORMATEX **)&wfx, &size, 0); + todo_wine + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + if (hr == S_OK) + { + ok(wfx->wFormatTag == WAVE_FORMAT_MPEG_HEAAC, "got wFormatTag %#x\n", wfx->wFormatTag); + ok(wfx->cbSize == 0, "got cbSize %u\n", wfx->cbSize); + CoTaskMemFree(wfx); + } + + hr = IMFMediaType_DeleteItem(mediatype, &MF_MT_AAC_AUDIO_PROFILE_LEVEL_INDICATION); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_DeleteItem(mediatype, &MF_MT_AAC_PAYLOAD_TYPE); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetBlob(mediatype, &MF_MT_USER_DATA, aac_codec_data, sizeof(aac_codec_data)); + 
ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + + hr = MFCreateWaveFormatExFromMFMediaType(mediatype, (WAVEFORMATEX **)&wfx, &size, 0); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(wfx->wFormatTag == WAVE_FORMAT_MPEG_HEAAC, "got wFormatTag %#x\n", wfx->wFormatTag); + ok(wfx->cbSize == sizeof(aac_codec_data), "got cbSize %u\n", wfx->cbSize); + memcpy(&aacformat, wfx, sizeof(aacformat)); + ok(aacformat.wfInfo.wPayloadType == 0x12, "got %u\n", aacformat.wfInfo.wPayloadType); + ok(aacformat.wfInfo.wAudioProfileLevelIndication == 0x34, "got %u\n", aacformat.wfInfo.wAudioProfileLevelIndication); + + hr = MFInitMediaTypeFromWaveFormatEx(mediatype, wfx, sizeof(*wfx) + wfx->cbSize); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + value = 0xdeadbeef; + hr = IMFMediaType_GetUINT32(mediatype, &MF_MT_AAC_PAYLOAD_TYPE, &value); + ok(hr == S_OK, "Failed to get attribute, hr %#lx.\n", hr); + ok(value == 0x12, "Unexpected AAC_PAYLOAD_TYPE %u.\n", value); + value = 0xdeadbeef; + hr = IMFMediaType_GetUINT32(mediatype, &MF_MT_AAC_AUDIO_PROFILE_LEVEL_INDICATION, &value); + ok(hr == S_OK, "Failed to get attribute, hr %#lx.\n", hr); + ok(value == 0x34, "Unexpected AAC_AUDIO_PROFILE_LEVEL_INDICATION %u.\n", value); + + CoTaskMemFree(wfx); + + + /* check that RAW AAC doesn't have MF_MT_AAC_* attributes */ + aacformat.wfInfo.wfx.cbSize = sizeof(aacformat) - sizeof(WAVEFORMATEX); + hr = MFInitMediaTypeFromWaveFormatEx(mediatype, (WAVEFORMATEX *)&aacformat, sizeof(aacformat)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetGUID(mediatype, &MF_MT_SUBTYPE, &MFAudioFormat_RAW_AAC); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + + hr = IMFMediaType_DeleteItem(mediatype, &MF_MT_AAC_AUDIO_PROFILE_LEVEL_INDICATION); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_DeleteItem(mediatype, &MF_MT_AAC_PAYLOAD_TYPE); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetBlob(mediatype, &MF_MT_USER_DATA, aac_codec_data, 
sizeof(aac_codec_data)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + + hr = MFCreateWaveFormatExFromMFMediaType(mediatype, (WAVEFORMATEX **)&wfx, &size, 0); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(wfx->wFormatTag == WAVE_FORMAT_RAW_AAC1, "got wFormatTag %#x\n", wfx->wFormatTag); + ok(wfx->cbSize == sizeof(aac_codec_data), "got cbSize %u\n", wfx->cbSize); + + hr = MFInitMediaTypeFromWaveFormatEx(mediatype, wfx, sizeof(*wfx) + wfx->cbSize); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT32(mediatype, &MF_MT_AAC_PAYLOAD_TYPE, &value); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Failed to get attribute, hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT32(mediatype, &MF_MT_AAC_AUDIO_PROFILE_LEVEL_INDICATION, &value); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Failed to get attribute, hr %#lx.\n", hr); + + CoTaskMemFree(wfx); + + IMFMediaType_Release(mediatype); } @@ -7120,16 +7427,74 @@ static void test_MFCreateMFVideoFormatFromMFMediaType(void) { MFVIDEOFORMAT *video_format; IMFMediaType *media_type; - UINT32 size; + UINT32 size, expect_size; + PALETTEENTRY palette[64]; + BYTE codec_data[32]; HRESULT hr; + hr = MFCreateMediaType(&media_type); ok(hr == S_OK, "Failed to create media type, hr %#lx.\n", hr); + expect_size = sizeof(*video_format); + hr = MFCreateMFVideoFormatFromMFMediaType(media_type, &video_format, &size); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(!!video_format, "Unexpected format.\n"); + ok(size == expect_size, "Unexpected size %u.\n", size); + ok(video_format->dwSize == size, "Unexpected size %u.\n", size); + CoTaskMemFree(video_format); + + + hr = IMFMediaType_SetGUID(media_type, &MF_MT_MAJOR_TYPE, &MFMediaType_Video); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &MFVideoFormat_RGB32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetUINT64(media_type, &MF_MT_FRAME_SIZE, (UINT64)123 << 32 | 456); + ok(hr == S_OK, "Unexpected hr %#lx.\n", 
hr); + hr = IMFMediaType_SetUINT32(media_type, &MF_MT_SAMPLE_SIZE, 123 * 456 * 4); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = MFCreateMFVideoFormatFromMFMediaType(media_type, &video_format, &size); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(video_format->videoInfo.dwWidth == 123, "got %lu.\n", video_format->videoInfo.dwWidth); + ok(video_format->videoInfo.dwHeight == 456, "got %lu.\n", video_format->videoInfo.dwHeight); + ok(video_format->videoInfo.VideoFlags == 0, "got %#I64x.\n", video_format->videoInfo.VideoFlags); + CoTaskMemFree(video_format); + + hr = IMFMediaType_SetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, 123 * 4); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = MFCreateMFVideoFormatFromMFMediaType(media_type, &video_format, &size); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(video_format->videoInfo.VideoFlags == 0, "got %#I64x.\n", video_format->videoInfo.VideoFlags); + CoTaskMemFree(video_format); + + hr = IMFMediaType_SetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, -123 * 4); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = MFCreateMFVideoFormatFromMFMediaType(media_type, &video_format, &size); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(video_format->videoInfo.VideoFlags == MFVideoFlag_BottomUpLinearRep, "got %#I64x.\n", video_format->videoInfo.VideoFlags); + CoTaskMemFree(video_format); + + + memset(palette, 0xa5, sizeof(palette)); + expect_size = offsetof(MFVIDEOFORMAT, surfaceInfo.Palette[ARRAY_SIZE(palette) + 1]); + hr = IMFMediaType_SetBlob(media_type, &MF_MT_PALETTE, (BYTE *)palette, sizeof(palette)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = MFCreateMFVideoFormatFromMFMediaType(media_type, &video_format, &size); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(!!video_format, "Unexpected format.\n"); + ok(size == expect_size, "Unexpected size %u.\n", size); + ok(video_format->dwSize == size, "Unexpected size %u.\n", size); + CoTaskMemFree(video_format); + + memset(codec_data, 0xcd, 
sizeof(codec_data)); + expect_size += sizeof(codec_data); + hr = IMFMediaType_SetBlob(media_type, &MF_MT_USER_DATA, codec_data, sizeof(codec_data)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); hr = MFCreateMFVideoFormatFromMFMediaType(media_type, &video_format, &size); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); ok(!!video_format, "Unexpected format.\n"); - ok(video_format->dwSize == size && size == sizeof(*video_format), "Unexpected size %u.\n", size); + ok(size == expect_size, "Unexpected size %u.\n", size); + ok(video_format->dwSize == size, "Unexpected size %u.\n", size); CoTaskMemFree(video_format); IMFMediaType_Release(media_type); @@ -7138,13 +7503,18 @@ static void test_MFCreateMFVideoFormatFromMFMediaType(void) static void test_MFInitAMMediaTypeFromMFMediaType(void) { static const MFVideoArea aperture = {.OffsetX = {.fract = 1, .value = 2}, .OffsetY = {.fract = 3, .value = 4}, .Area={56,78}}; + static const BYTE dummy_mpeg_sequence[] = {0x04,0x05,0x06,0x07,0x08}; static const BYTE dummy_user_data[] = {0x01,0x02,0x03}; WAVEFORMATEXTENSIBLE *wave_format_ext; VIDEOINFOHEADER *video_info; WAVEFORMATEX *wave_format; - IMFMediaType *media_type; + MPEG1VIDEOINFO *mpeg1_info; + MPEG2VIDEOINFO *mpeg2_info; + IMFMediaType *media_type, *other_type; AM_MEDIA_TYPE am_type; + MFVideoArea *area; + UINT32 value32; HRESULT hr; hr = MFCreateMediaType(&media_type); @@ -7173,6 +7543,9 @@ static void test_MFInitAMMediaTypeFromMFMediaType(void) ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); ok(IsEqualGUID(&am_type.majortype, &MFMediaType_Audio), "got %s.\n", debugstr_guid(&am_type.majortype)); ok(IsEqualGUID(&am_type.subtype, &MFAudioFormat_PCM), "got %s.\n", debugstr_guid(&am_type.subtype)); + ok(am_type.bFixedSizeSamples == 0, "got %u\n", am_type.bFixedSizeSamples); + ok(am_type.bTemporalCompression == 1, "got %u\n", am_type.bTemporalCompression); + ok(am_type.lSampleSize == 0, "got %lu\n", am_type.lSampleSize); ok(IsEqualGUID(&am_type.formattype, &FORMAT_WaveFormatEx), 
"got %s.\n", debugstr_guid(&am_type.formattype)); ok(am_type.cbFormat == sizeof(WAVEFORMATEX), "got %lu\n", am_type.cbFormat); CoTaskMemFree(am_type.pbFormat); @@ -7209,23 +7582,49 @@ static void test_MFInitAMMediaTypeFromMFMediaType(void) ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); ok(IsEqualGUID(&am_type.majortype, &MFMediaType_Video), "got %s.\n", debugstr_guid(&am_type.majortype)); ok(IsEqualGUID(&am_type.subtype, &MEDIASUBTYPE_RGB32), "got %s.\n", debugstr_guid(&am_type.subtype)); + ok(am_type.bFixedSizeSamples == 1, "got %u\n", am_type.bFixedSizeSamples); + ok(am_type.bTemporalCompression == 0, "got %u\n", am_type.bTemporalCompression); + ok(am_type.lSampleSize == 0, "got %lu\n", am_type.lSampleSize); ok(IsEqualGUID(&am_type.formattype, &FORMAT_VideoInfo), "got %s.\n", debugstr_guid(&am_type.formattype)); ok(am_type.cbFormat == sizeof(VIDEOINFOHEADER), "got %lu\n", am_type.cbFormat); CoTaskMemFree(am_type.pbFormat); hr = MFInitAMMediaTypeFromMFMediaType(media_type, FORMAT_VideoInfo2, &am_type); - todo_wine ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); ok(IsEqualGUID(&am_type.majortype, &MFMediaType_Video), "got %s.\n", debugstr_guid(&am_type.majortype)); - todo_wine ok(IsEqualGUID(&am_type.subtype, &MEDIASUBTYPE_RGB32), "got %s.\n", debugstr_guid(&am_type.subtype)); - todo_wine ok(IsEqualGUID(&am_type.formattype, &FORMAT_VideoInfo2), "got %s.\n", debugstr_guid(&am_type.formattype)); - todo_wine ok(am_type.cbFormat == sizeof(VIDEOINFOHEADER2), "got %lu\n", am_type.cbFormat); + ok(IsEqualGUID(&am_type.subtype, &MEDIASUBTYPE_RGB32), "got %s.\n", debugstr_guid(&am_type.subtype)); + ok(am_type.bFixedSizeSamples == 1, "got %u\n", am_type.bFixedSizeSamples); + ok(am_type.bTemporalCompression == 0, "got %u\n", am_type.bTemporalCompression); + ok(am_type.lSampleSize == 0, "got %lu\n", am_type.lSampleSize); + ok(IsEqualGUID(&am_type.formattype, &FORMAT_VideoInfo2), "got %s.\n", debugstr_guid(&am_type.formattype)); + 
ok(am_type.cbFormat == sizeof(VIDEOINFOHEADER2), "got %lu\n", am_type.cbFormat); CoTaskMemFree(am_type.pbFormat); hr = MFInitAMMediaTypeFromMFMediaType(media_type, FORMAT_MFVideoFormat, &am_type); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); ok(IsEqualGUID(&am_type.majortype, &MFMediaType_Video), "got %s.\n", debugstr_guid(&am_type.majortype)); ok(IsEqualGUID(&am_type.subtype, &MFVideoFormat_RGB32), "got %s.\n", debugstr_guid(&am_type.subtype)); + ok(am_type.bFixedSizeSamples == 0, "got %u\n", am_type.bFixedSizeSamples); + ok(am_type.bTemporalCompression == 1, "got %u\n", am_type.bTemporalCompression); + ok(am_type.lSampleSize == 0, "got %lu\n", am_type.lSampleSize); ok(IsEqualGUID(&am_type.formattype, &FORMAT_MFVideoFormat), "got %s.\n", debugstr_guid(&am_type.formattype)); ok(am_type.cbFormat == sizeof(MFVIDEOFORMAT), "got %lu\n", am_type.cbFormat); CoTaskMemFree(am_type.pbFormat); + hr = MFInitAMMediaTypeFromMFMediaType(media_type, FORMAT_MPEGVideo, &am_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(IsEqualGUID(&am_type.majortype, &MFMediaType_Video), "got %s.\n", debugstr_guid(&am_type.majortype)); + ok(IsEqualGUID(&am_type.subtype, &MEDIASUBTYPE_RGB32), "got %s.\n", debugstr_guid(&am_type.subtype)); + ok(am_type.bFixedSizeSamples == 1, "got %u\n", am_type.bFixedSizeSamples); + ok(am_type.bTemporalCompression == 0, "got %u\n", am_type.bTemporalCompression); + ok(am_type.lSampleSize == 0, "got %lu\n", am_type.lSampleSize); + ok(IsEqualGUID(&am_type.formattype, &FORMAT_MPEGVideo), "got %s.\n", debugstr_guid(&am_type.formattype)); + ok(am_type.cbFormat == sizeof(MPEG1VIDEOINFO), "got %lu\n", am_type.cbFormat); + CoTaskMemFree(am_type.pbFormat); + hr = MFInitAMMediaTypeFromMFMediaType(media_type, FORMAT_MPEG2Video, &am_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(IsEqualGUID(&am_type.majortype, &MFMediaType_Video), "got %s.\n", debugstr_guid(&am_type.majortype)); + ok(IsEqualGUID(&am_type.subtype, &MEDIASUBTYPE_RGB32), "got %s.\n", 
debugstr_guid(&am_type.subtype)); + ok(IsEqualGUID(&am_type.formattype, &FORMAT_MPEG2Video), "got %s.\n", debugstr_guid(&am_type.formattype)); + ok(am_type.cbFormat == sizeof(MPEG2VIDEOINFO), "got %lu\n", am_type.cbFormat); + CoTaskMemFree(am_type.pbFormat); /* test WAVEFORMATEX mapping */ @@ -7564,10 +7963,11 @@ static void test_MFInitAMMediaTypeFromMFMediaType(void) video_info = (VIDEOINFOHEADER *)am_type.pbFormat; ok(video_info->bmiHeader.biWidth == 123, "got %lu\n", video_info->bmiHeader.biWidth); ok(video_info->bmiHeader.biHeight == 456, "got %lu\n", video_info->bmiHeader.biHeight); - ok(video_info->bmiHeader.biSizeImage == 224352, "got %lu\n", video_info->bmiHeader.biSizeImage); + ok(video_info->bmiHeader.biSizeImage == 123 * 456 * 4, "got %lu\n", video_info->bmiHeader.biSizeImage); CoTaskMemFree(am_type.pbFormat); IMFMediaType_DeleteAllItems(media_type); + /* MF_MT_MINIMUM_DISPLAY_APERTURE has no effect */ hr = IMFMediaType_SetGUID(media_type, &MF_MT_MAJOR_TYPE, &MFMediaType_Video); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); hr = IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &MFVideoFormat_RGB32); @@ -7576,7 +7976,7 @@ static void test_MFInitAMMediaTypeFromMFMediaType(void) ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); hr = IMFMediaType_SetBlob(media_type, &MF_MT_MINIMUM_DISPLAY_APERTURE, (BYTE *)&aperture, sizeof(aperture)); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - hr = IMFMediaType_SetUINT32(media_type, &MF_MT_SAMPLE_SIZE, 123456); + hr = IMFMediaType_SetUINT32(media_type, &MF_MT_SAMPLE_SIZE, 12345678); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); hr = MFInitAMMediaTypeFromMFMediaType(media_type, GUID_NULL, &am_type); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); @@ -7591,44 +7991,355 @@ static void test_MFInitAMMediaTypeFromMFMediaType(void) ok(video_info->rcTarget.bottom == 0, "got %lu\n", video_info->rcTarget.bottom); ok(video_info->bmiHeader.biWidth == 123, "got %lu\n", video_info->bmiHeader.biWidth); ok(video_info->bmiHeader.biHeight == 456, 
"got %lu\n", video_info->bmiHeader.biHeight); - ok(video_info->bmiHeader.biSizeImage == 224352, "got %lu\n", video_info->bmiHeader.biSizeImage); + ok(video_info->bmiHeader.biSizeImage == 123 * 456 * 4, "got %lu\n", video_info->bmiHeader.biSizeImage); CoTaskMemFree(am_type.pbFormat); IMFMediaType_DeleteAllItems(media_type); + /* MF_MT_DEFAULT_STRIDE / MF_MT_FRAME_SIZE mismatch is translated into rcSource / rcTarget */ hr = IMFMediaType_SetGUID(media_type, &MF_MT_MAJOR_TYPE, &MFMediaType_Video); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); hr = IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &MFVideoFormat_RGB32); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); hr = IMFMediaType_SetUINT64(media_type, &MF_MT_FRAME_SIZE, (UINT64)123 << 32 | 456); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - hr = IMFMediaType_SetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, -984); + hr = IMFMediaType_SetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, -246 * 4); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetUINT32(media_type, &MF_MT_SAMPLE_SIZE, 12345678); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetBlob(media_type, &MF_MT_MINIMUM_DISPLAY_APERTURE, (BYTE *)&aperture, sizeof(aperture)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetBlob(media_type, &MF_MT_GEOMETRIC_APERTURE, (BYTE *)&aperture, sizeof(aperture)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetBlob(media_type, &MF_MT_PAN_SCAN_APERTURE, (BYTE *)&aperture, sizeof(aperture)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetUINT32(media_type, &MF_MT_PAN_SCAN_ENABLED, 1); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); hr = MFInitAMMediaTypeFromMFMediaType(media_type, GUID_NULL, &am_type); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); video_info = (VIDEOINFOHEADER *)am_type.pbFormat; + ok(video_info->rcSource.left == 0, "got %lu\n", video_info->rcSource.left); + ok(video_info->rcSource.right == 123, "got %lu\n", 
video_info->rcSource.right); + ok(video_info->rcSource.top == 0, "got %lu\n", video_info->rcSource.top); + ok(video_info->rcSource.bottom == 456, "got %lu\n", video_info->rcSource.bottom); + ok(video_info->rcTarget.left == 0, "got %lu\n", video_info->rcTarget.left); + ok(video_info->rcTarget.right == 123, "got %lu\n", video_info->rcTarget.right); + ok(video_info->rcTarget.top == 0, "got %lu\n", video_info->rcTarget.top); + ok(video_info->rcTarget.bottom == 456, "got %lu\n", video_info->rcTarget.bottom); ok(video_info->bmiHeader.biWidth == 246, "got %lu\n", video_info->bmiHeader.biWidth); ok(video_info->bmiHeader.biHeight == 456, "got %ld\n", video_info->bmiHeader.biHeight); - ok(video_info->bmiHeader.biSizeImage == 448704, "got %lu\n", video_info->bmiHeader.biSizeImage); + ok(video_info->bmiHeader.biSizeImage == 246 * 456 * 4, "got %lu\n", video_info->bmiHeader.biSizeImage); + CoTaskMemFree(am_type.pbFormat); + + /* positive stride only changes biHeight */ + hr = IMFMediaType_SetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, 246 * 4); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = MFInitAMMediaTypeFromMFMediaType(media_type, GUID_NULL, &am_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + video_info = (VIDEOINFOHEADER *)am_type.pbFormat; + ok(video_info->rcSource.left == 0, "got %lu\n", video_info->rcSource.left); + ok(video_info->rcSource.right == 123, "got %lu\n", video_info->rcSource.right); + ok(video_info->rcSource.top == 0, "got %lu\n", video_info->rcSource.top); + ok(video_info->rcSource.bottom == 456, "got %lu\n", video_info->rcSource.bottom); + ok(video_info->rcTarget.left == 0, "got %lu\n", video_info->rcTarget.left); + ok(video_info->rcTarget.right == 123, "got %lu\n", video_info->rcTarget.right); + ok(video_info->rcTarget.top == 0, "got %lu\n", video_info->rcTarget.top); + ok(video_info->rcTarget.bottom == 456, "got %lu\n", video_info->rcTarget.bottom); + ok(video_info->bmiHeader.biWidth == 246, "got %lu\n", video_info->bmiHeader.biWidth); + 
ok(video_info->bmiHeader.biHeight == -456, "got %ld\n", video_info->bmiHeader.biHeight); + ok(video_info->bmiHeader.biSizeImage == 246 * 456 * 4, "got %lu\n", video_info->bmiHeader.biSizeImage); CoTaskMemFree(am_type.pbFormat); IMFMediaType_DeleteAllItems(media_type); + /* same thing happens with other formats */ hr = IMFMediaType_SetGUID(media_type, &MF_MT_MAJOR_TYPE, &MFMediaType_Video); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - hr = IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &MFVideoFormat_RGB32); + hr = IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &MFVideoFormat_NV12); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); hr = IMFMediaType_SetUINT64(media_type, &MF_MT_FRAME_SIZE, (UINT64)123 << 32 | 456); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - hr = IMFMediaType_SetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, 984); + hr = IMFMediaType_SetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, 246); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetUINT32(media_type, &MF_MT_SAMPLE_SIZE, 12345678); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); hr = MFInitAMMediaTypeFromMFMediaType(media_type, GUID_NULL, &am_type); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); video_info = (VIDEOINFOHEADER *)am_type.pbFormat; + ok(video_info->rcSource.left == 0, "got %lu\n", video_info->rcSource.left); + ok(video_info->rcSource.right == 123, "got %lu\n", video_info->rcSource.right); + ok(video_info->rcSource.top == 0, "got %lu\n", video_info->rcSource.top); + ok(video_info->rcSource.bottom == 456, "got %lu\n", video_info->rcSource.bottom); + ok(video_info->rcTarget.left == 0, "got %lu\n", video_info->rcTarget.left); + ok(video_info->rcTarget.right == 123, "got %lu\n", video_info->rcTarget.right); + ok(video_info->rcTarget.top == 0, "got %lu\n", video_info->rcTarget.top); + ok(video_info->rcTarget.bottom == 456, "got %lu\n", video_info->rcTarget.bottom); ok(video_info->bmiHeader.biWidth == 246, "got %lu\n", video_info->bmiHeader.biWidth); - 
ok(video_info->bmiHeader.biHeight == -456, "got %ld\n", video_info->bmiHeader.biHeight); - ok(video_info->bmiHeader.biSizeImage == 448704, "got %lu\n", video_info->bmiHeader.biSizeImage); + ok(video_info->bmiHeader.biHeight == 456, "got %ld\n", video_info->bmiHeader.biHeight); + ok(video_info->bmiHeader.biSizeImage == 246 * 456 * 3 / 2, "got %lu\n", video_info->bmiHeader.biSizeImage); + CoTaskMemFree(am_type.pbFormat); + IMFMediaType_DeleteAllItems(media_type); + + hr = IMFMediaType_SetGUID(media_type, &MF_MT_MAJOR_TYPE, &MFMediaType_Video); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &MFVideoFormat_RGB32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetUINT64(media_type, &MF_MT_FRAME_SIZE, (UINT64)123 << 32 | 456); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, -246 * 4); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = MFInitAMMediaTypeFromMFMediaType(media_type, GUID_NULL, &am_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + video_info = (VIDEOINFOHEADER *)am_type.pbFormat; + ok(video_info->bmiHeader.biWidth == 246, "got %lu\n", video_info->bmiHeader.biWidth); + ok(video_info->bmiHeader.biHeight == 456, "got %ld\n", video_info->bmiHeader.biHeight); + ok(video_info->bmiHeader.biSizeImage == 246 * 456 * 4, "got %lu\n", video_info->bmiHeader.biSizeImage); + CoTaskMemFree(am_type.pbFormat); + IMFMediaType_DeleteAllItems(media_type); + + hr = IMFMediaType_SetGUID(media_type, &MF_MT_MAJOR_TYPE, &MFMediaType_Video); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &MFVideoFormat_RGB32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetUINT64(media_type, &MF_MT_FRAME_SIZE, (UINT64)123 << 32 | 456); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, 246 * 4); + ok(hr == S_OK, 
"Unexpected hr %#lx.\n", hr); + hr = MFInitAMMediaTypeFromMFMediaType(media_type, GUID_NULL, &am_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + video_info = (VIDEOINFOHEADER *)am_type.pbFormat; + ok(video_info->bmiHeader.biWidth == 246, "got %lu\n", video_info->bmiHeader.biWidth); + ok(video_info->bmiHeader.biHeight == -456, "got %ld\n", video_info->bmiHeader.biHeight); + ok(video_info->bmiHeader.biSizeImage == 246 * 456 * 4, "got %lu\n", video_info->bmiHeader.biSizeImage); + CoTaskMemFree(am_type.pbFormat); + IMFMediaType_DeleteAllItems(media_type); + + /* aperture is lost with VIDEOINFOHEADER(2), preserved with MFVIDEOFORMAT */ + hr = IMFMediaType_SetGUID(media_type, &MF_MT_MAJOR_TYPE, &MFMediaType_Video); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &MFVideoFormat_NV12); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetUINT64(media_type, &MF_MT_FRAME_SIZE, (UINT64)1920 << 32 | 1088); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetBlob(media_type, &MF_MT_MINIMUM_DISPLAY_APERTURE, (BYTE *)&aperture, sizeof(aperture)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + + hr = MFCreateMediaType(&other_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + + hr = MFInitAMMediaTypeFromMFMediaType(media_type, FORMAT_VideoInfo, &am_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = MFInitMediaTypeFromAMMediaType(other_type, &am_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetAllocatedBlob(other_type, &MF_MT_MINIMUM_DISPLAY_APERTURE, (BYTE **)&area, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + IMFMediaType_DeleteAllItems(other_type); + CoTaskMemFree(am_type.pbFormat); + + hr = MFInitAMMediaTypeFromMFMediaType(media_type, FORMAT_VideoInfo2, &am_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = MFInitMediaTypeFromAMMediaType(other_type, &am_type); + ok(hr == S_OK, "Unexpected hr 
%#lx.\n", hr); + hr = IMFMediaType_GetAllocatedBlob(other_type, &MF_MT_MINIMUM_DISPLAY_APERTURE, (BYTE **)&area, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + IMFMediaType_DeleteAllItems(other_type); + CoTaskMemFree(am_type.pbFormat); + + hr = MFInitAMMediaTypeFromMFMediaType(media_type, FORMAT_MFVideoFormat, &am_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = MFInitMediaTypeFromAMMediaType(other_type, &am_type); + todo_wine ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetAllocatedBlob(other_type, &MF_MT_MINIMUM_DISPLAY_APERTURE, (BYTE **)&area, &value32); + todo_wine ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + if (hr == S_OK) CoTaskMemFree(area); + IMFMediaType_DeleteAllItems(other_type); + CoTaskMemFree(am_type.pbFormat); + + IMFMediaType_Release(other_type); + IMFMediaType_DeleteAllItems(media_type); + + + /* MEDIASUBTYPE_MPEG1Packet and MEDIASUBTYPE_MPEG1Payload use FORMAT_MPEGVideo by default */ + + hr = IMFMediaType_SetGUID(media_type, &MF_MT_MAJOR_TYPE, &MFMediaType_Video); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &MEDIASUBTYPE_MPEG1Packet); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = MFInitAMMediaTypeFromMFMediaType(media_type, GUID_NULL, &am_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(IsEqualGUID(&am_type.majortype, &MFMediaType_Video), "got %s.\n", debugstr_guid(&am_type.majortype)); + ok(IsEqualGUID(&am_type.subtype, &MEDIASUBTYPE_MPEG1Packet), "got %s.\n", debugstr_guid(&am_type.subtype)); + ok(am_type.bFixedSizeSamples == 0, "got %u\n", am_type.bFixedSizeSamples); + ok(am_type.bTemporalCompression == 1, "got %u\n", am_type.bTemporalCompression); + ok(am_type.lSampleSize == 0, "got %lu\n", am_type.lSampleSize); + ok(IsEqualGUID(&am_type.formattype, &FORMAT_MPEGVideo), "got %s.\n", debugstr_guid(&am_type.formattype)); + ok(am_type.cbFormat == sizeof(MPEG1VIDEOINFO), "got %lu\n", am_type.cbFormat); + 
CoTaskMemFree(am_type.pbFormat); + + hr = IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &MEDIASUBTYPE_MPEG1Payload); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = MFInitAMMediaTypeFromMFMediaType(media_type, GUID_NULL, &am_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(IsEqualGUID(&am_type.majortype, &MFMediaType_Video), "got %s.\n", debugstr_guid(&am_type.majortype)); + ok(IsEqualGUID(&am_type.subtype, &MEDIASUBTYPE_MPEG1Payload), "got %s.\n", debugstr_guid(&am_type.subtype)); + ok(am_type.bFixedSizeSamples == 0, "got %u\n", am_type.bFixedSizeSamples); + ok(am_type.bTemporalCompression == 1, "got %u\n", am_type.bTemporalCompression); + ok(am_type.lSampleSize == 0, "got %lu\n", am_type.lSampleSize); + ok(IsEqualGUID(&am_type.formattype, &FORMAT_MPEGVideo), "got %s.\n", debugstr_guid(&am_type.formattype)); + ok(am_type.cbFormat == sizeof(MPEG1VIDEOINFO), "got %lu\n", am_type.cbFormat); + ok(am_type.cbFormat == sizeof(MPEG1VIDEOINFO), "got %lu\n", am_type.cbFormat); + mpeg1_info = (MPEG1VIDEOINFO *)am_type.pbFormat; + ok(mpeg1_info->cbSequenceHeader == 0, "got %lu\n", mpeg1_info->cbSequenceHeader); + ok(mpeg1_info->dwStartTimeCode == 0, "got %lu\n", mpeg1_info->dwStartTimeCode); + ok(mpeg1_info->hdr.bmiHeader.biPlanes == 1, "got %u\n", mpeg1_info->hdr.bmiHeader.biPlanes); + ok(mpeg1_info->hdr.bmiHeader.biBitCount == 0, "got %u\n", mpeg1_info->hdr.bmiHeader.biBitCount); + todo_wine ok(mpeg1_info->hdr.bmiHeader.biCompression == 0, "got %lu\n", mpeg1_info->hdr.bmiHeader.biCompression); + CoTaskMemFree(am_type.pbFormat); + + hr = IMFMediaType_SetUINT64(media_type, &MF_MT_FRAME_SIZE, (UINT64)12 << 32 | 34); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = MFInitAMMediaTypeFromMFMediaType(media_type, GUID_NULL, &am_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(am_type.cbFormat == sizeof(MPEG1VIDEOINFO), "got %lu\n", am_type.cbFormat); + mpeg1_info = (MPEG1VIDEOINFO *)am_type.pbFormat; + ok(mpeg1_info->hdr.bmiHeader.biPlanes == 1, 
"got %u\n", mpeg1_info->hdr.bmiHeader.biPlanes); + ok(mpeg1_info->hdr.bmiHeader.biBitCount == 0, "got %u\n", mpeg1_info->hdr.bmiHeader.biBitCount); + ok(mpeg1_info->hdr.bmiHeader.biWidth == 12, "got %lu\n", mpeg1_info->hdr.bmiHeader.biWidth); + ok(mpeg1_info->hdr.bmiHeader.biHeight == 34, "got %lu\n", mpeg1_info->hdr.bmiHeader.biHeight); + ok(mpeg1_info->hdr.bmiHeader.biSizeImage == 0, "got %lu\n", mpeg1_info->hdr.bmiHeader.biSizeImage); + ok(mpeg1_info->hdr.bmiHeader.biXPelsPerMeter == 0, "got %lu\n", mpeg1_info->hdr.bmiHeader.biXPelsPerMeter); + ok(mpeg1_info->hdr.bmiHeader.biYPelsPerMeter == 0, "got %lu\n", mpeg1_info->hdr.bmiHeader.biYPelsPerMeter); + CoTaskMemFree(am_type.pbFormat); + + hr = IMFMediaType_SetUINT64(media_type, &MF_MT_PIXEL_ASPECT_RATIO, (UINT64)12 << 32 | 34); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = MFInitAMMediaTypeFromMFMediaType(media_type, GUID_NULL, &am_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(am_type.cbFormat == sizeof(MPEG1VIDEOINFO), "got %lu\n", am_type.cbFormat); + mpeg1_info = (MPEG1VIDEOINFO *)am_type.pbFormat; + ok(mpeg1_info->hdr.bmiHeader.biXPelsPerMeter == 0, "got %lu\n", mpeg1_info->hdr.bmiHeader.biXPelsPerMeter); + ok(mpeg1_info->hdr.bmiHeader.biYPelsPerMeter == 0, "got %lu\n", mpeg1_info->hdr.bmiHeader.biYPelsPerMeter); + CoTaskMemFree(am_type.pbFormat); + + hr = IMFMediaType_SetUINT32(media_type, &MF_MT_MPEG_START_TIME_CODE, 1234); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = MFInitAMMediaTypeFromMFMediaType(media_type, GUID_NULL, &am_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(am_type.cbFormat == sizeof(MPEG1VIDEOINFO), "got %lu\n", am_type.cbFormat); + mpeg1_info = (MPEG1VIDEOINFO *)am_type.pbFormat; + ok(mpeg1_info->dwStartTimeCode == 1234, "got %lu\n", mpeg1_info->dwStartTimeCode); + CoTaskMemFree(am_type.pbFormat); + + /* MF_MT_USER_DATA is ignored */ + + hr = IMFMediaType_SetBlob(media_type, &MF_MT_USER_DATA, (BYTE *)dummy_user_data, sizeof(dummy_user_data)); + 
ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = MFInitAMMediaTypeFromMFMediaType(media_type, GUID_NULL, &am_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(am_type.cbFormat == sizeof(MPEG1VIDEOINFO), "got %lu\n", am_type.cbFormat); + mpeg1_info = (MPEG1VIDEOINFO *)am_type.pbFormat; + ok(mpeg1_info->hdr.bmiHeader.biSize == 0, "got %lu\n", mpeg1_info->hdr.bmiHeader.biSize); + ok(mpeg1_info->cbSequenceHeader == 0, "got %lu\n", mpeg1_info->cbSequenceHeader); + CoTaskMemFree(am_type.pbFormat); + + /* MF_MT_MPEG_SEQUENCE_HEADER is used instead in MPEG1VIDEOINFO */ + + hr = IMFMediaType_SetBlob(media_type, &MF_MT_MPEG_SEQUENCE_HEADER, (BYTE *)dummy_mpeg_sequence, sizeof(dummy_mpeg_sequence)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = MFInitAMMediaTypeFromMFMediaType(media_type, GUID_NULL, &am_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(am_type.cbFormat == sizeof(MPEG1VIDEOINFO) + sizeof(dummy_mpeg_sequence), "got %lu\n", am_type.cbFormat); + mpeg1_info = (MPEG1VIDEOINFO *)am_type.pbFormat; + ok(mpeg1_info->hdr.bmiHeader.biSize == 0, "got %lu\n", mpeg1_info->hdr.bmiHeader.biSize); + ok(mpeg1_info->cbSequenceHeader == sizeof(dummy_mpeg_sequence), "got %lu\n", mpeg1_info->cbSequenceHeader); + ok(!memcmp(mpeg1_info->bSequenceHeader, dummy_mpeg_sequence, mpeg1_info->cbSequenceHeader), "got wrong data\n"); + CoTaskMemFree(am_type.pbFormat); + + /* MFVIDEOFORMAT loses MF_MT_MPEG_SEQUENCE_HEADER */ + + hr = IMFMediaType_DeleteItem(media_type, &MF_MT_USER_DATA); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = MFInitAMMediaTypeFromMFMediaType(media_type, FORMAT_MFVideoFormat, &am_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(am_type.cbFormat == sizeof(MFVIDEOFORMAT), "got %lu\n", am_type.cbFormat); + CoTaskMemFree(am_type.pbFormat); + + IMFMediaType_DeleteAllItems(media_type); + + + /* MEDIASUBTYPE_MPEG2_VIDEO uses FORMAT_MPEG2Video by default */ + + hr = IMFMediaType_SetGUID(media_type, &MF_MT_MAJOR_TYPE, 
&MFMediaType_Video); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &MEDIASUBTYPE_MPEG2_VIDEO); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = MFInitAMMediaTypeFromMFMediaType(media_type, GUID_NULL, &am_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(IsEqualGUID(&am_type.majortype, &MFMediaType_Video), "got %s.\n", debugstr_guid(&am_type.majortype)); + ok(IsEqualGUID(&am_type.subtype, &MEDIASUBTYPE_MPEG2_VIDEO), "got %s.\n", debugstr_guid(&am_type.subtype)); + ok(IsEqualGUID(&am_type.formattype, &FORMAT_MPEG2Video), "got %s.\n", debugstr_guid(&am_type.formattype)); + ok(am_type.cbFormat == sizeof(MPEG2VIDEOINFO), "got %lu\n", am_type.cbFormat); + mpeg2_info = (MPEG2VIDEOINFO *)am_type.pbFormat; + ok(mpeg2_info->dwStartTimeCode == 0, "got %lu\n", mpeg2_info->dwStartTimeCode); + ok(mpeg2_info->dwProfile == 0, "got %lu\n", mpeg2_info->dwProfile); + ok(mpeg2_info->dwLevel == 0, "got %lu\n", mpeg2_info->dwLevel); + ok(mpeg2_info->dwFlags == 0, "got %lu\n", mpeg2_info->dwFlags); + ok(mpeg2_info->cbSequenceHeader == 0, "got %lu\n", mpeg2_info->cbSequenceHeader); + CoTaskMemFree(am_type.pbFormat); + + hr = IMFMediaType_SetUINT32(media_type, &MF_MT_MPEG_START_TIME_CODE, 1234); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetUINT32(media_type, &MF_MT_MPEG2_PROFILE, 6); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetUINT32(media_type, &MF_MT_MPEG2_LEVEL, 7); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetUINT32(media_type, &MF_MT_MPEG2_FLAGS, 8910); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = MFInitAMMediaTypeFromMFMediaType(media_type, GUID_NULL, &am_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(am_type.cbFormat == sizeof(MPEG2VIDEOINFO), "got %lu\n", am_type.cbFormat); + mpeg2_info = (MPEG2VIDEOINFO *)am_type.pbFormat; + ok(mpeg2_info->dwStartTimeCode == 1234, "got %lu\n", mpeg2_info->dwStartTimeCode); + 
ok(mpeg2_info->dwProfile == 6, "got %lu\n", mpeg2_info->dwProfile); + ok(mpeg2_info->dwLevel == 7, "got %lu\n", mpeg2_info->dwLevel); + ok(mpeg2_info->dwFlags == 8910, "got %lu\n", mpeg2_info->dwFlags); + CoTaskMemFree(am_type.pbFormat); + + /* MF_MT_USER_DATA is ignored */ + + hr = IMFMediaType_SetBlob(media_type, &MF_MT_USER_DATA, (BYTE *)dummy_user_data, sizeof(dummy_user_data)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = MFInitAMMediaTypeFromMFMediaType(media_type, GUID_NULL, &am_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(am_type.cbFormat == sizeof(MPEG2VIDEOINFO), "got %lu\n", am_type.cbFormat); + mpeg2_info = (MPEG2VIDEOINFO *)am_type.pbFormat; + ok(mpeg2_info->hdr.bmiHeader.biSize == sizeof(mpeg2_info->hdr.bmiHeader), "got %lu\n", mpeg2_info->hdr.bmiHeader.biSize); + ok(mpeg2_info->cbSequenceHeader == 0, "got %lu\n", mpeg2_info->cbSequenceHeader); + CoTaskMemFree(am_type.pbFormat); + + /* MF_MT_MPEG_SEQUENCE_HEADER is used instead in MPEG2VIDEOINFO */ + + hr = IMFMediaType_SetBlob(media_type, &MF_MT_MPEG_SEQUENCE_HEADER, (BYTE *)dummy_mpeg_sequence, sizeof(dummy_mpeg_sequence)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = MFInitAMMediaTypeFromMFMediaType(media_type, GUID_NULL, &am_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(am_type.cbFormat == sizeof(MPEG2VIDEOINFO) + sizeof(dummy_mpeg_sequence), "got %lu\n", am_type.cbFormat); + mpeg2_info = (MPEG2VIDEOINFO *)am_type.pbFormat; + ok(mpeg2_info->hdr.bmiHeader.biSize == sizeof(mpeg2_info->hdr.bmiHeader), "got %lu\n", mpeg2_info->hdr.bmiHeader.biSize); + ok(mpeg2_info->cbSequenceHeader == sizeof(dummy_mpeg_sequence), "got %lu\n", mpeg2_info->cbSequenceHeader); + ok(!memcmp(mpeg2_info->dwSequenceHeader, dummy_mpeg_sequence, mpeg2_info->cbSequenceHeader), "got wrong data\n"); CoTaskMemFree(am_type.pbFormat); + + /* MFVIDEOFORMAT loses MF_MT_MPEG_SEQUENCE_HEADER */ + + hr = IMFMediaType_DeleteItem(media_type, &MF_MT_USER_DATA); + ok(hr == S_OK, "Unexpected hr 
%#lx.\n", hr); + hr = MFInitAMMediaTypeFromMFMediaType(media_type, FORMAT_MFVideoFormat, &am_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(am_type.cbFormat == sizeof(MFVIDEOFORMAT), "got %lu\n", am_type.cbFormat); + CoTaskMemFree(am_type.pbFormat); + IMFMediaType_DeleteAllItems(media_type); + IMFMediaType_Release(media_type); } @@ -7668,78 +8379,539 @@ static void test_MFCreateAMMediaTypeFromMFMediaType(void) IMFMediaType_Release(media_type); } -static void test_IMFMediaType_GetRepresentation(void) +static void test_MFInitMediaTypeFromMFVideoFormat(void) { - WAVEFORMATEX wfx = {.wFormatTag = WAVE_FORMAT_PCM}; + static const MFPaletteEntry expect_palette[] = {{{1}},{{2}},{{3}},{{4}},{{5}},{{6}},{{7}},{{8}}}; + static const BYTE expect_user_data[] = {6,5,4,3,2,1}; + MFPaletteEntry palette[ARRAY_SIZE(expect_palette)]; + BYTE user_data[sizeof(expect_user_data)]; + char buffer[sizeof(MFVIDEOFORMAT) + sizeof(palette) + sizeof(user_data)]; + MFVIDEOFORMAT format, *format_buf = (MFVIDEOFORMAT *)buffer; IMFMediaType *media_type; - AM_MEDIA_TYPE *am_type; + MFVideoArea aperture; + UINT32 i, value32; + UINT64 value64; HRESULT hr; + GUID guid; hr = MFCreateMediaType(&media_type); - ok(hr == S_OK, "Failed to create media type, hr %#lx.\n", hr); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - hr = IMFMediaType_GetRepresentation(media_type, GUID_NULL, (void **)&am_type); - ok(hr == MF_E_UNSUPPORTED_REPRESENTATION, "Unexpected hr %#lx.\n", hr); - hr = IMFMediaType_GetRepresentation(media_type, AM_MEDIA_TYPE_REPRESENTATION, (void **)&am_type); - ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); - hr = IMFMediaType_SetGUID(media_type, &MF_MT_MAJOR_TYPE, &MFMediaType_Audio); + hr = MFInitMediaTypeFromMFVideoFormat(media_type, NULL, 0); + ok(hr == E_INVALIDARG, "Unexpected hr %#lx.\n", hr); + + memset(&format, 0, sizeof(format)); + format.dwSize = sizeof(format) - 1; + hr = MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format) - 1); + ok(hr == 
E_INVALIDARG, "Unexpected hr %#lx.\n", hr); + format.dwSize = sizeof(format); + hr = MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format) - 1); + ok(hr == E_INVALIDARG, "Unexpected hr %#lx.\n", hr); + + hr = MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format)); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - hr = IMFMediaType_GetRepresentation(media_type, AM_MEDIA_TYPE_REPRESENTATION, (void **)&am_type); + memset(&guid, 0xcd, sizeof(guid)); + hr = IMFMediaType_GetGUID(media_type, &MF_MT_MAJOR_TYPE, &guid); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(IsEqualGUID(&guid, &MFMediaType_Video), "got %s.\n", debugstr_guid(&guid)); + hr = IMFMediaType_GetGUID(media_type, &MF_MT_SUBTYPE, &guid); ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); - hr = IMFMediaType_SetGUID(media_type, &MF_MT_MAJOR_TYPE, &MFMediaType_Video); + IMFMediaType_DeleteAllItems(media_type); + + format.guidFormat = MFVideoFormat_H264; + hr = MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format)); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - hr = IMFMediaType_GetRepresentation(media_type, AM_MEDIA_TYPE_REPRESENTATION, (void **)&am_type); + memset(&guid, 0xcd, sizeof(guid)); + hr = IMFMediaType_GetGUID(media_type, &MF_MT_SUBTYPE, &guid); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(IsEqualGUID(&guid, &MFVideoFormat_H264), "got %s.\n", debugstr_guid(&guid)); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_FIXED_SIZE_SAMPLES, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_ALL_SAMPLES_INDEPENDENT, &value32); ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + IMFMediaType_DeleteAllItems(media_type); - hr = IMFMediaType_SetGUID(media_type, &MF_MT_MAJOR_TYPE, &MFMediaType_Audio); + format.guidFormat = MFVideoFormat_RGB32; + hr = MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format)); ok(hr == S_OK, "Unexpected hr %#lx.\n", 
hr); - hr = IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &MFAudioFormat_PCM); + memset(&guid, 0xcd, sizeof(guid)); + hr = IMFMediaType_GetGUID(media_type, &MF_MT_SUBTYPE, &guid); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - hr = IMFMediaType_GetRepresentation(media_type, FORMAT_VideoInfo, (void **)&am_type); - ok(hr == E_INVALIDARG, "Unexpected hr %#lx.\n", hr); - hr = IMFMediaType_GetRepresentation(media_type, AM_MEDIA_TYPE_REPRESENTATION, (void **)&am_type); + ok(IsEqualGUID(&guid, &MFVideoFormat_RGB32), "got %s.\n", debugstr_guid(&guid)); + value32 = 0xdeadbeef; + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_FIXED_SIZE_SAMPLES, &value32); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - ok(IsEqualGUID(&am_type->majortype, &MFMediaType_Audio), "got %s.\n", debugstr_guid(&am_type->majortype)); - ok(IsEqualGUID(&am_type->subtype, &MFAudioFormat_PCM), "got %s.\n", debugstr_guid(&am_type->subtype)); - ok(IsEqualGUID(&am_type->formattype, &FORMAT_WaveFormatEx), "got %s.\n", debugstr_guid(&am_type->formattype)); - ok(am_type->cbFormat == sizeof(WAVEFORMATEX), "got %lu\n", am_type->cbFormat); - hr = IMFMediaType_FreeRepresentation(media_type, IID_IUnknown /* invalid format */, am_type); + ok(value32 == 1, "got %u.\n", value32); + value32 = 0xdeadbeef; + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_ALL_SAMPLES_INDEPENDENT, &value32); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - IMFMediaType_Release(media_type); + ok(value32 == 1, "got %u.\n", value32); + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_FRAME_SIZE, &value64); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_SAMPLE_SIZE, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + IMFMediaType_DeleteAllItems(media_type); - hr = MFCreateAudioMediaType(&wfx, (IMFAudioMediaType 
**)&media_type); + format.videoInfo.dwWidth = -123; + hr = MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format)); + todo_wine + ok(hr == HRESULT_FROM_WIN32(ERROR_ARITHMETIC_OVERFLOW), "Unexpected hr %#lx.\n", hr); + format.videoInfo.dwWidth = 123; + hr = MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format)); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - hr = IMFMediaType_GetRepresentation(media_type, GUID_NULL, (void **)&am_type); - ok(hr == MF_E_UNSUPPORTED_REPRESENTATION, "Unexpected hr %#lx.\n", hr); - hr = IMFMediaType_GetRepresentation(media_type, FORMAT_VideoInfo, (void **)&am_type); - ok(hr == E_INVALIDARG, "Unexpected hr %#lx.\n", hr); - hr = IMFMediaType_GetRepresentation(media_type, AM_MEDIA_TYPE_REPRESENTATION, (void **)&am_type); + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_FRAME_SIZE, &value64); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + IMFMediaType_DeleteAllItems(media_type); + format.videoInfo.dwHeight = -456; + hr = MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format)); + todo_wine + ok(hr == HRESULT_FROM_WIN32(ERROR_ARITHMETIC_OVERFLOW), "Unexpected hr %#lx.\n", hr); + format.videoInfo.dwHeight = 456; + hr = MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format)); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - ok(IsEqualGUID(&am_type->majortype, &MFMediaType_Audio), "got %s.\n", debugstr_guid(&am_type->majortype)); - ok(IsEqualGUID(&am_type->subtype, &MFAudioFormat_PCM), "got %s.\n", debugstr_guid(&am_type->subtype)); - ok(IsEqualGUID(&am_type->formattype, &FORMAT_WaveFormatEx), "got %s.\n", debugstr_guid(&am_type->formattype)); - ok(am_type->cbFormat == sizeof(WAVEFORMATEX), "got %lu\n", am_type->cbFormat); - hr = IMFMediaType_FreeRepresentation(media_type, AM_MEDIA_TYPE_REPRESENTATION, am_type); + value64 = 0xdeadbeef; + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_FRAME_SIZE, &value64); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - 
IMFMediaType_Release(media_type); + ok(value64 == (((UINT64)123 << 32) | 456), "got %#I64x.\n", value64); + value32 = 0xdeadbeef; + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == 123 * 4, "got %u.\n", value32); + value32 = 0xdeadbeef; + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_SAMPLE_SIZE, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == 123 * 4 * 456, "got %u.\n", value32); + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_PIXEL_ASPECT_RATIO, &value64); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + IMFMediaType_DeleteAllItems(media_type); - hr = MFCreateVideoMediaTypeFromSubtype(&MFVideoFormat_RGB32, (IMFVideoMediaType **)&media_type); + /* MFVideoFlag_BottomUpLinearRep flag inverts the stride */ + format.videoInfo.VideoFlags = MFVideoFlag_BottomUpLinearRep; + hr = MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format)); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - hr = IMFMediaType_GetRepresentation(media_type, GUID_NULL, (void **)&am_type); - ok(hr == MF_E_UNSUPPORTED_REPRESENTATION, "Unexpected hr %#lx.\n", hr); - hr = IMFMediaType_GetRepresentation(media_type, FORMAT_WaveFormatEx, (void **)&am_type); - ok(hr == MF_E_UNSUPPORTED_REPRESENTATION, "Unexpected hr %#lx.\n", hr); - hr = IMFMediaType_GetRepresentation(media_type, AM_MEDIA_TYPE_REPRESENTATION, (void **)&am_type); + value32 = 0xdeadbeef; + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, &value32); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - ok(IsEqualGUID(&am_type->majortype, &MFMediaType_Video), "got %s.\n", debugstr_guid(&am_type->majortype)); - ok(IsEqualGUID(&am_type->subtype, &MEDIASUBTYPE_RGB32), "got %s.\n", debugstr_guid(&am_type->subtype)); - ok(IsEqualGUID(&am_type->formattype, &FORMAT_VideoInfo), "got %s.\n", debugstr_guid(&am_type->formattype)); - ok(am_type->cbFormat == sizeof(VIDEOINFOHEADER), "got %lu\n", 
am_type->cbFormat); - hr = IMFMediaType_FreeRepresentation(media_type, AM_MEDIA_TYPE_REPRESENTATION, am_type); + ok(value32 == -123 * 4, "got %u.\n", value32); + value32 = 0xdeadbeef; + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_SAMPLE_SIZE, &value32); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == 123 * 4 * 456, "got %u.\n", value32); + IMFMediaType_DeleteAllItems(media_type); - IMFMediaType_Release(media_type); -} + /* MFVideoFlag_BottomUpLinearRep flag only works with RGB formats */ + format.guidFormat = MFVideoFormat_H264; + hr = MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_SAMPLE_SIZE, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + IMFMediaType_DeleteAllItems(media_type); + + format.guidFormat = MFVideoFormat_NV12; + hr = MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + value32 = 0xdeadbeef; + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + todo_wine + ok(value32 == 124, "got %u.\n", value32); + value32 = 0xdeadbeef; + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_SAMPLE_SIZE, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == 124 * 456 * 3 / 2, "got %u.\n", value32); + IMFMediaType_DeleteAllItems(media_type); + + format.videoInfo.PixelAspectRatio.Numerator = 7; + hr = MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_PIXEL_ASPECT_RATIO, &value64); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + IMFMediaType_DeleteAllItems(media_type); + 
format.videoInfo.PixelAspectRatio.Denominator = 8; + hr = MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + value64 = 0xdeadbeef; + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_PIXEL_ASPECT_RATIO, &value64); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value64 == (((UINT64)7 << 32) | 8), "got %#I64x.\n", value64); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_VIDEO_CHROMA_SITING, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + IMFMediaType_DeleteAllItems(media_type); + + format.videoInfo.SourceChromaSubsampling = MFVideoChromaSubsampling_MPEG2; + hr = MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + value32 = 0xdeadbeef; + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_VIDEO_CHROMA_SITING, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == MFVideoChromaSubsampling_MPEG2, "got %u.\n", value32); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_INTERLACE_MODE, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + IMFMediaType_DeleteAllItems(media_type); + + format.videoInfo.InterlaceMode = MFVideoInterlace_MixedInterlaceOrProgressive; + hr = MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + value32 = 0xdeadbeef; + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_INTERLACE_MODE, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == MFVideoInterlace_MixedInterlaceOrProgressive, "got %u.\n", value32); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_TRANSFER_FUNCTION, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + IMFMediaType_DeleteAllItems(media_type); + + format.videoInfo.TransferFunction = MFVideoTransFunc_709; + hr = MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format)); + ok(hr == S_OK, "Unexpected hr 
%#lx.\n", hr); + value32 = 0xdeadbeef; + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_TRANSFER_FUNCTION, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == MFVideoTransFunc_709, "got %u.\n", value32); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_VIDEO_PRIMARIES, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + IMFMediaType_DeleteAllItems(media_type); + + format.videoInfo.ColorPrimaries = MFVideoPrimaries_BT709; + hr = MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + value32 = 0xdeadbeef; + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_VIDEO_PRIMARIES, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == MFVideoPrimaries_BT709, "got %u.\n", value32); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_YUV_MATRIX, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + IMFMediaType_DeleteAllItems(media_type); + + format.videoInfo.TransferMatrix = MFVideoTransferMatrix_BT709; + hr = MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + value32 = 0xdeadbeef; + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_YUV_MATRIX, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == MFVideoTransferMatrix_BT709, "got %u.\n", value32); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_VIDEO_LIGHTING, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + IMFMediaType_DeleteAllItems(media_type); + + format.videoInfo.SourceLighting = MFVideoLighting_bright; + hr = MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + value32 = 0xdeadbeef; + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_VIDEO_LIGHTING, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == MFVideoLighting_bright, "got %u.\n", value32); + hr = 
IMFMediaType_GetUINT64(media_type, &MF_MT_FRAME_RATE, &value64); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + IMFMediaType_DeleteAllItems(media_type); + + format.videoInfo.FramesPerSecond.Numerator = 30000; + hr = MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_FRAME_RATE, &value64); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + IMFMediaType_DeleteAllItems(media_type); + format.videoInfo.FramesPerSecond.Denominator = 1001; + hr = MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + value64 = 0xdeadbeef; + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_FRAME_RATE, &value64); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value64 == (((UINT64)30000 << 32) | 1001), "got %#I64x.\n", value64); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_VIDEO_NOMINAL_RANGE, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + IMFMediaType_DeleteAllItems(media_type); + + format.videoInfo.NominalRange = MFNominalRange_Wide; + hr = MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + value32 = 0xdeadbeef; + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_VIDEO_NOMINAL_RANGE, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == MFNominalRange_Wide, "got %u.\n", value32); + hr = IMFMediaType_GetBlobSize(media_type, &MF_MT_GEOMETRIC_APERTURE, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + IMFMediaType_DeleteAllItems(media_type); + + format.videoInfo.GeometricAperture.OffsetX.value = 1; + format.videoInfo.GeometricAperture.OffsetX.fract = 2; + format.videoInfo.GeometricAperture.OffsetY.value = 3; + format.videoInfo.GeometricAperture.OffsetY.fract = 4; + format.videoInfo.GeometricAperture.Area.cx = -120; + hr = 
MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetBlob(media_type, &MF_MT_GEOMETRIC_APERTURE, (BYTE *)&aperture, sizeof(aperture), &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + format.videoInfo.GeometricAperture.Area.cy = -450; + hr = MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + value32 = 0xdeadbeef; + memset(&aperture, 0xcd, sizeof(aperture)); + hr = IMFMediaType_GetBlob(media_type, &MF_MT_GEOMETRIC_APERTURE, (BYTE *)&aperture, sizeof(aperture), &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == sizeof(aperture), "got %u.\n", value32); + ok(!memcmp(&format.videoInfo.GeometricAperture, &aperture, sizeof(aperture)), "Unexpected aperture.\n"); + hr = IMFMediaType_GetBlobSize(media_type, &MF_MT_MINIMUM_DISPLAY_APERTURE, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + IMFMediaType_DeleteAllItems(media_type); + + format.videoInfo.MinimumDisplayAperture.OffsetX.value = 1; + format.videoInfo.MinimumDisplayAperture.OffsetX.fract = 2; + format.videoInfo.MinimumDisplayAperture.OffsetY.value = 3; + format.videoInfo.MinimumDisplayAperture.OffsetY.fract = 4; + format.videoInfo.MinimumDisplayAperture.Area.cx = 120; + hr = MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetBlob(media_type, &MF_MT_MINIMUM_DISPLAY_APERTURE, (BYTE *)&aperture, sizeof(aperture), &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + format.videoInfo.MinimumDisplayAperture.Area.cy = 450; + hr = MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + value32 = 0xdeadbeef; + memset(&aperture, 0xcd, sizeof(aperture)); + hr = IMFMediaType_GetBlob(media_type, 
&MF_MT_MINIMUM_DISPLAY_APERTURE, (BYTE *)&aperture, sizeof(aperture), &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == sizeof(aperture), "got %u.\n", value32); + ok(!memcmp(&format.videoInfo.MinimumDisplayAperture, &aperture, sizeof(aperture)), "Unexpected aperture.\n"); + hr = IMFMediaType_GetBlobSize(media_type, &MF_MT_PAN_SCAN_APERTURE, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + IMFMediaType_DeleteAllItems(media_type); + + format.videoInfo.PanScanAperture.OffsetX.value = 1; + format.videoInfo.PanScanAperture.OffsetX.fract = 2; + format.videoInfo.PanScanAperture.OffsetY.value = 3; + format.videoInfo.PanScanAperture.OffsetY.fract = 4; + format.videoInfo.PanScanAperture.Area.cx = 120; + hr = MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetBlob(media_type, &MF_MT_PAN_SCAN_APERTURE, (BYTE *)&aperture, sizeof(aperture), &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + format.videoInfo.PanScanAperture.Area.cy = 450; + hr = MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + value32 = 0xdeadbeef; + memset(&aperture, 0xcd, sizeof(aperture)); + hr = IMFMediaType_GetBlob(media_type, &MF_MT_PAN_SCAN_APERTURE, (BYTE *)&aperture, sizeof(aperture), &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == sizeof(aperture), "got %u.\n", value32); + ok(!memcmp(&format.videoInfo.PanScanAperture, &aperture, sizeof(aperture)), "Unexpected aperture.\n"); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_PAN_SCAN_ENABLED, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + IMFMediaType_DeleteAllItems(media_type); + + format.videoInfo.VideoFlags |= MFVideoFlag_PanScanEnabled; + hr = MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); 
+ value32 = 0xdeadbeef; + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_PAN_SCAN_ENABLED, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == 1, "got %u.\n", value32); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_PAD_CONTROL_FLAGS, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + IMFMediaType_DeleteAllItems(media_type); + + format.videoInfo.VideoFlags |= MFVideoFlag_PAD_TO_16x9; + hr = MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + value32 = 0xdeadbeef; + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_PAD_CONTROL_FLAGS, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == 2, "got %u.\n", value32); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_SOURCE_CONTENT_HINT, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + IMFMediaType_DeleteAllItems(media_type); + + format.videoInfo.VideoFlags |= MFVideoFlag_SrcContentHint16x9; + hr = MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + value32 = 0xdeadbeef; + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_SOURCE_CONTENT_HINT, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == 1, "got %u.\n", value32); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_DRM_FLAGS, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + IMFMediaType_DeleteAllItems(media_type); + + format.videoInfo.VideoFlags |= MFVideoFlag_DigitallyProtected; + hr = MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + value32 = 0xdeadbeef; + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_DRM_FLAGS, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == 2, "got %u.\n", value32); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_AVG_BITRATE, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, 
"Unexpected hr %#lx.\n", hr); + IMFMediaType_DeleteAllItems(media_type); + + format.compressedInfo.AvgBitrate = 123456; + hr = MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + value32 = 0xdeadbeef; + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_AVG_BITRATE, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == 123456, "got %u.\n", value32); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_AVG_BIT_ERROR_RATE, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + IMFMediaType_DeleteAllItems(media_type); + + format.compressedInfo.AvgBitErrorRate = 654321; + hr = MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + value32 = 0xdeadbeef; + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_AVG_BIT_ERROR_RATE, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == 654321, "got %u.\n", value32); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_MAX_KEYFRAME_SPACING, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + IMFMediaType_DeleteAllItems(media_type); + + format.compressedInfo.MaxKeyFrameSpacing = -123; + hr = MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + value32 = 0xdeadbeef; + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_MAX_KEYFRAME_SPACING, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == -123, "got %u.\n", value32); + hr = IMFMediaType_GetBlobSize(media_type, &MF_MT_PALETTE, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + IMFMediaType_DeleteAllItems(media_type); + + /* any subtype works here */ + format.guidFormat = MFVideoFormat_H264; + format.surfaceInfo.Format = MFVideoFormat_H264.Data1; + hr = MFInitMediaTypeFromMFVideoFormat(media_type, &format, sizeof(format)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); 
+ hr = IMFMediaType_GetBlobSize(media_type, &MF_MT_PALETTE, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + IMFMediaType_DeleteAllItems(media_type); + + *format_buf = format; + for (i = 0; i < ARRAY_SIZE(expect_palette); i++) + format_buf->surfaceInfo.Palette[i] = expect_palette[i]; + format_buf->surfaceInfo.PaletteEntries = ARRAY_SIZE(expect_palette); + + /* format sizes needs to include an extra palette entry */ + format_buf->dwSize = offsetof(MFVIDEOFORMAT, surfaceInfo.Palette[ARRAY_SIZE(expect_palette)]); + hr = MFInitMediaTypeFromMFVideoFormat(media_type, format_buf, sizeof(format)); + ok(hr == E_INVALIDARG, "Unexpected hr %#lx.\n", hr); + + format_buf->dwSize = offsetof(MFVIDEOFORMAT, surfaceInfo.Palette[ARRAY_SIZE(expect_palette) + 1]); + hr = MFInitMediaTypeFromMFVideoFormat(media_type, format_buf, format_buf->dwSize); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + value32 = 0xdeadbeef; + memset(&palette, 0xcd, sizeof(palette)); + hr = IMFMediaType_GetBlob(media_type, &MF_MT_PALETTE, (BYTE *)&palette, sizeof(palette), &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == sizeof(expect_palette), "got %u.\n", value32); + ok(!memcmp(palette, expect_palette, value32), "Unexpected palette.\n"); + hr = IMFMediaType_GetBlobSize(media_type, &MF_MT_USER_DATA, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + IMFMediaType_DeleteAllItems(media_type); + + memcpy(buffer + format_buf->dwSize, expect_user_data, sizeof(expect_user_data)); + format_buf->dwSize += sizeof(expect_user_data); + hr = MFInitMediaTypeFromMFVideoFormat(media_type, format_buf, format_buf->dwSize); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + value32 = 0xdeadbeef; + memset(&user_data, 0xcd, sizeof(user_data)); + hr = IMFMediaType_GetBlob(media_type, &MF_MT_USER_DATA, (BYTE *)user_data, sizeof(user_data), &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == sizeof(expect_user_data), "got %u.\n", 
value32); + ok(!memcmp(user_data, expect_user_data, value32), "Unexpected user data.\n"); + IMFMediaType_DeleteAllItems(media_type); + + /* check that user data follows MFVIDEOFORMAT struct, which is padded, when no palette is present */ + format_buf->surfaceInfo.PaletteEntries = 0; + memmove(format_buf + 1, expect_user_data, sizeof(expect_user_data)); + format_buf->dwSize = sizeof(*format_buf) + sizeof(expect_user_data); + hr = MFInitMediaTypeFromMFVideoFormat(media_type, format_buf, format_buf->dwSize); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + value32 = 0xdeadbeef; + memset(&user_data, 0xcd, sizeof(user_data)); + hr = IMFMediaType_GetBlob(media_type, &MF_MT_USER_DATA, (BYTE *)user_data, sizeof(user_data), &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == sizeof(expect_user_data), "got %u.\n", value32); + ok(!memcmp(user_data, expect_user_data, value32), "Unexpected user data.\n"); + IMFMediaType_DeleteAllItems(media_type); + + IMFMediaType_Release(media_type); +} + +static void test_IMFMediaType_GetRepresentation(void) +{ + WAVEFORMATEX wfx = {.wFormatTag = WAVE_FORMAT_PCM}; + IMFMediaType *media_type; + AM_MEDIA_TYPE *am_type; + HRESULT hr; + + hr = MFCreateMediaType(&media_type); + ok(hr == S_OK, "Failed to create media type, hr %#lx.\n", hr); + + hr = IMFMediaType_GetRepresentation(media_type, GUID_NULL, (void **)&am_type); + ok(hr == MF_E_UNSUPPORTED_REPRESENTATION, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetRepresentation(media_type, AM_MEDIA_TYPE_REPRESENTATION, (void **)&am_type); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetGUID(media_type, &MF_MT_MAJOR_TYPE, &MFMediaType_Audio); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetRepresentation(media_type, AM_MEDIA_TYPE_REPRESENTATION, (void **)&am_type); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetGUID(media_type, &MF_MT_MAJOR_TYPE, &MFMediaType_Video); + 
ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetRepresentation(media_type, AM_MEDIA_TYPE_REPRESENTATION, (void **)&am_type); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + + hr = IMFMediaType_SetGUID(media_type, &MF_MT_MAJOR_TYPE, &MFMediaType_Audio); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &MFAudioFormat_PCM); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetRepresentation(media_type, FORMAT_VideoInfo, (void **)&am_type); + ok(hr == E_INVALIDARG, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetRepresentation(media_type, AM_MEDIA_TYPE_REPRESENTATION, (void **)&am_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(IsEqualGUID(&am_type->majortype, &MFMediaType_Audio), "got %s.\n", debugstr_guid(&am_type->majortype)); + ok(IsEqualGUID(&am_type->subtype, &MFAudioFormat_PCM), "got %s.\n", debugstr_guid(&am_type->subtype)); + ok(IsEqualGUID(&am_type->formattype, &FORMAT_WaveFormatEx), "got %s.\n", debugstr_guid(&am_type->formattype)); + ok(am_type->cbFormat == sizeof(WAVEFORMATEX), "got %lu\n", am_type->cbFormat); + hr = IMFMediaType_FreeRepresentation(media_type, IID_IUnknown /* invalid format */, am_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + IMFMediaType_Release(media_type); + + hr = MFCreateAudioMediaType(&wfx, (IMFAudioMediaType **)&media_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetRepresentation(media_type, GUID_NULL, (void **)&am_type); + ok(hr == MF_E_UNSUPPORTED_REPRESENTATION, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetRepresentation(media_type, FORMAT_VideoInfo, (void **)&am_type); + ok(hr == E_INVALIDARG, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetRepresentation(media_type, AM_MEDIA_TYPE_REPRESENTATION, (void **)&am_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(IsEqualGUID(&am_type->majortype, &MFMediaType_Audio), "got %s.\n", 
debugstr_guid(&am_type->majortype)); + ok(IsEqualGUID(&am_type->subtype, &MFAudioFormat_PCM), "got %s.\n", debugstr_guid(&am_type->subtype)); + ok(IsEqualGUID(&am_type->formattype, &FORMAT_WaveFormatEx), "got %s.\n", debugstr_guid(&am_type->formattype)); + ok(am_type->cbFormat == sizeof(WAVEFORMATEX), "got %lu\n", am_type->cbFormat); + hr = IMFMediaType_FreeRepresentation(media_type, AM_MEDIA_TYPE_REPRESENTATION, am_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + IMFMediaType_Release(media_type); + + hr = MFCreateVideoMediaTypeFromSubtype(&MFVideoFormat_RGB32, (IMFVideoMediaType **)&media_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetRepresentation(media_type, GUID_NULL, (void **)&am_type); + ok(hr == MF_E_UNSUPPORTED_REPRESENTATION, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetRepresentation(media_type, FORMAT_WaveFormatEx, (void **)&am_type); + ok(hr == MF_E_UNSUPPORTED_REPRESENTATION, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetRepresentation(media_type, AM_MEDIA_TYPE_REPRESENTATION, (void **)&am_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(IsEqualGUID(&am_type->majortype, &MFMediaType_Video), "got %s.\n", debugstr_guid(&am_type->majortype)); + ok(IsEqualGUID(&am_type->subtype, &MEDIASUBTYPE_RGB32), "got %s.\n", debugstr_guid(&am_type->subtype)); + ok(IsEqualGUID(&am_type->formattype, &FORMAT_VideoInfo), "got %s.\n", debugstr_guid(&am_type->formattype)); + ok(am_type->cbFormat == sizeof(VIDEOINFOHEADER), "got %lu\n", am_type->cbFormat); + hr = IMFMediaType_FreeRepresentation(media_type, AM_MEDIA_TYPE_REPRESENTATION, am_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + + IMFMediaType_Release(media_type); +} static void test_MFCreateMediaTypeFromRepresentation(void) { @@ -8170,14 +9342,37 @@ static void test_MFAverageTimePerFrameToFrameRate(void) UINT64 avgtime; } frame_rate_tests[] = { + { 60000, 1001, 166863 }, { 60000, 1001, 166833 }, + { 60000, 1001, 166803 }, + + { 30000, 1001, 333697 
}, { 30000, 1001, 333667 }, + { 30000, 1001, 333637 }, + + { 24000, 1001, 417218 }, { 24000, 1001, 417188 }, + { 24000, 1001, 417158 }, + + { 60, 1, 166697 }, { 60, 1, 166667 }, + { 60, 1, 166637 }, + + { 30, 1, 333363 }, { 30, 1, 333333 }, + { 30, 1, 333303 }, + + { 50, 1, 200030 }, { 50, 1, 200000 }, + { 50, 1, 199970 }, + + { 25, 1, 400030 }, { 25, 1, 400000 }, + { 25, 1, 399970 }, + + { 24, 1, 416697 }, { 24, 1, 416667 }, + { 24, 1, 416637 }, { 1000000, 25641, 256410 }, { 10000000, 83333, 83333 }, @@ -8214,6 +9409,7 @@ static void test_MFMapDXGIFormatToDX9Format(void) { DXGI_FORMAT dxgi_format; DWORD d3d9_format; + BOOL broken; } formats_map[] = { @@ -8246,6 +9442,7 @@ static void test_MFMapDXGIFormatToDX9Format(void) { DXGI_FORMAT_B8G8R8X8_UNORM, D3DFMT_X8R8G8B8 }, { DXGI_FORMAT_B8G8R8A8_UNORM_SRGB, D3DFMT_A8R8G8B8 }, { DXGI_FORMAT_B8G8R8X8_UNORM_SRGB, D3DFMT_X8R8G8B8 }, + { DXGI_FORMAT_R8G8B8A8_UNORM, D3DFMT_A8B8G8R8, .broken = TRUE /* <= w1064v1507 */ }, { DXGI_FORMAT_AYUV, MAKEFOURCC('A','Y','U','V') }, { DXGI_FORMAT_Y410, MAKEFOURCC('Y','4','1','0') }, { DXGI_FORMAT_Y416, MAKEFOURCC('Y','4','1','6') }, @@ -8274,7 +9471,8 @@ static void test_MFMapDXGIFormatToDX9Format(void) for (i = 0; i < ARRAY_SIZE(formats_map); ++i) { format = pMFMapDXGIFormatToDX9Format(formats_map[i].dxgi_format); - ok(format == formats_map[i].d3d9_format, "Unexpected d3d9 format %#lx, dxgi format %#x.\n", format, formats_map[i].dxgi_format); + ok(format == formats_map[i].d3d9_format || broken(formats_map[i].broken && format == 0), + "Unexpected d3d9 format %#lx, dxgi format %#x.\n", format, formats_map[i].dxgi_format); } } @@ -8284,6 +9482,7 @@ static void test_MFMapDX9FormatToDXGIFormat(void) { DXGI_FORMAT dxgi_format; DWORD d3d9_format; + BOOL broken; } formats_map[] = { @@ -8311,6 +9510,7 @@ static void test_MFMapDX9FormatToDXGIFormat(void) { DXGI_FORMAT_BC3_UNORM, D3DFMT_DXT4 }, { DXGI_FORMAT_B8G8R8A8_UNORM, D3DFMT_A8R8G8B8 }, { DXGI_FORMAT_B8G8R8X8_UNORM, D3DFMT_X8R8G8B8 }, + { 
DXGI_FORMAT_R8G8B8A8_UNORM, D3DFMT_A8B8G8R8, .broken = TRUE }, { DXGI_FORMAT_AYUV, MAKEFOURCC('A','Y','U','V') }, { DXGI_FORMAT_Y410, MAKEFOURCC('Y','4','1','0') }, { DXGI_FORMAT_Y416, MAKEFOURCC('Y','4','1','6') }, @@ -8339,8 +9539,8 @@ static void test_MFMapDX9FormatToDXGIFormat(void) for (i = 0; i < ARRAY_SIZE(formats_map); ++i) { format = pMFMapDX9FormatToDXGIFormat(formats_map[i].d3d9_format); - ok(format == formats_map[i].dxgi_format, "Unexpected DXGI format %#x, d3d9 format %#lx.\n", - format, formats_map[i].d3d9_format); + ok(format == formats_map[i].dxgi_format || broken(formats_map[i].broken && format == 0), + "Unexpected DXGI format %#x, d3d9 format %#lx.\n", format, formats_map[i].d3d9_format); } } @@ -9750,142 +10950,806 @@ static void check_video_format(const MFVIDEOFORMAT *format, unsigned int width, ok(format->surfaceInfo.PaletteEntries == 0, "Unexpected palette size %lu.\n", format->surfaceInfo.PaletteEntries); } -static void test_MFInitVideoFormat_RGB(void) -{ - static const DWORD formats[] = - { - 0, /* same D3DFMT_X8R8G8B8 */ - D3DFMT_X8R8G8B8, - D3DFMT_R8G8B8, - D3DFMT_A8R8G8B8, - D3DFMT_R5G6B5, - D3DFMT_X1R5G5B5, - D3DFMT_A2B10G10R10, - D3DFMT_A2R10G10B10, - D3DFMT_P8, - D3DFMT_L8, - D3DFMT_YUY2, - D3DFMT_DXT1, - D3DFMT_D16, - D3DFMT_L16, - D3DFMT_A16B16G16R16F, - }; - MFVIDEOFORMAT format; - unsigned int i; - HRESULT hr; +static void test_MFInitVideoFormat_RGB(void) +{ + static const DWORD formats[] = + { + 0, /* same D3DFMT_X8R8G8B8 */ + D3DFMT_X8R8G8B8, + D3DFMT_R8G8B8, + D3DFMT_A8R8G8B8, + D3DFMT_R5G6B5, + D3DFMT_X1R5G5B5, + D3DFMT_A2B10G10R10, + D3DFMT_A2R10G10B10, + D3DFMT_P8, + D3DFMT_L8, + D3DFMT_YUY2, + D3DFMT_DXT1, + D3DFMT_D16, + D3DFMT_L16, + D3DFMT_A16B16G16R16F, + }; + MFVIDEOFORMAT format; + unsigned int i; + HRESULT hr; + + if (!pMFInitVideoFormat_RGB) + { + win_skip("MFInitVideoFormat_RGB is not available.\n"); + return; + } + + hr = pMFInitVideoFormat_RGB(NULL, 64, 32, 0); + ok(hr == E_INVALIDARG, "Unexpected hr %#lx.\n", 
hr); + + for (i = 0; i < ARRAY_SIZE(formats); ++i) + { + memset(&format, 0, sizeof(format)); + hr = pMFInitVideoFormat_RGB(&format, 64, 32, formats[i]); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + if (SUCCEEDED(hr)) + check_video_format(&format, 64, 32, formats[i]); + } +} + +static void test_MFCreateVideoMediaTypeFromVideoInfoHeader(void) +{ + IMFVideoMediaType *media_type; + KS_VIDEOINFOHEADER vih; + UINT32 value32; + UINT64 value64; + HRESULT hr; + GUID guid; + + hr = MFCreateVideoMediaTypeFromVideoInfoHeader(NULL, 0, 0, 0, MFVideoInterlace_Unknown, 0, NULL, &media_type); + todo_wine + ok(hr == E_INVALIDARG, "Unexpected hr %#lx.\n", hr); + + memset(&vih, 0, sizeof(vih)); + hr = MFCreateVideoMediaTypeFromVideoInfoHeader(&vih, 0, 0, 0, MFVideoInterlace_Unknown, 0, NULL, &media_type); + todo_wine + ok(hr == E_INVALIDARG, "Unexpected hr %#lx.\n", hr); + hr = MFCreateVideoMediaTypeFromVideoInfoHeader(&vih, sizeof(vih), 0, 0, MFVideoInterlace_Unknown, 0, NULL, &media_type); + todo_wine + ok(hr == E_INVALIDARG, "Unexpected hr %#lx.\n", hr); + vih.bmiHeader.biSize = sizeof(vih.bmiHeader); + hr = MFCreateVideoMediaTypeFromVideoInfoHeader(&vih, sizeof(vih), 0, 0, MFVideoInterlace_Unknown, 0, NULL, &media_type); + todo_wine + ok(hr == E_INVALIDARG, "Unexpected hr %#lx.\n", hr); + + vih.bmiHeader.biSize = sizeof(vih.bmiHeader); + vih.bmiHeader.biPlanes = 1; + vih.bmiHeader.biWidth = 16; + vih.bmiHeader.biHeight = 32; + vih.bmiHeader.biBitCount = 32; + + hr = MFCreateVideoMediaTypeFromVideoInfoHeader(&vih, sizeof(vih), 3, 2, MFVideoInterlace_Progressive, + MFVideoFlag_AnalogProtected, &GUID_NULL, &media_type); + todo_wine + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + if (FAILED(hr)) return; + IMFVideoMediaType_Release(media_type); + + hr = MFCreateVideoMediaTypeFromVideoInfoHeader(&vih, sizeof(vih), 3, 2, MFVideoInterlace_Progressive, + MFVideoFlag_AnalogProtected, NULL, &media_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + + hr = 
IMFVideoMediaType_GetGUID(media_type, &MF_MT_MAJOR_TYPE, &guid); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(IsEqualGUID(&guid, &MFMediaType_Video), "Unexpected guid %s.\n", debugstr_guid(&guid)); + hr = IMFVideoMediaType_GetGUID(media_type, &MF_MT_SUBTYPE, &guid); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(IsEqualGUID(&guid, &MFVideoFormat_RGB32), "Unexpected guid %s.\n", debugstr_guid(&guid)); + hr = IMFVideoMediaType_GetUINT64(media_type, &MF_MT_FRAME_SIZE, &value64); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value64 == ((UINT64)16 << 32 | 32), "Unexpected value %#I64x.\n", value64); + hr = IMFVideoMediaType_GetUINT64(media_type, &MF_MT_PIXEL_ASPECT_RATIO, &value64); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value64 == ((UINT64)3 << 32 | 2), "Unexpected value %#I64x.\n", value64); + hr = IMFVideoMediaType_GetUINT32(media_type, &MF_MT_DRM_FLAGS, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == MFVideoDRMFlag_AnalogProtected, "Unexpected value %#x.\n", value32); + hr = IMFVideoMediaType_GetUINT32(media_type, &MF_MT_INTERLACE_MODE, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == MFVideoInterlace_Progressive, "Unexpected value %#x.\n", value32); + hr = IMFVideoMediaType_GetUINT32(media_type, &MF_MT_SAMPLE_SIZE, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == 2048, "Unexpected value %u.\n", value32); + hr = IMFVideoMediaType_GetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == -64, "Unexpected value %d.\n", value32); + hr = IMFVideoMediaType_GetUINT32(media_type, &MF_MT_FIXED_SIZE_SAMPLES, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(!!value32, "Unexpected value %#x.\n", value32); + hr = IMFVideoMediaType_GetUINT32(media_type, &MF_MT_ALL_SAMPLES_INDEPENDENT, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(!!value32, "Unexpected value %#x.\n", value32); + + 
IMFVideoMediaType_Release(media_type); + + /* Negative height. */ + vih.bmiHeader.biHeight = -32; + hr = MFCreateVideoMediaTypeFromVideoInfoHeader(&vih, sizeof(vih), 3, 2, MFVideoInterlace_Progressive, + MFVideoFlag_AnalogProtected, NULL, &media_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFVideoMediaType_GetUINT64(media_type, &MF_MT_FRAME_SIZE, &value64); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value64 == ((UINT64)16 << 32 | 32), "Unexpected value %#I64x.\n", value64); + hr = IMFVideoMediaType_GetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == 64, "Unexpected value %d.\n", value32); + IMFVideoMediaType_Release(media_type); +} + +static void test_MFInitMediaTypeFromVideoInfoHeader(void) +{ + static const MFVideoArea expect_aperture = {.OffsetX = {.value = 1}, .OffsetY = {.value = 2}, .Area = {.cx = 3, .cy = 5}}; + static const RECT source = {1, 2, 4, 7}, target = {3, 2, 12, 9}; + IMFMediaType *media_type; + MFVideoArea aperture; + VIDEOINFOHEADER vih; + UINT32 value32; + UINT64 value64; + HRESULT hr; + GUID guid; + + hr = MFCreateMediaType(&media_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + + memset(&vih, 0, sizeof(vih)); + hr = MFInitMediaTypeFromVideoInfoHeader(media_type, &vih, 0, NULL); + ok(hr == E_INVALIDARG, "Unexpected hr %#lx.\n", hr); + hr = MFInitMediaTypeFromVideoInfoHeader(media_type, &vih, sizeof(vih), NULL); + ok(hr == E_INVALIDARG, "Unexpected hr %#lx.\n", hr); + + hr = MFInitMediaTypeFromVideoInfoHeader(media_type, &vih, sizeof(vih), &GUID_NULL); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetGUID(media_type, &MF_MT_MAJOR_TYPE, &guid); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(IsEqualGUID(&guid, &MFMediaType_Video), "Unexpected guid %s.\n", debugstr_guid(&guid)); + hr = IMFMediaType_GetGUID(media_type, &MF_MT_SUBTYPE, &guid); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(IsEqualGUID(&guid, &GUID_NULL), 
"Unexpected guid %s.\n", debugstr_guid(&guid)); + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_FRAME_SIZE, &value64); + todo_wine + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + + vih.bmiHeader.biWidth = 16; + hr = MFInitMediaTypeFromVideoInfoHeader(media_type, &vih, sizeof(vih), &GUID_NULL); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_FRAME_SIZE, &value64); + todo_wine + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_PIXEL_ASPECT_RATIO, &value64); + todo_wine + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_INTERLACE_MODE, &value32); + todo_wine + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + + vih.bmiHeader.biHeight = -32; + hr = MFInitMediaTypeFromVideoInfoHeader(media_type, &vih, sizeof(vih), &GUID_NULL); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_FRAME_SIZE, &value64); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value64 == ((UINT64)16 << 32 | 32), "Unexpected value %#I64x.\n", value64); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + + vih.bmiHeader.biHeight = 32; + hr = MFInitMediaTypeFromVideoInfoHeader(media_type, &vih, sizeof(vih), &GUID_NULL); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_FRAME_SIZE, &value64); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value64 == ((UINT64)16 << 32 | 32), "Unexpected value %#I64x.\n", value64); + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_PIXEL_ASPECT_RATIO, &value64); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value64 == ((UINT64)1 << 32 | 1), "Unexpected value %#I64x.\n", value64); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_INTERLACE_MODE, &value32); + ok(hr == S_OK, 
"Unexpected hr %#lx.\n", hr); + ok(value32 == MFVideoInterlace_Progressive, "Unexpected value %#x.\n", value32); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_SAMPLE_SIZE, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + + value32 = 0xdeadbeef; + vih.bmiHeader.biSizeImage = 12345; + hr = MFInitMediaTypeFromVideoInfoHeader(media_type, &vih, sizeof(vih), &GUID_NULL); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_SAMPLE_SIZE, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == 12345, "Unexpected value %#x.\n", value32); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_AVG_BITRATE, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + + value32 = 0xdeadbeef; + vih.dwBitRate = 678910; + hr = MFInitMediaTypeFromVideoInfoHeader(media_type, &vih, sizeof(vih), &GUID_NULL); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_AVG_BITRATE, &value32); + todo_wine + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + todo_wine + ok(value32 == 678910, "Unexpected value %#x.\n", value32); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_AVG_BIT_ERROR_RATE, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + + value32 = 0xdeadbeef; + vih.dwBitErrorRate = 11121314; + hr = MFInitMediaTypeFromVideoInfoHeader(media_type, &vih, sizeof(vih), &GUID_NULL); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_AVG_BIT_ERROR_RATE, &value32); + todo_wine + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + todo_wine + ok(value32 == 11121314, "Unexpected value %#x.\n", value32); + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_FRAME_RATE, &value64); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + + value64 = 
0xdeadbeef; + vih.AvgTimePerFrame = 1151617; + hr = MFInitMediaTypeFromVideoInfoHeader(media_type, &vih, sizeof(vih), &GUID_NULL); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_FRAME_RATE, &value64); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value64 == ((UINT64)10000000 << 32 | 1151617), "Unexpected value %#I64x.\n", value64); + + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_FIXED_SIZE_SAMPLES, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_ALL_SAMPLES_INDEPENDENT, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + + hr = IMFMediaType_GetBlob(media_type, &MF_MT_MINIMUM_DISPLAY_APERTURE, (BYTE *)&aperture, sizeof(aperture), &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetBlob(media_type, &MF_MT_GEOMETRIC_APERTURE, (BYTE *)&aperture, sizeof(aperture), &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetBlob(media_type, &MF_MT_PAN_SCAN_APERTURE, (BYTE *)&aperture, sizeof(aperture), &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_PAN_SCAN_ENABLED, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + + /* only rcSource is considered, translated into both MF_MT_MINIMUM_DISPLAY_APERTURE and MF_MT_PAN_SCAN_APERTURE */ + value32 = 0xdeadbeef; + vih.rcSource = source; + vih.rcTarget = target; + hr = MFInitMediaTypeFromVideoInfoHeader(media_type, &vih, sizeof(vih), &GUID_NULL); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_FRAME_SIZE, &value64); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value64 == ((UINT64)16 << 32 | 32), "Unexpected value %#I64x.\n", value64); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_SAMPLE_SIZE, &value32); + ok(hr == S_OK, 
"Unexpected hr %#lx.\n", hr); + ok(value32 == 12345, "Unexpected value %#x.\n", value32); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + + value32 = 0xdeadbeef; + memset(&aperture, 0xcd, sizeof(aperture)); + hr = IMFMediaType_GetBlob(media_type, &MF_MT_MINIMUM_DISPLAY_APERTURE, (BYTE *)&aperture, sizeof(aperture), &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == sizeof(aperture), "got %d.\n", value32); + ok(!memcmp(&aperture, &expect_aperture, sizeof(aperture)), "unexpected aperture\n"); + + value32 = 0xdeadbeef; + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_PAN_SCAN_ENABLED, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == 1, "got %d.\n", (UINT32)value32); + + value32 = 0xdeadbeef; + memset(&aperture, 0xcd, sizeof(aperture)); + hr = IMFMediaType_GetBlob(media_type, &MF_MT_PAN_SCAN_APERTURE, (BYTE *)&aperture, sizeof(aperture), &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == sizeof(aperture), "got %d.\n", value32); + ok(!memcmp(&aperture, &expect_aperture, sizeof(aperture)), "unexpected aperture\n"); + + hr = IMFMediaType_GetItem(media_type, &MF_MT_GEOMETRIC_APERTURE, NULL); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + + + hr = MFInitMediaTypeFromVideoInfoHeader(media_type, &vih, sizeof(vih), &MFVideoFormat_NV12); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_FIXED_SIZE_SAMPLES, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == 1, "Unexpected value %#x.\n", value32); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_ALL_SAMPLES_INDEPENDENT, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == 1, "Unexpected value %#x.\n", value32); + + + /* biBitCount is used for implicit RGB format if GUID is NULL */ + hr = MFInitMediaTypeFromVideoInfoHeader(media_type, &vih, sizeof(vih), NULL); 
+ ok(hr == E_INVALIDARG, "Unexpected hr %#lx.\n", hr); + + for (vih.bmiHeader.biBitCount = 1; vih.bmiHeader.biBitCount <= 32; vih.bmiHeader.biBitCount++) + { + winetest_push_context("%u", vih.bmiHeader.biBitCount); + + hr = MFInitMediaTypeFromVideoInfoHeader(media_type, &vih, sizeof(vih), NULL); + if (vih.bmiHeader.biBitCount != 1 && vih.bmiHeader.biBitCount != 4 && vih.bmiHeader.biBitCount != 8 + && vih.bmiHeader.biBitCount != 16 && vih.bmiHeader.biBitCount != 24 && vih.bmiHeader.biBitCount != 32) + ok(hr == E_INVALIDARG, "Unexpected hr %#lx.\n", hr); + else + { + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + + memset(&guid, 0xcd, sizeof(guid)); + hr = IMFMediaType_GetGUID(media_type, &MF_MT_SUBTYPE, &guid); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + if (vih.bmiHeader.biBitCount == 32) + ok(IsEqualGUID(&guid, &MFVideoFormat_RGB32), "Unexpected guid %s.\n", debugstr_guid(&guid)); + else if (vih.bmiHeader.biBitCount == 24) + ok(IsEqualGUID(&guid, &MFVideoFormat_RGB24), "Unexpected guid %s.\n", debugstr_guid(&guid)); + else if (vih.bmiHeader.biBitCount == 16) + ok(IsEqualGUID(&guid, &MFVideoFormat_RGB555), "Unexpected guid %s.\n", debugstr_guid(&guid)); + else if (vih.bmiHeader.biBitCount == 8) + ok(IsEqualGUID(&guid, &MFVideoFormat_RGB8), "Unexpected guid %s.\n", debugstr_guid(&guid)); + else if (vih.bmiHeader.biBitCount == 4) + ok(IsEqualGUID(&guid, &MFVideoFormat_RGB4), "Unexpected guid %s.\n", debugstr_guid(&guid)); + else if (vih.bmiHeader.biBitCount == 1) + ok(IsEqualGUID(&guid, &MFVideoFormat_RGB1), "Unexpected guid %s.\n", debugstr_guid(&guid)); + + value32 = 0xdeadbeef; + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + if (vih.bmiHeader.biBitCount > 1) + ok(value32 == 16 * vih.bmiHeader.biBitCount / 8, "Unexpected value %#x.\n", value32); + else + todo_wine ok(value32 == -4, "Unexpected value %#x.\n", value32); + + hr = IMFMediaType_GetItem(media_type, &MF_MT_PALETTE, NULL); + 
if (vih.bmiHeader.biBitCount > 1) + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + else + todo_wine ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + + value32 = 0xdeadbeef; + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_FIXED_SIZE_SAMPLES, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == 1, "Unexpected value %#x.\n", value32); + value32 = 0xdeadbeef; + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_ALL_SAMPLES_INDEPENDENT, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == 1, "Unexpected value %#x.\n", value32); + + value32 = 0xdeadbeef; + vih.bmiHeader.biHeight = 32; + hr = MFInitMediaTypeFromVideoInfoHeader(media_type, &vih, sizeof(vih), NULL); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + if (vih.bmiHeader.biBitCount > 1) + ok(value32 == -16 * vih.bmiHeader.biBitCount / 8, "Unexpected value %#x.\n", value32); + else + todo_wine ok(value32 == -4, "Unexpected value %#x.\n", value32); + + vih.bmiHeader.biHeight = -32; + } + + winetest_pop_context(); + } + + IMFMediaType_Release(media_type); +} + +static void test_MFInitMediaTypeFromVideoInfoHeader2(void) +{ + static const MFVideoArea expect_aperture = {.OffsetX = {.value = 1}, .OffsetY = {.value = 2}, .Area = {.cx = 3, .cy = 5}}; + static const RECT source = {1, 2, 4, 7}, target = {3, 2, 12, 9}; + IMFMediaType *media_type; + VIDEOINFOHEADER2 vih; + MFVideoArea aperture; + UINT32 value32; + UINT64 value64; + HRESULT hr; + GUID guid; + + hr = MFCreateMediaType(&media_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + + memset(&vih, 0, sizeof(vih)); + hr = MFInitMediaTypeFromVideoInfoHeader2(media_type, &vih, 0, NULL); + ok(hr == E_INVALIDARG, "Unexpected hr %#lx.\n", hr); + hr = MFInitMediaTypeFromVideoInfoHeader2(media_type, &vih, sizeof(vih), NULL); + ok(hr == E_INVALIDARG, "Unexpected hr %#lx.\n", hr); + + hr = 
MFInitMediaTypeFromVideoInfoHeader2(media_type, &vih, sizeof(vih), &GUID_NULL); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetGUID(media_type, &MF_MT_MAJOR_TYPE, &guid); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(IsEqualGUID(&guid, &MFMediaType_Video), "Unexpected guid %s.\n", debugstr_guid(&guid)); + hr = IMFMediaType_GetGUID(media_type, &MF_MT_SUBTYPE, &guid); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(IsEqualGUID(&guid, &GUID_NULL), "Unexpected guid %s.\n", debugstr_guid(&guid)); + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_FRAME_SIZE, &value64); + todo_wine + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + + vih.bmiHeader.biWidth = 16; + hr = MFInitMediaTypeFromVideoInfoHeader2(media_type, &vih, sizeof(vih), &GUID_NULL); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_FRAME_SIZE, &value64); + todo_wine + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_PIXEL_ASPECT_RATIO, &value64); + todo_wine + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_INTERLACE_MODE, &value32); + todo_wine + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + + vih.bmiHeader.biHeight = -32; + hr = MFInitMediaTypeFromVideoInfoHeader2(media_type, &vih, sizeof(vih), &GUID_NULL); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_FRAME_SIZE, &value64); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value64 == ((UINT64)16 << 32 | 32), "Unexpected value %#I64x.\n", value64); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + + vih.bmiHeader.biHeight = 32; + hr = MFInitMediaTypeFromVideoInfoHeader2(media_type, &vih, sizeof(vih), &GUID_NULL); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = 
IMFMediaType_GetUINT64(media_type, &MF_MT_FRAME_SIZE, &value64); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value64 == ((UINT64)16 << 32 | 32), "Unexpected value %#I64x.\n", value64); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_INTERLACE_MODE, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == MFVideoInterlace_Progressive, "Unexpected value %#x.\n", value32); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_PIXEL_ASPECT_RATIO, &value64); + todo_wine + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_SAMPLE_SIZE, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + + value32 = 0xdeadbeef; + vih.bmiHeader.biSizeImage = 12345; + hr = MFInitMediaTypeFromVideoInfoHeader2(media_type, &vih, sizeof(vih), &GUID_NULL); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_SAMPLE_SIZE, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == 12345, "Unexpected value %#x.\n", value32); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_AVG_BITRATE, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + + value32 = 0xdeadbeef; + vih.dwBitRate = 678910; + hr = MFInitMediaTypeFromVideoInfoHeader2(media_type, &vih, sizeof(vih), &GUID_NULL); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_AVG_BITRATE, &value32); + todo_wine + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + todo_wine + ok(value32 == 678910, "Unexpected value %#x.\n", value32); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_AVG_BIT_ERROR_RATE, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + + value32 = 0xdeadbeef; + vih.dwBitErrorRate = 11121314; + hr = MFInitMediaTypeFromVideoInfoHeader2(media_type, 
&vih, sizeof(vih), &GUID_NULL); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_AVG_BIT_ERROR_RATE, &value32); + todo_wine + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + todo_wine + ok(value32 == 11121314, "Unexpected value %#x.\n", value32); + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_FRAME_RATE, &value64); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + + value64 = 0xdeadbeef; + vih.AvgTimePerFrame = 1151617; + hr = MFInitMediaTypeFromVideoInfoHeader2(media_type, &vih, sizeof(vih), &GUID_NULL); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_FRAME_RATE, &value64); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value64 == ((UINT64)10000000 << 32 | 1151617), "Unexpected value %#I64x.\n", value64); + + value32 = 0xdeadbeef; + vih.dwInterlaceFlags = AMINTERLACE_IsInterlaced | AMINTERLACE_DisplayModeBobOrWeave; + hr = MFInitMediaTypeFromVideoInfoHeader2(media_type, &vih, sizeof(vih), &GUID_NULL); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_INTERLACE_MODE, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == MFVideoInterlace_MixedInterlaceOrProgressive + || broken(value32 == MFVideoInterlace_FieldInterleavedLowerFirst) /* Win7 */, + "Unexpected value %#x.\n", value32); + + vih.dwPictAspectRatioX = 123; + hr = MFInitMediaTypeFromVideoInfoHeader2(media_type, &vih, sizeof(vih), &GUID_NULL); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_PIXEL_ASPECT_RATIO, &value64); + todo_wine + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + + value64 = 0xdeadbeef; + vih.dwPictAspectRatioY = 456; + hr = MFInitMediaTypeFromVideoInfoHeader2(media_type, &vih, sizeof(vih), &GUID_NULL); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_PIXEL_ASPECT_RATIO, &value64); + ok(hr == S_OK, 
"Unexpected hr %#lx.\n", hr); + todo_wine + ok(value64 == ((UINT64)41 << 32 | 76), "Unexpected value %#I64x.\n", value64); + + vih.dwControlFlags = AMCONTROL_COLORINFO_PRESENT; + hr = MFInitMediaTypeFromVideoInfoHeader2(media_type, &vih, sizeof(vih), &GUID_NULL); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_YUV_MATRIX, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + + value32 = 0xdeadbeef; + vih.dwControlFlags = AMCONTROL_COLORINFO_PRESENT | (MFVideoTransferMatrix_SMPTE240M << 15); + hr = MFInitMediaTypeFromVideoInfoHeader2(media_type, &vih, sizeof(vih), &GUID_NULL); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_YUV_MATRIX, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == MFVideoTransferMatrix_SMPTE240M, "Unexpected value %#x.\n", value32); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_VIDEO_NOMINAL_RANGE, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + + value32 = 0xdeadbeef; + vih.dwControlFlags = AMCONTROL_COLORINFO_PRESENT | (MFVideoTransferMatrix_SMPTE240M << 15) | (MFNominalRange_Wide << 12); + hr = MFInitMediaTypeFromVideoInfoHeader2(media_type, &vih, sizeof(vih), &GUID_NULL); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_VIDEO_NOMINAL_RANGE, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == MFNominalRange_Wide, "Unexpected value %#x.\n", value32); + + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_FIXED_SIZE_SAMPLES, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_ALL_SAMPLES_INDEPENDENT, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + + hr = MFInitMediaTypeFromVideoInfoHeader2(media_type, &vih, sizeof(vih), &MFVideoFormat_NV12); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = 
IMFMediaType_GetUINT32(media_type, &MF_MT_FIXED_SIZE_SAMPLES, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == 1, "Unexpected value %#x.\n", value32); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_ALL_SAMPLES_INDEPENDENT, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == 1, "Unexpected value %#x.\n", value32); + + hr = IMFMediaType_GetBlob(media_type, &MF_MT_MINIMUM_DISPLAY_APERTURE, (BYTE *)&aperture, sizeof(aperture), &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetBlob(media_type, &MF_MT_GEOMETRIC_APERTURE, (BYTE *)&aperture, sizeof(aperture), &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetBlob(media_type, &MF_MT_PAN_SCAN_APERTURE, (BYTE *)&aperture, sizeof(aperture), &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_PAN_SCAN_ENABLED, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + + /* only rcSource is considered, translated into both MF_MT_MINIMUM_DISPLAY_APERTURE and MF_MT_PAN_SCAN_APERTURE */ + value32 = 0xdeadbeef; + vih.rcSource = source; + vih.rcTarget = target; + hr = MFInitMediaTypeFromVideoInfoHeader2(media_type, &vih, sizeof(vih), &GUID_NULL); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_FRAME_SIZE, &value64); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value64 == ((UINT64)16 << 32 | 32), "Unexpected value %#I64x.\n", value64); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_SAMPLE_SIZE, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == 12345, "Unexpected value %#x.\n", value32); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + + value32 = 0xdeadbeef; + memset(&aperture, 0xcd, sizeof(aperture)); + hr = 
IMFMediaType_GetBlob(media_type, &MF_MT_MINIMUM_DISPLAY_APERTURE, (BYTE *)&aperture, sizeof(aperture), &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == sizeof(aperture), "got %d.\n", value32); + ok(!memcmp(&aperture, &expect_aperture, sizeof(aperture)), "unexpected aperture\n"); + + value32 = 0xdeadbeef; + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_PAN_SCAN_ENABLED, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == 1, "got %d.\n", (UINT32)value32); + + value32 = 0xdeadbeef; + memset(&aperture, 0xcd, sizeof(aperture)); + hr = IMFMediaType_GetBlob(media_type, &MF_MT_PAN_SCAN_APERTURE, (BYTE *)&aperture, sizeof(aperture), &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == sizeof(aperture), "got %d.\n", value32); + ok(!memcmp(&aperture, &expect_aperture, sizeof(aperture)), "unexpected aperture\n"); + + hr = IMFMediaType_GetItem(media_type, &MF_MT_GEOMETRIC_APERTURE, NULL); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); - if (!pMFInitVideoFormat_RGB) - { - win_skip("MFInitVideoFormat_RGB is not available.\n"); - return; - } - hr = pMFInitVideoFormat_RGB(NULL, 64, 32, 0); + /* biBitCount is used for implicit RGB format if GUID is NULL */ + hr = MFInitMediaTypeFromVideoInfoHeader2(media_type, &vih, sizeof(vih), NULL); ok(hr == E_INVALIDARG, "Unexpected hr %#lx.\n", hr); - for (i = 0; i < ARRAY_SIZE(formats); ++i) + for (vih.bmiHeader.biBitCount = 1; vih.bmiHeader.biBitCount <= 32; vih.bmiHeader.biBitCount++) { - memset(&format, 0, sizeof(format)); - hr = pMFInitVideoFormat_RGB(&format, 64, 32, formats[i]); - ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - if (SUCCEEDED(hr)) - check_video_format(&format, 64, 32, formats[i]); + winetest_push_context("%u", vih.bmiHeader.biBitCount); + + hr = MFInitMediaTypeFromVideoInfoHeader2(media_type, &vih, sizeof(vih), NULL); + if (vih.bmiHeader.biBitCount != 1 && vih.bmiHeader.biBitCount != 4 && vih.bmiHeader.biBitCount != 8 + && 
vih.bmiHeader.biBitCount != 16 && vih.bmiHeader.biBitCount != 24 && vih.bmiHeader.biBitCount != 32) + ok(hr == E_INVALIDARG, "Unexpected hr %#lx.\n", hr); + else + { + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + + memset(&guid, 0xcd, sizeof(guid)); + hr = IMFMediaType_GetGUID(media_type, &MF_MT_SUBTYPE, &guid); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + if (vih.bmiHeader.biBitCount == 32) + ok(IsEqualGUID(&guid, &MFVideoFormat_RGB32), "Unexpected guid %s.\n", debugstr_guid(&guid)); + else if (vih.bmiHeader.biBitCount == 24) + ok(IsEqualGUID(&guid, &MFVideoFormat_RGB24), "Unexpected guid %s.\n", debugstr_guid(&guid)); + else if (vih.bmiHeader.biBitCount == 16) + ok(IsEqualGUID(&guid, &MFVideoFormat_RGB555), "Unexpected guid %s.\n", debugstr_guid(&guid)); + else if (vih.bmiHeader.biBitCount == 8) + ok(IsEqualGUID(&guid, &MFVideoFormat_RGB8), "Unexpected guid %s.\n", debugstr_guid(&guid)); + else if (vih.bmiHeader.biBitCount == 4) + ok(IsEqualGUID(&guid, &MFVideoFormat_RGB4), "Unexpected guid %s.\n", debugstr_guid(&guid)); + else if (vih.bmiHeader.biBitCount == 1) + ok(IsEqualGUID(&guid, &MFVideoFormat_RGB1), "Unexpected guid %s.\n", debugstr_guid(&guid)); + + value32 = 0xdeadbeef; + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + if (vih.bmiHeader.biBitCount > 1) + ok(value32 == 16 * vih.bmiHeader.biBitCount / 8, "Unexpected value %#x.\n", value32); + else + todo_wine ok(value32 == -4, "Unexpected value %#x.\n", value32); + + hr = IMFMediaType_GetItem(media_type, &MF_MT_PALETTE, NULL); + if (vih.bmiHeader.biBitCount > 1) + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + else + todo_wine ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + + value32 = 0xdeadbeef; + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_FIXED_SIZE_SAMPLES, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == 1, "Unexpected value %#x.\n", value32); + value32 = 0xdeadbeef; + hr = 
IMFMediaType_GetUINT32(media_type, &MF_MT_ALL_SAMPLES_INDEPENDENT, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == 1, "Unexpected value %#x.\n", value32); + + value32 = 0xdeadbeef; + vih.bmiHeader.biHeight = 32; + hr = MFInitMediaTypeFromVideoInfoHeader2(media_type, &vih, sizeof(vih), NULL); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + if (vih.bmiHeader.biBitCount > 1) + ok(value32 == -16 * vih.bmiHeader.biBitCount / 8, "Unexpected value %#x.\n", value32); + else + todo_wine ok(value32 == -4, "Unexpected value %#x.\n", value32); + + vih.bmiHeader.biHeight = -32; + } + + winetest_pop_context(); } + + IMFMediaType_Release(media_type); } -static void test_MFCreateVideoMediaTypeFromVideoInfoHeader(void) +static void test_MFInitMediaTypeFromMPEG1VideoInfo(void) { - IMFVideoMediaType *media_type; - KS_VIDEOINFOHEADER vih; + IMFMediaType *media_type; + MPEG1VIDEOINFO vih; + BYTE buffer[64]; UINT32 value32; UINT64 value64; HRESULT hr; GUID guid; - hr = MFCreateVideoMediaTypeFromVideoInfoHeader(NULL, 0, 0, 0, MFVideoInterlace_Unknown, 0, NULL, &media_type); - todo_wine - ok(hr == E_INVALIDARG, "Unexpected hr %#lx.\n", hr); + hr = MFCreateMediaType(&media_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); memset(&vih, 0, sizeof(vih)); - hr = MFCreateVideoMediaTypeFromVideoInfoHeader(&vih, 0, 0, 0, MFVideoInterlace_Unknown, 0, NULL, &media_type); - todo_wine - ok(hr == E_INVALIDARG, "Unexpected hr %#lx.\n", hr); - hr = MFCreateVideoMediaTypeFromVideoInfoHeader(&vih, sizeof(vih), 0, 0, MFVideoInterlace_Unknown, 0, NULL, &media_type); - todo_wine + hr = MFInitMediaTypeFromMPEG1VideoInfo(media_type, &vih, 0, NULL); ok(hr == E_INVALIDARG, "Unexpected hr %#lx.\n", hr); - vih.bmiHeader.biSize = sizeof(vih.bmiHeader); - hr = MFCreateVideoMediaTypeFromVideoInfoHeader(&vih, sizeof(vih), 0, 0, MFVideoInterlace_Unknown, 0, NULL, 
&media_type); - todo_wine + hr = MFInitMediaTypeFromMPEG1VideoInfo(media_type, &vih, sizeof(vih), NULL); ok(hr == E_INVALIDARG, "Unexpected hr %#lx.\n", hr); - vih.bmiHeader.biSize = sizeof(vih.bmiHeader); - vih.bmiHeader.biPlanes = 1; - vih.bmiHeader.biWidth = 16; - vih.bmiHeader.biHeight = 32; - vih.bmiHeader.biBitCount = 32; - - hr = MFCreateVideoMediaTypeFromVideoInfoHeader(&vih, sizeof(vih), 3, 2, MFVideoInterlace_Progressive, - MFVideoFlag_AnalogProtected, &GUID_NULL, &media_type); - todo_wine + hr = MFInitMediaTypeFromMPEG1VideoInfo(media_type, &vih, sizeof(vih), &GUID_NULL); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - if (FAILED(hr)) return; - IMFVideoMediaType_Release(media_type); - - hr = MFCreateVideoMediaTypeFromVideoInfoHeader(&vih, sizeof(vih), 3, 2, MFVideoInterlace_Progressive, - MFVideoFlag_AnalogProtected, NULL, &media_type); + hr = IMFMediaType_GetGUID(media_type, &MF_MT_MAJOR_TYPE, &guid); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(IsEqualGUID(&guid, &MFMediaType_Video), "Unexpected guid %s.\n", debugstr_guid(&guid)); + hr = IMFMediaType_GetGUID(media_type, &MF_MT_SUBTYPE, &guid); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(IsEqualGUID(&guid, &GUID_NULL), "Unexpected guid %s.\n", debugstr_guid(&guid)); + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_FRAME_SIZE, &value64); + todo_wine + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); - hr = IMFVideoMediaType_GetGUID(media_type, &MF_MT_MAJOR_TYPE, &guid); + vih.hdr.bmiHeader.biWidth = 16; + hr = MFInitMediaTypeFromMPEG1VideoInfo(media_type, &vih, sizeof(vih), &GUID_NULL); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - ok(IsEqualGUID(&guid, &MFMediaType_Video), "Unexpected guid %s.\n", debugstr_guid(&guid)); - hr = IMFVideoMediaType_GetGUID(media_type, &MF_MT_SUBTYPE, &guid); + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_FRAME_SIZE, &value64); + todo_wine + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT64(media_type, 
&MF_MT_PIXEL_ASPECT_RATIO, &value64); + todo_wine + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_INTERLACE_MODE, &value32); + todo_wine + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + + vih.hdr.bmiHeader.biHeight = -32; + hr = MFInitMediaTypeFromMPEG1VideoInfo(media_type, &vih, sizeof(vih), &GUID_NULL); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - ok(IsEqualGUID(&guid, &MFVideoFormat_RGB32), "Unexpected guid %s.\n", debugstr_guid(&guid)); - hr = IMFVideoMediaType_GetUINT64(media_type, &MF_MT_FRAME_SIZE, &value64); + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_FRAME_SIZE, &value64); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); ok(value64 == ((UINT64)16 << 32 | 32), "Unexpected value %#I64x.\n", value64); - hr = IMFVideoMediaType_GetUINT64(media_type, &MF_MT_PIXEL_ASPECT_RATIO, &value64); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + + vih.hdr.bmiHeader.biHeight = 32; + hr = MFInitMediaTypeFromMPEG1VideoInfo(media_type, &vih, sizeof(vih), &GUID_NULL); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - ok(value64 == ((UINT64)3 << 32 | 2), "Unexpected value %#I64x.\n", value64); - hr = IMFVideoMediaType_GetUINT32(media_type, &MF_MT_DRM_FLAGS, &value32); + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_FRAME_SIZE, &value64); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - ok(value32 == MFVideoDRMFlag_AnalogProtected, "Unexpected value %#x.\n", value32); - hr = IMFVideoMediaType_GetUINT32(media_type, &MF_MT_INTERLACE_MODE, &value32); + ok(value64 == ((UINT64)16 << 32 | 32), "Unexpected value %#I64x.\n", value64); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_INTERLACE_MODE, &value32); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); ok(value32 == MFVideoInterlace_Progressive, "Unexpected value %#x.\n", value32); - hr = IMFVideoMediaType_GetUINT32(media_type, &MF_MT_SAMPLE_SIZE, &value32); - 
ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - ok(value32 == 2048, "Unexpected value %u.\n", value32); - hr = IMFVideoMediaType_GetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, &value32); + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_PIXEL_ASPECT_RATIO, &value64); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - ok(value32 == -64, "Unexpected value %d.\n", value32); - hr = IMFVideoMediaType_GetUINT32(media_type, &MF_MT_FIXED_SIZE_SAMPLES, &value32); + ok(value64 == ((UINT64)1 << 32 | 1), "Unexpected value %#I64x.\n", value64); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_SAMPLE_SIZE, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + + vih.hdr.bmiHeader.biXPelsPerMeter = 2; + vih.hdr.bmiHeader.biYPelsPerMeter = 3; + hr = MFInitMediaTypeFromMPEG1VideoInfo(media_type, &vih, sizeof(vih), &GUID_NULL); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - ok(!!value32, "Unexpected value %#x.\n", value32); - hr = IMFVideoMediaType_GetUINT32(media_type, &MF_MT_ALL_SAMPLES_INDEPENDENT, &value32); + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_PIXEL_ASPECT_RATIO, &value64); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - ok(!!value32, "Unexpected value %#x.\n", value32); + ok(value64 == ((UINT64)1 << 32 | 1), "Unexpected value %#I64x.\n", value64); - IMFVideoMediaType_Release(media_type); + value32 = 0xdeadbeef; + vih.hdr.bmiHeader.biSizeImage = 12345; + hr = MFInitMediaTypeFromMPEG1VideoInfo(media_type, &vih, sizeof(vih), &GUID_NULL); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_SAMPLE_SIZE, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == 12345, "Unexpected value %#x.\n", value32); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_MPEG_START_TIME_CODE, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); - /* 
Negative height. */ - vih.bmiHeader.biHeight = -32; - hr = MFCreateVideoMediaTypeFromVideoInfoHeader(&vih, sizeof(vih), 3, 2, MFVideoInterlace_Progressive, - MFVideoFlag_AnalogProtected, NULL, &media_type); + value32 = 0xdeadbeef; + vih.dwStartTimeCode = 1234; + hr = MFInitMediaTypeFromMPEG1VideoInfo(media_type, &vih, sizeof(vih), &GUID_NULL); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - hr = IMFVideoMediaType_GetUINT64(media_type, &MF_MT_FRAME_SIZE, &value64); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_MPEG_START_TIME_CODE, &value32); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - ok(value64 == ((UINT64)16 << 32 | 32), "Unexpected value %#I64x.\n", value64); - hr = IMFVideoMediaType_GetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, &value32); + ok(value32 == 1234, "Unexpected value %#x.\n", value32); + hr = IMFMediaType_GetItem(media_type, &MF_MT_MPEG_SEQUENCE_HEADER, NULL); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + + value32 = 0xdeadbeef; + memset(buffer, 0xcd, sizeof(buffer)); + vih.cbSequenceHeader = 1; + vih.bSequenceHeader[0] = 0xad; + hr = MFInitMediaTypeFromMPEG1VideoInfo(media_type, &vih, sizeof(vih), &GUID_NULL); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - ok(value32 == 64, "Unexpected value %d.\n", value32); - IMFVideoMediaType_Release(media_type); + hr = IMFMediaType_GetBlob(media_type, &MF_MT_MPEG_SEQUENCE_HEADER, buffer, sizeof(buffer), &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == 1, "Unexpected value %#x.\n", value32); + ok(buffer[0] == 0xad, "Unexpected value %#x.\n", buffer[0]); + + IMFMediaType_Release(media_type); } -static void test_MFInitMediaTypeFromVideoInfoHeader(void) +static void test_MFInitMediaTypeFromMPEG2VideoInfo(void) { IMFMediaType *media_type; - VIDEOINFOHEADER vih; + MPEG2VIDEOINFO vih; + DWORD buffer[64]; UINT32 value32; UINT64 value64; HRESULT hr; @@ -9895,22 +11759,13 @@ static void test_MFInitMediaTypeFromVideoInfoHeader(void) ok(hr == S_OK, "Unexpected hr %#lx.\n", 
hr); memset(&vih, 0, sizeof(vih)); - hr = MFInitMediaTypeFromVideoInfoHeader(media_type, &vih, 0, NULL); - todo_wine + hr = MFInitMediaTypeFromMPEG2VideoInfo(media_type, &vih, 0, NULL); ok(hr == E_INVALIDARG, "Unexpected hr %#lx.\n", hr); - hr = MFInitMediaTypeFromVideoInfoHeader(media_type, &vih, sizeof(vih), NULL); - todo_wine + hr = MFInitMediaTypeFromMPEG2VideoInfo(media_type, &vih, sizeof(vih), NULL); ok(hr == E_INVALIDARG, "Unexpected hr %#lx.\n", hr); - vih.bmiHeader.biSize = sizeof(vih.bmiHeader); - vih.bmiHeader.biPlanes = 1; - vih.bmiHeader.biWidth = 16; - vih.bmiHeader.biHeight = 32; - vih.bmiHeader.biBitCount = 32; - - hr = MFInitMediaTypeFromVideoInfoHeader(media_type, &vih, sizeof(vih), &GUID_NULL); + hr = MFInitMediaTypeFromMPEG2VideoInfo(media_type, &vih, sizeof(vih), &GUID_NULL); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - hr = IMFMediaType_GetGUID(media_type, &MF_MT_MAJOR_TYPE, &guid); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); ok(IsEqualGUID(&guid, &MFMediaType_Video), "Unexpected guid %s.\n", debugstr_guid(&guid)); @@ -9918,83 +11773,124 @@ static void test_MFInitMediaTypeFromVideoInfoHeader(void) ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); ok(IsEqualGUID(&guid, &GUID_NULL), "Unexpected guid %s.\n", debugstr_guid(&guid)); hr = IMFMediaType_GetUINT64(media_type, &MF_MT_FRAME_SIZE, &value64); - ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - ok(value64 == ((UINT64)16 << 32 | 32), "Unexpected value %#I64x.\n", value64); - hr = IMFMediaType_GetUINT64(media_type, &MF_MT_PIXEL_ASPECT_RATIO, &value64); - ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - ok(value64 == ((UINT64)1 << 32 | 1), "Unexpected value %#I64x.\n", value64); - hr = IMFMediaType_GetUINT32(media_type, &MF_MT_INTERLACE_MODE, &value32); - ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - ok(value32 == MFVideoInterlace_Progressive, "Unexpected value %#x.\n", value32); - - hr = IMFMediaType_GetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, &value32); + todo_wine ok(hr == 
MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); - hr = IMFMediaType_GetUINT32(media_type, &MF_MT_FIXED_SIZE_SAMPLES, &value32); + + vih.hdr.bmiHeader.biWidth = 16; + hr = MFInitMediaTypeFromMPEG2VideoInfo(media_type, &vih, sizeof(vih), &GUID_NULL); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_FRAME_SIZE, &value64); + todo_wine ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); - hr = IMFMediaType_GetUINT32(media_type, &MF_MT_SAMPLE_SIZE, &value32); + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_PIXEL_ASPECT_RATIO, &value64); + todo_wine ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); - hr = IMFMediaType_GetUINT32(media_type, &MF_MT_ALL_SAMPLES_INDEPENDENT, &value32); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_INTERLACE_MODE, &value32); + todo_wine ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); - vih.bmiHeader.biHeight = -32; - hr = MFInitMediaTypeFromVideoInfoHeader(media_type, &vih, sizeof(vih), &GUID_NULL); + vih.hdr.bmiHeader.biHeight = -32; + hr = MFInitMediaTypeFromMPEG2VideoInfo(media_type, &vih, sizeof(vih), &GUID_NULL); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); hr = IMFMediaType_GetUINT64(media_type, &MF_MT_FRAME_SIZE, &value64); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); ok(value64 == ((UINT64)16 << 32 | 32), "Unexpected value %#I64x.\n", value64); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); - vih.bmiHeader.biHeight = 32; - hr = MFInitMediaTypeFromVideoInfoHeader(media_type, &vih, sizeof(vih), NULL); - todo_wine - ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - if (FAILED(hr)) goto failed; - - hr = IMFMediaType_GetGUID(media_type, &MF_MT_MAJOR_TYPE, &guid); - ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - ok(IsEqualGUID(&guid, &MFMediaType_Video), "Unexpected guid %s.\n", debugstr_guid(&guid)); - hr = IMFMediaType_GetGUID(media_type, &MF_MT_SUBTYPE, 
&guid); + vih.hdr.bmiHeader.biHeight = 32; + hr = MFInitMediaTypeFromMPEG2VideoInfo(media_type, &vih, sizeof(vih), &GUID_NULL); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - todo_wine - ok(IsEqualGUID(&guid, &MFVideoFormat_RGB32), "Unexpected guid %s.\n", debugstr_guid(&guid)); hr = IMFMediaType_GetUINT64(media_type, &MF_MT_FRAME_SIZE, &value64); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); ok(value64 == ((UINT64)16 << 32 | 32), "Unexpected value %#I64x.\n", value64); - hr = IMFMediaType_GetUINT64(media_type, &MF_MT_PIXEL_ASPECT_RATIO, &value64); - ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - ok(value64 == ((UINT64)1 << 32 | 1), "Unexpected value %#I64x.\n", value64); hr = IMFMediaType_GetUINT32(media_type, &MF_MT_INTERLACE_MODE, &value32); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); ok(value32 == MFVideoInterlace_Progressive, "Unexpected value %#x.\n", value32); + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_PIXEL_ASPECT_RATIO, &value64); + todo_wine + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); hr = IMFMediaType_GetUINT32(media_type, &MF_MT_SAMPLE_SIZE, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + + vih.hdr.bmiHeader.biXPelsPerMeter = 2; + vih.hdr.bmiHeader.biYPelsPerMeter = 3; + hr = MFInitMediaTypeFromMPEG2VideoInfo(media_type, &vih, sizeof(vih), &GUID_NULL); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - ok(value32 == 2048, "Unexpected value %u.\n", value32); - hr = IMFMediaType_GetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, &value32); + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_PIXEL_ASPECT_RATIO, &value64); + todo_wine + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + + value32 = 0xdeadbeef; + vih.hdr.bmiHeader.biSizeImage = 12345; + hr = MFInitMediaTypeFromMPEG2VideoInfo(media_type, &vih, sizeof(vih), &GUID_NULL); ok(hr == S_OK, "Unexpected 
hr %#lx.\n", hr); - ok(value32 == -64, "Unexpected value %d.\n", value32); - hr = IMFMediaType_GetUINT32(media_type, &MF_MT_FIXED_SIZE_SAMPLES, &value32); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_SAMPLE_SIZE, &value32); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - ok(!!value32, "Unexpected value %#x.\n", value32); - hr = IMFMediaType_GetUINT32(media_type, &MF_MT_ALL_SAMPLES_INDEPENDENT, &value32); + ok(value32 == 12345, "Unexpected value %#x.\n", value32); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_MPEG_START_TIME_CODE, &value32); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + + value32 = 0xdeadbeef; + vih.dwStartTimeCode = 1234; + hr = MFInitMediaTypeFromMPEG2VideoInfo(media_type, &vih, sizeof(vih), &GUID_NULL); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - ok(!!value32, "Unexpected value %#x.\n", value32); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_MPEG_START_TIME_CODE, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == 1234, "Unexpected value %#x.\n", value32); + hr = IMFMediaType_GetItem(media_type, &MF_MT_MPEG2_PROFILE, NULL); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); - /* Negative height. 
*/ - vih.bmiHeader.biHeight = -32; - hr = MFInitMediaTypeFromVideoInfoHeader(media_type, &vih, sizeof(vih), NULL); + value32 = 0xdeadbeef; + vih.dwProfile = 1234; + hr = MFInitMediaTypeFromMPEG2VideoInfo(media_type, &vih, sizeof(vih), &GUID_NULL); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - hr = IMFMediaType_GetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, &value32); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_MPEG2_PROFILE, &value32); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - ok(value32 == 64, "Unexpected value %d.\n", value32); - hr = IMFMediaType_GetUINT64(media_type, &MF_MT_FRAME_SIZE, &value64); + ok(value32 == 1234, "Unexpected value %#x.\n", value32); + hr = IMFMediaType_GetItem(media_type, &MF_MT_MPEG2_LEVEL, NULL); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + + value32 = 0xdeadbeef; + vih.dwLevel = 1234; + hr = MFInitMediaTypeFromMPEG2VideoInfo(media_type, &vih, sizeof(vih), &GUID_NULL); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); - ok(value64 == ((UINT64)16 << 32 | 32), "Unexpected value %#I64x.\n", value64); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_MPEG2_LEVEL, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == 1234, "Unexpected value %#x.\n", value32); + hr = IMFMediaType_GetItem(media_type, &MF_MT_MPEG2_FLAGS, NULL); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + + value32 = 0xdeadbeef; + vih.dwFlags = 1234; + hr = MFInitMediaTypeFromMPEG2VideoInfo(media_type, &vih, sizeof(vih), &GUID_NULL); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_MPEG2_FLAGS, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == 1234, "Unexpected value %#x.\n", value32); + hr = IMFMediaType_GetItem(media_type, &MF_MT_MPEG_SEQUENCE_HEADER, NULL); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + + value32 = 0xdeadbeef; + memset(buffer, 0xcd, sizeof(buffer)); + vih.cbSequenceHeader = 3; + vih.dwSequenceHeader[0] = 
0xabcdef; + hr = MFInitMediaTypeFromMPEG2VideoInfo(media_type, &vih, sizeof(vih), &GUID_NULL); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFMediaType_GetBlob(media_type, &MF_MT_MPEG_SEQUENCE_HEADER, (BYTE *)buffer, sizeof(buffer), &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == 3, "Unexpected value %#x.\n", value32); + ok(buffer[0] == 0xcdabcdef, "Unexpected value %#lx.\n", buffer[0]); -failed: IMFMediaType_Release(media_type); } static void test_MFInitMediaTypeFromAMMediaType(void) { + static const MFVideoArea expect_aperture = {.OffsetX = {.value = 13}, .OffsetY = {.value = 46}, .Area = {.cx = 110, .cy = 410}}; + static const RECT source = {13, 46, 123, 456}, target = {25, 34, 107, 409}; IMFMediaType *media_type; AM_MEDIA_TYPE mt; UINT32 value32; @@ -10045,6 +11941,7 @@ static void test_MFInitMediaTypeFromAMMediaType(void) { &MEDIASUBTYPE_h264, &MEDIASUBTYPE_h264 }, { &MEDIASUBTYPE_H264, &MFVideoFormat_H264 }, }; + MFVideoArea aperture; unsigned int i; hr = MFCreateMediaType(&media_type); @@ -10164,6 +12061,32 @@ static void test_MFInitMediaTypeFromAMMediaType(void) ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); ok(value32 == 128, "Unexpected value %d.\n", value32); + /* only rcSource is considered, lSampleSize is ignored if biSizeImage was present */ + memcpy(&mt.subtype, &MEDIASUBTYPE_RGB32, sizeof(GUID)); + vih.rcSource = source; + vih.rcTarget = target; + vih.bmiHeader.biWidth = 432; + vih.bmiHeader.biHeight = -654; + vih.bmiHeader.biSizeImage = 12345678; + mt.lSampleSize = 87654321; + hr = MFInitMediaTypeFromAMMediaType(media_type, &mt); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + + hr = IMFMediaType_GetUINT64(media_type, &MF_MT_FRAME_SIZE, &value64); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok((UINT32)(value64 >> 32) == 432, "got %d.\n", (UINT32)(value64 >> 32)); + ok((UINT32)value64 == 654, "got %d.\n", (UINT32)value64); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, &value32); + ok(hr == 
S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == 432 * 4, "got %d.\n", (UINT32)value32); + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_SAMPLE_SIZE, &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == 12345678, "got %d.\n", (UINT32)value32); + hr = IMFMediaType_GetBlob(media_type, &MF_MT_MINIMUM_DISPLAY_APERTURE, (BYTE *)&aperture, sizeof(aperture), &value32); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value32 == sizeof(aperture), "got %d.\n", value32); + ok(!memcmp(&aperture, &expect_aperture, sizeof(aperture)), "unexpected aperture\n"); + vih.bmiHeader.biHeight = 24; for (i = 0; i < ARRAY_SIZE(guid_types); ++i) { @@ -10563,6 +12486,7 @@ START_TEST(mfplat) test_MFCreateMFVideoFormatFromMFMediaType(); test_MFInitAMMediaTypeFromMFMediaType(); test_MFCreateAMMediaTypeFromMFMediaType(); + test_MFInitMediaTypeFromMFVideoFormat(); test_IMFMediaType_GetRepresentation(); test_MFCreateMediaTypeFromRepresentation(); test_MFCreateDXSurfaceBuffer(); @@ -10582,6 +12506,9 @@ START_TEST(mfplat) test_MFInitVideoFormat_RGB(); test_MFCreateVideoMediaTypeFromVideoInfoHeader(); test_MFInitMediaTypeFromVideoInfoHeader(); + test_MFInitMediaTypeFromVideoInfoHeader2(); + test_MFInitMediaTypeFromMPEG1VideoInfo(); + test_MFInitMediaTypeFromMPEG2VideoInfo(); test_MFInitMediaTypeFromAMMediaType(); test_MFCreatePathFromURL(); test_2dbuffer_copy(); diff --git a/dlls/mfreadwrite/reader.c b/dlls/mfreadwrite/reader.c index 1461d7acd7b..3ff8a5f4625 100644 --- a/dlls/mfreadwrite/reader.c +++ b/dlls/mfreadwrite/reader.c @@ -41,6 +41,8 @@ WINE_DEFAULT_DEBUG_CHANNEL(mfplat); +DEFINE_MEDIATYPE_GUID(MFVideoFormat_ABGR32, D3DFMT_A8B8G8R8); + struct stream_response { struct list entry; @@ -71,18 +73,23 @@ enum media_stream_flags STREAM_FLAG_STOPPED = 0x8, /* Received MEStreamStopped */ }; -struct stream_transform +struct transform_entry { + struct list entry; IMFTransform *transform; unsigned int min_buffer_size; + UINT32 pending_flags; + GUID category; + BOOL hidden; + 
BOOL attributes_initialized; }; struct media_stream { IMFMediaStream *stream; IMFMediaType *current; - struct stream_transform decoder; - IMFVideoSampleAllocatorEx *allocator; + struct list transforms; + IMFTransform *transform_service; DWORD id; unsigned int index; enum media_stream_state state; @@ -202,6 +209,30 @@ static ULONG source_reader_addref(struct source_reader *reader) return InterlockedIncrement(&reader->refcount); } +static void transform_entry_destroy(struct transform_entry *entry) +{ + IMFTransform_Release(entry->transform); + free(entry); +} + +static void media_stream_destroy(struct media_stream *stream) +{ + struct transform_entry *entry, *next; + + LIST_FOR_EACH_ENTRY_SAFE(entry, next, &stream->transforms, struct transform_entry, entry) + { + list_remove(&entry->entry); + transform_entry_destroy(entry); + } + + if (stream->transform_service) + IMFTransform_Release(stream->transform_service); + if (stream->stream) + IMFMediaStream_Release(stream->stream); + if (stream->current) + IMFMediaType_Release(stream->current); +} + static ULONG source_reader_release(struct source_reader *reader) { ULONG refcount = InterlockedDecrement(&reader->refcount); @@ -222,19 +253,10 @@ static ULONG source_reader_release(struct source_reader *reader) for (i = 0; i < reader->stream_count; ++i) { struct media_stream *stream = &reader->streams[i]; - - if (stream->stream) - IMFMediaStream_Release(stream->stream); - if (stream->current) - IMFMediaType_Release(stream->current); - if (stream->decoder.transform) - IMFTransform_Release(stream->decoder.transform); - if (stream->allocator) - IMFVideoSampleAllocatorEx_Release(stream->allocator); + media_stream_destroy(stream); } source_reader_release_responses(reader, NULL); free(reader->streams); - MFUnlockWorkQueue(reader->queue); DeleteCriticalSection(&reader->cs); free(reader); } @@ -242,461 +264,6 @@ static ULONG source_reader_release(struct source_reader *reader) return refcount; } -struct passthrough_transform -{ - 
IMFTransform IMFTransform_iface; - LONG refcount; - IMFMediaType *type; - IMFAttributes *attributes; - IMFAttributes *input_attributes; - IMFAttributes *output_attributes; - IMFSample *sample; -}; - -static inline struct passthrough_transform *impl_from_IMFTransform(IMFTransform *iface) -{ - return CONTAINING_RECORD(iface, struct passthrough_transform, IMFTransform_iface); -} - -static HRESULT WINAPI passthrough_transform_QueryInterface(IMFTransform *iface, REFIID riid, void **out) -{ - struct passthrough_transform *transform = impl_from_IMFTransform(iface); - - TRACE("%p, %s, %p.\n", iface, debugstr_guid(riid), out); - - if (IsEqualIID(riid, &IID_IUnknown) || - IsEqualIID(riid, &IID_IMFTransform)) - { - *out = &transform->IMFTransform_iface; - } - else - { - FIXME("(%s, %p)\n", debugstr_guid(riid), out); - *out = NULL; - return E_NOINTERFACE; - } - - IUnknown_AddRef(iface); - return S_OK; -} - -static ULONG WINAPI passthrough_transform_AddRef(IMFTransform *iface) -{ - struct passthrough_transform *transform = impl_from_IMFTransform(iface); - ULONG refcount = InterlockedIncrement(&transform->refcount); - - TRACE("%p, refcount %lu.\n", iface, refcount); - - return refcount; -} - -static ULONG WINAPI passthrough_transform_Release(IMFTransform *iface) -{ - struct passthrough_transform *transform = impl_from_IMFTransform(iface); - ULONG refcount = InterlockedDecrement(&transform->refcount); - - TRACE("%p, refcount %lu.\n", iface, refcount); - - if (!refcount) - { - if (transform->type) - IMFMediaType_Release(transform->type); - IMFAttributes_Release(transform->attributes); - IMFAttributes_Release(transform->input_attributes); - IMFAttributes_Release(transform->output_attributes); - if (transform->sample) - IMFSample_Release(transform->sample); - } - - return refcount; -} -static HRESULT WINAPI passthrough_transform_GetStreamLimits(IMFTransform *iface, - DWORD *input_minimum, DWORD *input_maximum, DWORD *output_minimum, DWORD *output_maximum) -{ - TRACE("%p, %p, %p, %p, 
%p.\n", iface, input_minimum, input_maximum, output_minimum, output_maximum); - - *input_minimum = 1; - *input_maximum = 1; - *output_minimum = 1; - *output_maximum = 1; - - return S_OK; -} - -static HRESULT WINAPI passthrough_transform_GetStreamCount(IMFTransform *iface, DWORD *inputs, DWORD *outputs) -{ - TRACE("%p, %p, %p.\n", iface, inputs, outputs); - - *inputs = 1; - *outputs = 1; - - return S_OK; -} - -static HRESULT WINAPI passthrough_transform_GetStreamIDs(IMFTransform *iface, - DWORD input_size, DWORD *inputs, DWORD output_size, DWORD *outputs) -{ - TRACE("%p, %ld, %p, %ld, %p.\n", iface, input_size, inputs, output_size, outputs); - - if (input_size < 1 || output_size < 1) - return MF_E_BUFFERTOOSMALL; - - inputs[0] = 0; - outputs[0] = 0; - - return S_OK; -} - -static HRESULT WINAPI passthrough_transform_GetInputStreamInfo(IMFTransform *iface, DWORD id, MFT_INPUT_STREAM_INFO *info) -{ - TRACE("%p, %ld, %p.\n", iface, id, info); - - if (id != 0) - return MF_E_INVALIDSTREAMNUMBER; - - info->hnsMaxLatency = 0; - info->dwFlags = MFT_INPUT_STREAM_PROCESSES_IN_PLACE; - info->cbSize = 0; - info->cbMaxLookahead = 0; - info->cbAlignment = 0; - - return S_OK; -} - -static HRESULT WINAPI passthrough_transform_GetOutputStreamInfo(IMFTransform *iface, DWORD id, MFT_OUTPUT_STREAM_INFO *info) -{ - TRACE("%p, %ld, %p.\n", iface, id, info); - - if (id != 0) - return MF_E_INVALIDSTREAMNUMBER; - - info->dwFlags = MFT_OUTPUT_STREAM_PROVIDES_SAMPLES; - info->cbSize = 0; - info->cbAlignment = 0; - - return S_OK; -} - -static HRESULT WINAPI passthrough_transform_GetAttributes(IMFTransform *iface, IMFAttributes **attributes) -{ - struct passthrough_transform *transform = impl_from_IMFTransform(iface); - - TRACE("%p, %p.\n", iface, attributes); - - IMFAttributes_AddRef(transform->attributes); - - *attributes = transform->attributes; - - return S_OK; -} - -static HRESULT WINAPI passthrough_transform_GetInputStreamAttributes(IMFTransform *iface, DWORD id, IMFAttributes 
**attributes) -{ - struct passthrough_transform *transform = impl_from_IMFTransform(iface); - - TRACE("%p, %ld, %p.\n", iface, id, attributes); - - if (id != 0) - return MF_E_INVALIDSTREAMNUMBER; - - IMFAttributes_AddRef(transform->input_attributes); - - *attributes = transform->input_attributes; - - return S_OK; -} - -static HRESULT WINAPI passthrough_transform_GetOutputStreamAttributes(IMFTransform *iface, DWORD id, IMFAttributes **attributes) -{ - struct passthrough_transform *transform = impl_from_IMFTransform(iface); - - TRACE("%p, %ld, %p.\n", iface, id, attributes); - - if (id != 0) - return MF_E_INVALIDSTREAMNUMBER; - - IMFAttributes_AddRef(transform->output_attributes); - - *attributes = transform->output_attributes; - - return S_OK; -} - -static HRESULT WINAPI passthrough_transform_DeleteInputStream(IMFTransform *iface, DWORD id) -{ - TRACE("%p, %ld.\n", iface, id); - - return E_NOTIMPL; -} - -static HRESULT WINAPI passthrough_transform_AddInputStreams(IMFTransform *iface, DWORD streams, DWORD *ids) -{ - TRACE("%p, %ld, %p.\n", iface, streams, ids); - - return E_NOTIMPL; -} - -static HRESULT WINAPI passthrough_transform_GetInputAvailableType(IMFTransform *iface, DWORD id, DWORD index, IMFMediaType **type) -{ - TRACE("%p, %ld, %ld, %p.\n", iface, id, index, type); - - return E_NOTIMPL; -} - -static HRESULT WINAPI passthrough_transform_GetOutputAvailableType(IMFTransform *iface, DWORD id, DWORD index, IMFMediaType **type) -{ - struct passthrough_transform *transform = impl_from_IMFTransform(iface); - - TRACE("%p, %ld, %ld, %p.\n", iface, id, index, type); - - if (id != 0) - return MF_E_INVALIDSTREAMNUMBER; - - if (index != 0) - return MF_E_NO_MORE_TYPES; - - if (!transform->type) - return MF_E_TRANSFORM_TYPE_NOT_SET; - - *type = transform->type; - IMFMediaType_AddRef(*type); - - return S_OK; -} - -static HRESULT WINAPI passthrough_transform_SetInputType(IMFTransform *iface, DWORD id, IMFMediaType *type, DWORD flags) -{ - struct passthrough_transform 
*transform = impl_from_IMFTransform(iface); - - TRACE("%p, %ld, %p, %ld.\n", iface, id, type, flags); - - if (id != 0) - return MF_E_INVALIDSTREAMNUMBER; - - if (!(flags & MFT_SET_TYPE_TEST_ONLY)) - { - if (transform->type) - IMFMediaType_Release(transform->type); - transform->type = type; - IMFMediaType_AddRef(type); - } - - return S_OK; -} - -static HRESULT WINAPI passthrough_transform_SetOutputType(IMFTransform *iface, DWORD id, IMFMediaType *type, DWORD flags) -{ - struct passthrough_transform *transform = impl_from_IMFTransform(iface); - DWORD cmp_flags; - HRESULT hr; - - TRACE("%p, %ld, %p, %ld.\n", iface, id, type, flags); - - if (id != 0) - return MF_E_INVALIDSTREAMNUMBER; - - if (!transform->type) - return MF_E_TRANSFORM_TYPE_NOT_SET; - - hr = IMFMediaType_IsEqual(transform->type, type, &cmp_flags); - if (FAILED(hr)) - return hr; - - if (!(cmp_flags & MF_MEDIATYPE_EQUAL_FORMAT_DATA)) - return MF_E_INVALIDMEDIATYPE; - - return S_OK; -} - -static HRESULT WINAPI passthrough_transform_GetInputCurrentType(IMFTransform *iface, DWORD id, IMFMediaType **type) -{ - struct passthrough_transform *transform = impl_from_IMFTransform(iface); - - TRACE("%p, %ld, %p.\n", iface, id, type); - - if (id != 0) - return MF_E_INVALIDSTREAMNUMBER; - - if (!transform->type) - return MF_E_TRANSFORM_TYPE_NOT_SET; - - *type = transform->type; - IMFMediaType_AddRef(*type); - - return S_OK; -} - -static HRESULT WINAPI passthrough_transform_GetOutputCurrentType(IMFTransform *iface, DWORD id, IMFMediaType **type) -{ - struct passthrough_transform *transform = impl_from_IMFTransform(iface); - - TRACE("%p, %ld, %p.\n", iface, id, type); - - if (id != 0) - return MF_E_INVALIDSTREAMNUMBER; - - if (!transform->type) - return MF_E_TRANSFORM_TYPE_NOT_SET; - - *type = transform->type; - IMFMediaType_AddRef(*type); - - return S_OK; -} - -static HRESULT WINAPI passthrough_transform_GetInputStatus(IMFTransform *iface, DWORD id, DWORD *flags) -{ - struct passthrough_transform *transform = 
impl_from_IMFTransform(iface); - - TRACE("%p, %ld, %p.\n", iface, id, flags); - - if (id != 0) - return MF_E_INVALIDSTREAMNUMBER; - - *flags = transform->sample ? 0 : MFT_INPUT_STATUS_ACCEPT_DATA; - - return S_OK; -} - -static HRESULT WINAPI passthrough_transform_GetOutputStatus(IMFTransform *iface, DWORD *flags) -{ - struct passthrough_transform *transform = impl_from_IMFTransform(iface); - - TRACE("%p, %p.\n", iface, flags); - - *flags = transform->sample ? MFT_OUTPUT_STATUS_SAMPLE_READY : 0; - - return S_OK; -} - -static HRESULT WINAPI passthrough_transform_SetOutputBounds(IMFTransform *iface, LONGLONG lower, LONGLONG upper) -{ - FIXME("%p, %s, %s.\n", iface, wine_dbgstr_longlong(lower), wine_dbgstr_longlong(upper)); - - return E_NOTIMPL; -} - -static HRESULT WINAPI passthrough_transform_ProcessEvent(IMFTransform *iface, DWORD id, IMFMediaEvent *event) -{ - FIXME("%p, %ld, %p.\n", iface, id, event); - - return E_NOTIMPL; -} - -static HRESULT WINAPI passthrough_transform_ProcessMessage(IMFTransform *iface, MFT_MESSAGE_TYPE message, ULONG_PTR param) -{ - FIXME("%p, %u, %Iu.\n", iface, message, param); - - return E_NOTIMPL; -} - -static HRESULT WINAPI passthrough_transform_ProcessInput(IMFTransform *iface, DWORD id, IMFSample *sample, DWORD flags) -{ - struct passthrough_transform *transform = impl_from_IMFTransform(iface); - - TRACE("%p, %ld, %p, %ld.\n", iface, id, sample, flags); - - if (id != 0) - return MF_E_INVALIDSTREAMNUMBER; - - if (transform->sample) - return MF_E_NOTACCEPTING; - - transform->sample = sample; - IMFSample_AddRef(sample); - - return S_OK; -} - -static HRESULT WINAPI passthrough_transform_ProcessOutput(IMFTransform *iface, DWORD flags, DWORD count, - MFT_OUTPUT_DATA_BUFFER *samples, DWORD *status) -{ - struct passthrough_transform *transform = impl_from_IMFTransform(iface); - unsigned int i; - - TRACE("%p, %ld, %ld, %p, %p.\n", iface, flags, count, samples, status); - - if (!transform->sample) - return MF_E_TRANSFORM_NEED_MORE_INPUT; - - if 
(samples[0].dwStreamID != 0) - return MF_E_INVALIDSTREAMNUMBER; - - samples[0].pSample = transform->sample; - transform->sample = NULL; - - for (i = 1; i < count; ++i) - samples[i].dwStatus = MFT_OUTPUT_DATA_BUFFER_NO_SAMPLE; - - *status = 0; - - return S_OK; -} - -static const IMFTransformVtbl passthrough_transform_vtbl = { - passthrough_transform_QueryInterface, - passthrough_transform_AddRef, - passthrough_transform_Release, - passthrough_transform_GetStreamLimits, - passthrough_transform_GetStreamCount, - passthrough_transform_GetStreamIDs, - passthrough_transform_GetInputStreamInfo, - passthrough_transform_GetOutputStreamInfo, - passthrough_transform_GetAttributes, - passthrough_transform_GetInputStreamAttributes, - passthrough_transform_GetOutputStreamAttributes, - passthrough_transform_DeleteInputStream, - passthrough_transform_AddInputStreams, - passthrough_transform_GetInputAvailableType, - passthrough_transform_GetOutputAvailableType, - passthrough_transform_SetInputType, - passthrough_transform_SetOutputType, - passthrough_transform_GetInputCurrentType, - passthrough_transform_GetOutputCurrentType, - passthrough_transform_GetInputStatus, - passthrough_transform_GetOutputStatus, - passthrough_transform_SetOutputBounds, - passthrough_transform_ProcessEvent, - passthrough_transform_ProcessMessage, - passthrough_transform_ProcessInput, - passthrough_transform_ProcessOutput, -}; - -static HRESULT create_passthrough_transform(IMFTransform **transform) -{ - struct passthrough_transform *obj; - HRESULT hr; - - if (!(obj = calloc(1, sizeof(*obj)))) - return E_OUTOFMEMORY; - - obj->IMFTransform_iface.lpVtbl = &passthrough_transform_vtbl; - obj->refcount = 1; - - hr = MFCreateAttributes(&obj->attributes, 0); - if (SUCCEEDED(hr)) - hr = MFCreateAttributes(&obj->input_attributes, 0); - if (SUCCEEDED(hr)) - hr = MFCreateAttributes(&obj->output_attributes, 0); - - if (SUCCEEDED(hr)) - { - *transform = &obj->IMFTransform_iface; - } - else - { - if (obj->attributes) - 
IMFAttributes_Release(obj->attributes); - if (obj->input_attributes) - IMFAttributes_Release(obj->input_attributes); - if (obj->output_attributes) - IMFAttributes_Release(obj->output_attributes); - free(obj); - } - - return hr; -} - static HRESULT WINAPI source_reader_async_command_QueryInterface(IUnknown *iface, REFIID riid, void **obj) { if (IsEqualIID(riid, &IID_IUnknown)) @@ -859,39 +426,6 @@ static void source_reader_response_ready(struct source_reader *reader, struct st stream->requests--; } -static void source_reader_copy_sample_buffer(IMFSample *src, IMFSample *dst) -{ - IMFMediaBuffer *buffer; - LONGLONG time; - DWORD flags; - HRESULT hr; - - IMFSample_CopyAllItems(src, (IMFAttributes *)dst); - - IMFSample_SetSampleDuration(dst, 0); - IMFSample_SetSampleTime(dst, 0); - IMFSample_SetSampleFlags(dst, 0); - - if (SUCCEEDED(IMFSample_GetSampleDuration(src, &time))) - IMFSample_SetSampleDuration(dst, time); - - if (SUCCEEDED(IMFSample_GetSampleTime(src, &time))) - IMFSample_SetSampleTime(dst, time); - - if (SUCCEEDED(IMFSample_GetSampleFlags(src, &flags))) - IMFSample_SetSampleFlags(dst, flags); - - if (SUCCEEDED(IMFSample_ConvertToContiguousBuffer(src, NULL))) - { - if (SUCCEEDED(IMFSample_GetBufferByIndex(dst, 0, &buffer))) - { - if (FAILED(hr = IMFSample_CopyToBuffer(src, buffer))) - WARN("Failed to copy a buffer, hr %#lx.\n", hr); - IMFMediaBuffer_Release(buffer); - } - } -} - static HRESULT source_reader_queue_response(struct source_reader *reader, struct media_stream *stream, HRESULT status, DWORD stream_flags, LONGLONG timestamp, IMFSample *sample) { @@ -918,6 +452,17 @@ static HRESULT source_reader_queue_response(struct source_reader *reader, struct return S_OK; } +static HRESULT source_reader_queue_sample(struct source_reader *reader, struct media_stream *stream, + UINT flags, IMFSample *sample) +{ + LONGLONG timestamp = 0; + + if (FAILED(IMFSample_GetSampleTime(sample, ×tamp))) + WARN("Sample time wasn't set.\n"); + + return 
source_reader_queue_response(reader, stream, S_OK, flags, timestamp, sample); +} + static HRESULT source_reader_request_sample(struct source_reader *reader, struct media_stream *stream) { HRESULT hr = S_OK; @@ -1099,55 +644,244 @@ static ULONG WINAPI source_reader_stream_events_callback_Release(IMFAsyncCallbac return source_reader_release(reader); } -static HRESULT source_reader_pull_stream_samples(struct source_reader *reader, struct media_stream *stream) +static HRESULT source_reader_allocate_stream_sample(IMFTransform *transform, MFT_OUTPUT_STREAM_INFO *info, IMFSample **out) { - MFT_OUTPUT_STREAM_INFO stream_info = { 0 }; - MFT_OUTPUT_DATA_BUFFER out_buffer; - unsigned int buffer_size; + IMFMediaType *media_type; IMFMediaBuffer *buffer; - LONGLONG timestamp; + IMFSample *sample; + HRESULT hr; + + *out = NULL; + if (SUCCEEDED(hr = IMFTransform_GetOutputCurrentType(transform, 0, &media_type))) + { + hr = MFCreateMediaBufferFromMediaType(media_type, 10000000, info->cbSize, info->cbAlignment, &buffer); + IMFMediaType_Release(media_type); + } + if (FAILED(hr) && FAILED(hr = MFCreateAlignedMemoryBuffer(info->cbSize, info->cbAlignment, &buffer))) + return hr; + + if (SUCCEEDED(hr = MFCreateSample(&sample))) + { + if (SUCCEEDED(hr = IMFSample_AddBuffer(sample, buffer))) + *out = sample; + else + IMFSample_Release(sample); + } + + IMFMediaBuffer_Release(buffer); + return hr; +} + +static void media_type_try_copy_attr(IMFMediaType *dst, IMFMediaType *src, const GUID *attr, HRESULT *hr) +{ + PROPVARIANT value; + + PropVariantInit(&value); + if (SUCCEEDED(*hr) && FAILED(IMFMediaType_GetItem(dst, attr, NULL)) + && SUCCEEDED(IMFMediaType_GetItem(src, attr, &value))) + *hr = IMFMediaType_SetItem(dst, attr, &value); + PropVariantClear(&value); +} + +/* update a media type with additional attributes reported by upstream element */ +/* also present in mf/topology_loader.c pipeline */ +static HRESULT update_media_type_from_upstream(IMFMediaType *media_type, IMFMediaType 
*upstream_type) +{ + HRESULT hr = S_OK; + + /* propagate common video attributes */ + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_FRAME_SIZE, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_FRAME_RATE, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_DEFAULT_STRIDE, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_VIDEO_ROTATION, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_FIXED_SIZE_SAMPLES, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_PIXEL_ASPECT_RATIO, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_ALL_SAMPLES_INDEPENDENT, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_MINIMUM_DISPLAY_APERTURE, &hr); + + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_VIDEO_CHROMA_SITING, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_INTERLACE_MODE, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_TRANSFER_FUNCTION, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_VIDEO_PRIMARIES, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_YUV_MATRIX, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_VIDEO_LIGHTING, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_VIDEO_NOMINAL_RANGE, &hr); + + /* propagate common audio attributes */ + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_AUDIO_NUM_CHANNELS, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_AUDIO_BLOCK_ALIGNMENT, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_AUDIO_BITS_PER_SAMPLE, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_AUDIO_SAMPLES_PER_SECOND, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_AUDIO_AVG_BYTES_PER_SECOND, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_AUDIO_CHANNEL_MASK, &hr); + media_type_try_copy_attr(media_type, upstream_type, 
&MF_MT_AUDIO_SAMPLES_PER_BLOCK, &hr); + media_type_try_copy_attr(media_type, upstream_type, &MF_MT_AUDIO_VALID_BITS_PER_SAMPLE, &hr); + + return hr; +} + +static HRESULT source_reader_pull_transform_samples(struct source_reader *reader, struct media_stream *stream, + struct transform_entry *entry); +static HRESULT source_reader_push_transform_samples(struct source_reader *reader, struct media_stream *stream, + struct transform_entry *entry, IMFSample *sample) +{ + HRESULT hr; + + do + { + if (FAILED(hr = source_reader_pull_transform_samples(reader, stream, entry)) + && hr != MF_E_TRANSFORM_NEED_MORE_INPUT) + return hr; + if (SUCCEEDED(hr = IMFTransform_ProcessInput(entry->transform, 0, sample, 0))) + return source_reader_pull_transform_samples(reader, stream, entry); + } + while (hr == MF_E_NOTACCEPTING); + + return hr; +} + +/* update the transform output type while keeping subtype which matches the old output type */ +static HRESULT transform_entry_update_output_type(struct transform_entry *entry, IMFMediaType *old_output_type) +{ + IMFMediaType *new_output_type; + GUID subtype, desired; + UINT i = 0; + HRESULT hr; + + IMFMediaType_GetGUID(old_output_type, &MF_MT_SUBTYPE, &desired); + + /* find an available output type matching the desired subtype */ + while (SUCCEEDED(hr = IMFTransform_GetOutputAvailableType(entry->transform, 0, i++, &new_output_type))) + { + IMFMediaType_GetGUID(new_output_type, &MF_MT_SUBTYPE, &subtype); + if (IsEqualGUID(&subtype, &desired) && SUCCEEDED(hr = IMFTransform_SetOutputType(entry->transform, 0, new_output_type, 0))) + { + entry->pending_flags |= MF_SOURCE_READERF_CURRENTMEDIATYPECHANGED; + IMFMediaType_Release(new_output_type); + return S_OK; + } + IMFMediaType_Release(new_output_type); + } + + return hr; +} + +/* update the transform input type while keeping an output type which matches the current output subtype */ +static HRESULT transform_entry_update_input_type(struct transform_entry *entry, IMFMediaType *input_type) +{ + 
IMFMediaType *old_output_type, *new_output_type; + HRESULT hr; + + if (FAILED(hr = IMFTransform_GetOutputCurrentType(entry->transform, 0, &old_output_type))) + return hr; + if (FAILED(hr = IMFTransform_SetInputType(entry->transform, 0, input_type, 0))) + return hr; + + /* check if transform output type is still valid or if we need to update it as well */ + if (FAILED(hr = IMFTransform_GetOutputCurrentType(entry->transform, 0, &new_output_type))) + hr = transform_entry_update_output_type(entry, old_output_type); + else + IMFMediaType_Release(new_output_type); + + IMFMediaType_Release(old_output_type); + return hr; +} + +static void transform_entry_initialize_attributes(struct source_reader *reader, struct transform_entry *entry) +{ + IMFAttributes *attributes; + + if (SUCCEEDED(IMFTransform_GetAttributes(entry->transform, &attributes))) + { + if (FAILED(IMFAttributes_GetItem(attributes, &MF_SA_MINIMUM_OUTPUT_SAMPLE_COUNT, NULL))) + IMFAttributes_SetUINT32(attributes, &MF_SA_MINIMUM_OUTPUT_SAMPLE_COUNT, 6); + + IMFAttributes_Release(attributes); + } + + if (SUCCEEDED(IMFTransform_GetOutputStreamAttributes(entry->transform, 0, &attributes))) + { + UINT32 shared, shared_without_mutex, bind_flags; + + if (SUCCEEDED(IMFAttributes_GetUINT32(reader->attributes, &MF_SA_D3D11_SHARED, &shared))) + IMFAttributes_SetUINT32(attributes, &MF_SA_D3D11_SHARED, shared); + if (SUCCEEDED(IMFAttributes_GetUINT32(reader->attributes, &MF_SA_D3D11_SHARED_WITHOUT_MUTEX, &shared_without_mutex))) + IMFAttributes_SetUINT32(attributes, &MF_SA_D3D11_SHARED_WITHOUT_MUTEX, shared_without_mutex); + if (SUCCEEDED(IMFAttributes_GetUINT32(reader->attributes, &MF_SOURCE_READER_D3D11_BIND_FLAGS, &bind_flags))) + IMFAttributes_SetUINT32(attributes, &MF_SA_D3D11_BINDFLAGS, bind_flags); + else if ((reader->flags & SOURCE_READER_DXGI_DEVICE_MANAGER) && FAILED(IMFAttributes_GetItem(attributes, &MF_SA_D3D11_BINDFLAGS, NULL))) + IMFAttributes_SetUINT32(attributes, &MF_SA_D3D11_BINDFLAGS, 1024); + + 
IMFAttributes_Release(attributes); + } +} + +static HRESULT source_reader_pull_transform_samples(struct source_reader *reader, struct media_stream *stream, + struct transform_entry *entry) +{ + MFT_OUTPUT_STREAM_INFO stream_info = {0}; + struct transform_entry *next = NULL; + struct list *ptr; DWORD status; HRESULT hr; - if (FAILED(hr = IMFTransform_GetOutputStreamInfo(stream->decoder.transform, 0, &stream_info))) + if ((ptr = list_next(&stream->transforms, &entry->entry))) + next = LIST_ENTRY(ptr, struct transform_entry, entry); + + if (!entry->attributes_initialized) { - WARN("Failed to get output stream info, hr %#lx.\n", hr); - return hr; + transform_entry_initialize_attributes(reader, entry); + entry->attributes_initialized = TRUE; } - for (;;) - { - memset(&out_buffer, 0, sizeof(out_buffer)); + if (FAILED(hr = IMFTransform_GetOutputStreamInfo(entry->transform, 0, &stream_info))) + return hr; + stream_info.cbSize = max(stream_info.cbSize, entry->min_buffer_size); - if (!(stream_info.dwFlags & (MFT_OUTPUT_STREAM_PROVIDES_SAMPLES | MFT_OUTPUT_STREAM_CAN_PROVIDE_SAMPLES))) - { - if (FAILED(hr = MFCreateSample(&out_buffer.pSample))) - break; + while (SUCCEEDED(hr)) + { + MFT_OUTPUT_DATA_BUFFER out_buffer = {0}; + IMFMediaType *media_type; - buffer_size = max(stream_info.cbSize, stream->decoder.min_buffer_size); + if (!(stream_info.dwFlags & (MFT_OUTPUT_STREAM_PROVIDES_SAMPLES | MFT_OUTPUT_STREAM_CAN_PROVIDE_SAMPLES)) + && FAILED(hr = source_reader_allocate_stream_sample(entry->transform, &stream_info, &out_buffer.pSample))) + break; - if (FAILED(hr = MFCreateAlignedMemoryBuffer(buffer_size, stream_info.cbAlignment, &buffer))) + if (SUCCEEDED(hr = IMFTransform_ProcessOutput(entry->transform, 0, 1, &out_buffer, &status))) + { + /* propagate upstream type to the transform input type */ + if ((entry->pending_flags & MF_SOURCE_READERF_CURRENTMEDIATYPECHANGED) + && SUCCEEDED(hr = IMFTransform_GetOutputCurrentType(entry->transform, 0, &media_type))) { - 
IMFSample_Release(out_buffer.pSample); - break; + if (!next) + hr = IMFMediaType_CopyAllItems(media_type, (IMFAttributes *)stream->current); + else + hr = transform_entry_update_input_type(next, media_type); + IMFMediaType_Release(media_type); } - IMFSample_AddBuffer(out_buffer.pSample, buffer); - IMFMediaBuffer_Release(buffer); + if (FAILED(hr)) + source_reader_queue_response(reader, stream, hr, MF_SOURCE_READERF_ERROR, 0, NULL); + else if (next) + hr = source_reader_push_transform_samples(reader, stream, next, out_buffer.pSample); + else + hr = source_reader_queue_sample(reader, stream, entry->pending_flags, out_buffer.pSample); + + entry->pending_flags = 0; } - if (FAILED(hr = IMFTransform_ProcessOutput(stream->decoder.transform, 0, 1, &out_buffer, &status))) + if (hr == MF_E_TRANSFORM_STREAM_CHANGE && SUCCEEDED(hr = IMFTransform_GetOutputCurrentType(entry->transform, 0, &media_type))) { - if (out_buffer.pSample) - IMFSample_Release(out_buffer.pSample); - break; - } + hr = transform_entry_update_output_type(entry, media_type); + IMFMediaType_Release(media_type); - timestamp = 0; - if (FAILED(IMFSample_GetSampleTime(out_buffer.pSample, ×tamp))) - WARN("Sample time wasn't set.\n"); + if (SUCCEEDED(hr)) + { + hr = IMFTransform_GetOutputStreamInfo(entry->transform, 0, &stream_info); + stream_info.cbSize = max(stream_info.cbSize, entry->min_buffer_size); + } + } - source_reader_queue_response(reader, stream, S_OK /* FIXME */, 0, timestamp, out_buffer.pSample); if (out_buffer.pSample) IMFSample_Release(out_buffer.pSample); if (out_buffer.pEvents) @@ -1157,35 +891,72 @@ static HRESULT source_reader_pull_stream_samples(struct source_reader *reader, s return hr; } -static HRESULT source_reader_process_sample(struct source_reader *reader, struct media_stream *stream, - IMFSample *sample) +static HRESULT source_reader_drain_transform_samples(struct source_reader *reader, struct media_stream *stream, + struct transform_entry *entry) { - LONGLONG timestamp; + struct 
transform_entry *next = NULL; + struct list *ptr; HRESULT hr; - if (!stream->decoder.transform) - { - timestamp = 0; - if (FAILED(IMFSample_GetSampleTime(sample, ×tamp))) - WARN("Sample time wasn't set.\n"); + if ((ptr = list_next(&stream->transforms, &entry->entry))) + next = LIST_ENTRY(ptr, struct transform_entry, entry); - return source_reader_queue_response(reader, stream, S_OK, 0, timestamp, sample); - } + if (FAILED(hr = IMFTransform_ProcessMessage(entry->transform, MFT_MESSAGE_COMMAND_DRAIN, 0))) + WARN("Failed to drain transform %p, hr %#lx\n", entry->transform, hr); + if (FAILED(hr = source_reader_pull_transform_samples(reader, stream, entry)) + && hr != MF_E_TRANSFORM_NEED_MORE_INPUT) + WARN("Failed to pull pending samples, hr %#lx.\n", hr); - /* It's assumed that decoder has 1 input and 1 output, both id's are 0. */ + return next ? source_reader_drain_transform_samples(reader, stream, next) : S_OK; +} - hr = source_reader_pull_stream_samples(reader, stream); - if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) - { - if (FAILED(hr = IMFTransform_ProcessInput(stream->decoder.transform, 0, sample, 0))) - { - WARN("Transform failed to process input, hr %#lx.\n", hr); - return hr; - } +static HRESULT source_reader_flush_transform_samples(struct source_reader *reader, struct media_stream *stream, + struct transform_entry *entry) +{ + struct transform_entry *next = NULL; + struct list *ptr; + HRESULT hr; - if ((hr = source_reader_pull_stream_samples(reader, stream)) == MF_E_TRANSFORM_NEED_MORE_INPUT) - return S_OK; - } + if ((ptr = list_next(&stream->transforms, &entry->entry))) + next = LIST_ENTRY(ptr, struct transform_entry, entry); + + if (FAILED(hr = IMFTransform_ProcessMessage(entry->transform, MFT_MESSAGE_COMMAND_FLUSH, 0))) + WARN("Failed to flush transform %p, hr %#lx\n", entry->transform, hr); + + return next ? 
source_reader_flush_transform_samples(reader, stream, next) : S_OK; +} + +static HRESULT source_reader_notify_transform(struct source_reader *reader, struct media_stream *stream, + struct transform_entry *entry, UINT message) +{ + struct transform_entry *next = NULL; + struct list *ptr; + HRESULT hr; + + if ((ptr = list_next(&stream->transforms, &entry->entry))) + next = LIST_ENTRY(ptr, struct transform_entry, entry); + + if (FAILED(hr = IMFTransform_ProcessMessage(entry->transform, message, 0))) + WARN("Failed to notify transform %p message %#x, hr %#lx\n", entry->transform, message, hr); + + return next ? source_reader_notify_transform(reader, stream, next, message) : S_OK; +} + +static HRESULT source_reader_process_sample(struct source_reader *reader, struct media_stream *stream, + IMFSample *sample) +{ + struct transform_entry *entry; + struct list *ptr; + HRESULT hr; + + if (!(ptr = list_head(&stream->transforms))) + return source_reader_queue_sample(reader, stream, 0, sample); + entry = LIST_ENTRY(ptr, struct transform_entry, entry); + + /* It's assumed that decoder has 1 input and 1 output, both id's are 0. */ + if (SUCCEEDED(hr = source_reader_push_transform_samples(reader, stream, entry, sample)) + || hr == MF_E_TRANSFORM_NEED_MORE_INPUT) + hr = stream->requests ? source_reader_request_sample(reader, stream) : S_OK; else WARN("Transform failed to process output, hr %#lx.\n", hr); @@ -1222,12 +993,8 @@ static HRESULT source_reader_media_sample_handler(struct source_reader *reader, if (id == reader->streams[i].id) { /* FIXME: propagate processing errors? 
*/ - reader->streams[i].flags &= ~STREAM_FLAG_SAMPLE_REQUESTED; hr = source_reader_process_sample(reader, &reader->streams[i], sample); - if (reader->streams[i].requests) - source_reader_request_sample(reader, &reader->streams[i]); - break; } } @@ -1248,6 +1015,7 @@ static HRESULT source_reader_media_stream_state_handler(struct source_reader *re MediaEventType event_type; LONGLONG timestamp; PROPVARIANT value; + struct list *ptr; unsigned int i; HRESULT hr; DWORD id; @@ -1274,11 +1042,11 @@ static HRESULT source_reader_media_stream_state_handler(struct source_reader *re stream->state = STREAM_STATE_EOS; stream->flags &= ~STREAM_FLAG_SAMPLE_REQUESTED; - if (stream->decoder.transform && SUCCEEDED(IMFTransform_ProcessMessage(stream->decoder.transform, - MFT_MESSAGE_COMMAND_DRAIN, 0))) + if ((ptr = list_head(&stream->transforms))) { - if ((hr = source_reader_pull_stream_samples(reader, stream)) != MF_E_TRANSFORM_NEED_MORE_INPUT) - WARN("Failed to pull pending samples, hr %#lx.\n", hr); + struct transform_entry *entry = LIST_ENTRY(ptr, struct transform_entry, entry); + if (FAILED(hr = source_reader_drain_transform_samples(reader, stream, entry))) + WARN("Failed to drain pending samples, hr %#lx.\n", hr); } while (stream->requests) @@ -1288,6 +1056,13 @@ static HRESULT source_reader_media_stream_state_handler(struct source_reader *re case MEStreamSeeked: case MEStreamStarted: stream->state = STREAM_STATE_READY; + + if ((ptr = list_head(&stream->transforms))) + { + struct transform_entry *entry = LIST_ENTRY(ptr, struct transform_entry, entry); + if (FAILED(hr = source_reader_notify_transform(reader, stream, entry, MFT_MESSAGE_NOTIFY_START_OF_STREAM))) + WARN("Failed to drain pending samples, hr %#lx.\n", hr); + } break; case MEStreamStopped: stream->flags |= STREAM_FLAG_STOPPED; @@ -1403,8 +1178,6 @@ static struct stream_response * media_stream_detach_response(struct source_reade static struct stream_response *media_stream_pop_response(struct source_reader *reader, struct 
media_stream *stream) { struct stream_response *response; - IMFSample *sample; - HRESULT hr; LIST_FOR_EACH_ENTRY(response, &reader->responses, struct stream_response, entry) { @@ -1413,26 +1186,6 @@ static struct stream_response *media_stream_pop_response(struct source_reader *r if (!stream) stream = &reader->streams[response->stream_index]; - if (response->sample && stream->allocator) - { - /* Return allocation error to the caller, while keeping original response sample in for later. */ - if (SUCCEEDED(hr = IMFVideoSampleAllocatorEx_AllocateSample(stream->allocator, &sample))) - { - source_reader_copy_sample_buffer(response->sample, sample); - IMFSample_Release(response->sample); - response->sample = sample; - } - else - { - if (!(response = calloc(1, sizeof(*response)))) - return NULL; - - response->status = hr; - response->stream_flags = MF_SOURCE_READERF_ERROR; - return response; - } - } - return media_stream_detach_response(reader, response); } @@ -1637,10 +1390,18 @@ static void source_reader_release_responses(struct source_reader *reader, struct static void source_reader_flush_stream(struct source_reader *reader, DWORD stream_index) { struct media_stream *stream = &reader->streams[stream_index]; + struct list *ptr; + HRESULT hr; source_reader_release_responses(reader, stream); - if (stream->decoder.transform) - IMFTransform_ProcessMessage(stream->decoder.transform, MFT_MESSAGE_COMMAND_FLUSH, 0); + + if ((ptr = list_head(&stream->transforms))) + { + struct transform_entry *entry = LIST_ENTRY(ptr, struct transform_entry, entry); + if (FAILED(hr = source_reader_flush_transform_samples(reader, stream, entry))) + WARN("Failed to drain pending samples, hr %#lx.\n", hr); + } + stream->requests = 0; } @@ -1677,8 +1438,6 @@ static HRESULT source_reader_flush(struct source_reader *reader, unsigned int in return hr; } -static HRESULT source_reader_setup_sample_allocator(struct source_reader *reader, unsigned int index); - static HRESULT WINAPI 
source_reader_async_commands_callback_Invoke(IMFAsyncCallback *iface, IMFAsyncResult *result) { struct source_reader *reader = impl_from_async_commands_callback_IMFAsyncCallback(iface); @@ -1708,15 +1467,7 @@ static HRESULT WINAPI source_reader_async_commands_callback_Invoke(IMFAsyncCallb { stream = &reader->streams[stream_index]; - if (!stream->allocator) - { - hr = source_reader_setup_sample_allocator(reader, stream_index); - - if (FAILED(hr)) - WARN("Failed to setup the sample allocator, hr %#lx.\n", hr); - } - - if (SUCCEEDED(hr) && !(report_sample = source_reader_get_read_result(reader, stream, command->u.read.flags, &status, + if (!(report_sample = source_reader_get_read_result(reader, stream, command->u.read.flags, &status, &stream_index, &stream_flags, ×tamp, &sample))) { stream->requests++; @@ -1847,7 +1598,6 @@ static ULONG WINAPI src_reader_Release(IMFSourceReaderEx *iface) { struct source_reader *reader = impl_from_IMFSourceReaderEx(iface); ULONG refcount = InterlockedDecrement(&reader->public_refcount); - unsigned int i; TRACE("%p, refcount %lu.\n", iface, refcount); @@ -1867,22 +1617,7 @@ static ULONG WINAPI src_reader_Release(IMFSourceReaderEx *iface) LeaveCriticalSection(&reader->cs); } - for (i = 0; i < reader->stream_count; ++i) - { - struct media_stream *stream = &reader->streams[i]; - IMFVideoSampleAllocatorCallback *callback; - - if (!stream->allocator) - continue; - - if (SUCCEEDED(IMFVideoSampleAllocatorEx_QueryInterface(stream->allocator, &IID_IMFVideoSampleAllocatorCallback, - (void **)&callback))) - { - IMFVideoSampleAllocatorCallback_SetCallback(callback, NULL); - IMFVideoSampleAllocatorCallback_Release(callback); - } - } - + MFUnlockWorkQueue(reader->queue); source_reader_release(reader); } @@ -2082,6 +1817,8 @@ static HRESULT source_reader_get_source_type_handler(struct source_reader *reade static HRESULT source_reader_set_compatible_media_type(struct source_reader *reader, DWORD index, IMFMediaType *type) { + struct media_stream 
*stream = &reader->streams[index]; + struct transform_entry *entry, *next; IMFMediaTypeHandler *type_handler; IMFMediaType *native_type; BOOL type_set = FALSE; @@ -2089,7 +1826,7 @@ static HRESULT source_reader_set_compatible_media_type(struct source_reader *rea DWORD flags; HRESULT hr; - if (FAILED(hr = IMFMediaType_IsEqual(type, reader->streams[index].current, &flags))) + if (FAILED(hr = IMFMediaType_IsEqual(type, stream->current, &flags))) return hr; if (!(flags & MF_MEDIATYPE_EQUAL_MAJOR_TYPES)) @@ -2099,6 +1836,17 @@ static HRESULT source_reader_set_compatible_media_type(struct source_reader *rea if (flags & MF_MEDIATYPE_EQUAL_FORMAT_DATA) return S_OK; + if (stream->transform_service) + { + IMFTransform_Release(stream->transform_service); + stream->transform_service = NULL; + } + LIST_FOR_EACH_ENTRY_SAFE(entry, next, &stream->transforms, struct transform_entry, entry) + { + list_remove(&entry->entry); + transform_entry_destroy(entry); + } + if (FAILED(hr = source_reader_get_source_type_handler(reader, index, &type_handler))) return hr; @@ -2109,7 +1857,7 @@ static HRESULT source_reader_set_compatible_media_type(struct source_reader *rea if (SUCCEEDED(IMFMediaType_IsEqual(native_type, type, &flags)) && (flags & compare_flags) == compare_flags) { if ((type_set = SUCCEEDED(IMFMediaTypeHandler_SetCurrentMediaType(type_handler, native_type)))) - IMFMediaType_CopyAllItems(native_type, (IMFAttributes *)reader->streams[index].current); + IMFMediaType_CopyAllItems(native_type, (IMFAttributes *)stream->current); } IMFMediaType_Release(native_type); @@ -2120,233 +1868,229 @@ static HRESULT source_reader_set_compatible_media_type(struct source_reader *rea return type_set ? 
S_OK : S_FALSE; } -static HRESULT source_reader_create_sample_allocator_attributes(const struct source_reader *reader, - struct media_stream *stream, IMFAttributes **attributes) +static BOOL source_reader_allow_video_processor(struct source_reader *reader, BOOL *advanced) { - UINT32 reader_shared = 0, reader_shared_without_mutex = 0; - UINT32 output_shared = 0, output_shared_without_mutex = 0; - HRESULT hr; + UINT32 value; - if (FAILED(hr = MFCreateAttributes(attributes, 1))) - return hr; - - IMFAttributes_GetUINT32(reader->attributes, &MF_SA_D3D11_SHARED, &reader_shared); - IMFAttributes_GetUINT32(reader->attributes, &MF_SA_D3D11_SHARED_WITHOUT_MUTEX, &reader_shared_without_mutex); - - if (stream->decoder.transform) - { - IMFAttributes *output_attributes; - - if (SUCCEEDED(IMFTransform_GetOutputStreamAttributes(stream->decoder.transform, 0, &output_attributes))) - { - IMFAttributes_GetUINT32(output_attributes, &MF_SA_D3D11_SHARED, &output_shared); - IMFAttributes_GetUINT32(output_attributes, &MF_SA_D3D11_SHARED_WITHOUT_MUTEX, &output_shared_without_mutex); - IMFAttributes_Release(output_attributes); - } - } + *advanced = FALSE; + if (!reader->attributes) + return FALSE; - if (reader_shared_without_mutex || output_shared_without_mutex) - hr = IMFAttributes_SetUINT32(*attributes, &MF_SA_D3D11_SHARED_WITHOUT_MUTEX, TRUE); - else if (reader_shared || output_shared) - hr = IMFAttributes_SetUINT32(*attributes, &MF_SA_D3D11_SHARED, TRUE); + if (SUCCEEDED(IMFAttributes_GetUINT32(reader->attributes, &MF_SOURCE_READER_ENABLE_ADVANCED_VIDEO_PROCESSING, &value))) + *advanced = value; + if (SUCCEEDED(IMFAttributes_GetUINT32(reader->attributes, &MF_SOURCE_READER_ENABLE_VIDEO_PROCESSING, &value))) + return value || *advanced; - return hr; + return *advanced; } -static HRESULT source_reader_setup_sample_allocator(struct source_reader *reader, unsigned int index) +static HRESULT source_reader_create_transform(struct source_reader *reader, BOOL decoder, BOOL allow_processor, + 
IMFMediaType *input_type, IMFMediaType *output_type, struct transform_entry **out) { - struct media_stream *stream = &reader->streams[index]; - IMFAttributes *attributes = NULL; - GUID major = { 0 }; + MFT_REGISTER_TYPE_INFO in_type, out_type; + struct transform_entry *entry; + IMFActivate **activates; + GUID category; + IMFTransform *transform; + UINT i, count; HRESULT hr; - IMFMediaType_GetMajorType(stream->current, &major); - if (!IsEqualGUID(&major, &MFMediaType_Video)) - return S_OK; + if (FAILED(hr = IMFMediaType_GetMajorType(input_type, &in_type.guidMajorType)) + || FAILED(hr = IMFMediaType_GetGUID(input_type, &MF_MT_SUBTYPE, &in_type.guidSubtype))) + return hr; + if (FAILED(hr = IMFMediaType_GetMajorType(output_type, &out_type.guidMajorType)) + || FAILED(hr = IMFMediaType_GetGUID(output_type, &MF_MT_SUBTYPE, &out_type.guidSubtype))) + return hr; - if (!(reader->flags & SOURCE_READER_HAS_DEVICE_MANAGER)) - return S_OK; + if (IsEqualGUID(&out_type.guidMajorType, &MFMediaType_Video)) + category = decoder ? MFT_CATEGORY_VIDEO_DECODER : MFT_CATEGORY_VIDEO_PROCESSOR; + else if (IsEqualGUID(&out_type.guidMajorType, &MFMediaType_Audio)) + category = decoder ? 
MFT_CATEGORY_AUDIO_DECODER : MFT_CATEGORY_AUDIO_EFFECT; + else + return MF_E_TOPO_CODEC_NOT_FOUND; + + if (!(entry = calloc(1, sizeof(*entry)))) + return E_OUTOFMEMORY; + list_init(&entry->entry); + entry->category = category; - if (!stream->allocator) + if (IsEqualGUID(&out_type.guidMajorType, &MFMediaType_Audio)) { - if (FAILED(hr = MFCreateVideoSampleAllocatorEx(&IID_IMFVideoSampleAllocatorEx, (void **)&stream->allocator))) + UINT32 bytes_per_second; + + /* decoders require to have MF_MT_AUDIO_BITS_PER_SAMPLE attribute set, but the source reader doesn't */ + if (FAILED(IMFMediaType_GetItem(output_type, &MF_MT_AUDIO_BITS_PER_SAMPLE, NULL))) { - WARN("Failed to create sample allocator, hr %#lx.\n", hr); - return hr; + if (IsEqualGUID(&out_type.guidSubtype, &MFAudioFormat_PCM)) + IMFMediaType_SetUINT32(output_type, &MF_MT_AUDIO_BITS_PER_SAMPLE, 16); + else if (IsEqualGUID(&out_type.guidSubtype, &MFAudioFormat_Float)) + IMFMediaType_SetUINT32(output_type, &MF_MT_AUDIO_BITS_PER_SAMPLE, 32); } + + if (SUCCEEDED(IMFMediaType_GetUINT32(output_type, &MF_MT_AUDIO_BLOCK_ALIGNMENT, &entry->min_buffer_size)) + && SUCCEEDED(IMFMediaType_GetUINT32(output_type, &MF_MT_AUDIO_AVG_BYTES_PER_SECOND, &bytes_per_second))) + entry->min_buffer_size = max(entry->min_buffer_size, bytes_per_second); } - IMFVideoSampleAllocatorEx_UninitializeSampleAllocator(stream->allocator); - if (FAILED(hr = IMFVideoSampleAllocatorEx_SetDirectXManager(stream->allocator, reader->device_manager))) + if (IsEqualGUID(&out_type.guidMajorType, &MFMediaType_Video) && IsEqualGUID(&out_type.guidSubtype, &MFVideoFormat_ABGR32) + && IsEqualGUID(&category, &MFT_CATEGORY_VIDEO_PROCESSOR)) { - WARN("Failed to set device manager, hr %#lx.\n", hr); - return hr; + /* The video processor isn't registered for MFVideoFormat_ABGR32, and native even only supports that format when + * D3D-enabled, we still want to instantiate a video processor in such case, so fixup the subtype for MFTEnumEx. 
+ */ + WARN("Fixing up MFVideoFormat_ABGR32 subtype for the video processor\n"); + out_type.guidSubtype = MFVideoFormat_RGB32; } - if (FAILED(hr = source_reader_create_sample_allocator_attributes(reader, stream, &attributes))) - WARN("Failed to create allocator attributes, hr %#lx.\n", hr); - if (FAILED(hr = IMFVideoSampleAllocatorEx_InitializeSampleAllocatorEx(stream->allocator, 2, 8, - attributes, stream->current))) + count = 0; + if (SUCCEEDED(hr = MFTEnumEx(category, 0, &in_type, allow_processor ? NULL : &out_type, &activates, &count))) { - WARN("Failed to initialize sample allocator, hr %#lx.\n", hr); - } - - if (attributes) - IMFAttributes_Release(attributes); + if (!count) + { + free(entry); + return MF_E_TOPO_CODEC_NOT_FOUND; + } - return hr; -} + for (i = 0; i < count; i++) + { + IMFAttributes *attributes; + IMFMediaType *media_type; -static HRESULT source_reader_configure_decoder(struct source_reader *reader, DWORD index, const CLSID *clsid, - IMFMediaType *input_type, IMFMediaType *output_type) -{ - IMFMediaTypeHandler *type_handler; - unsigned int block_alignment = 0; - IMFTransform *transform = NULL; - IMFMediaType *type = NULL; - GUID major = { 0 }; - DWORD flags; - HRESULT hr; - int i = 0; + if (FAILED(hr = IMFActivate_ActivateObject(activates[i], &IID_IMFTransform, (void **)&transform))) + continue; - if (FAILED(hr = CoCreateInstance(clsid, NULL, CLSCTX_INPROC_SERVER, &IID_IMFTransform, (void **)&transform))) - { - WARN("Failed to create transform object, hr %#lx.\n", hr); - return hr; - } + if (!reader->device_manager || FAILED(IMFTransform_GetAttributes(transform, &attributes))) + entry->attributes_initialized = TRUE; + else + { + UINT32 d3d_aware = FALSE; - if (FAILED(hr = IMFTransform_SetInputType(transform, 0, input_type, 0))) - { - WARN("Failed to set decoder input type, hr %#lx.\n", hr); - IMFTransform_Release(transform); - return hr; - } + if (reader->flags & SOURCE_READER_DXGI_DEVICE_MANAGER) + { + if 
(SUCCEEDED(IMFAttributes_GetUINT32(attributes, &MF_SA_D3D11_AWARE, &d3d_aware)) && d3d_aware) + IMFTransform_ProcessMessage(transform, MFT_MESSAGE_SET_D3D_MANAGER, (ULONG_PTR)reader->device_manager); + } + else if (reader->flags & SOURCE_READER_D3D9_DEVICE_MANAGER) + { + if (SUCCEEDED(IMFAttributes_GetUINT32(attributes, &MF_SA_D3D_AWARE, &d3d_aware)) && d3d_aware) + IMFTransform_ProcessMessage(transform, MFT_MESSAGE_SET_D3D_MANAGER, (ULONG_PTR)reader->device_manager); + } - /* Find the relevant output type. */ - while (IMFTransform_GetOutputAvailableType(transform, 0, i++, &type) == S_OK) - { - flags = 0; + IMFAttributes_SetUINT32(attributes, &MF_LOW_LATENCY, 1); + entry->attributes_initialized = !d3d_aware; + IMFAttributes_Release(attributes); + } - if (SUCCEEDED(IMFMediaType_IsEqual(type, output_type, &flags))) - { - if (flags & MF_MEDIATYPE_EQUAL_FORMAT_TYPES) + if (SUCCEEDED(hr = IMFTransform_SetInputType(transform, 0, input_type, 0)) + && SUCCEEDED(hr = IMFTransform_GetInputCurrentType(transform, 0, &media_type))) { - if (SUCCEEDED(IMFTransform_SetOutputType(transform, 0, type, 0))) + if (SUCCEEDED(hr = update_media_type_from_upstream(output_type, media_type)) + && FAILED(hr = IMFTransform_SetOutputType(transform, 0, output_type, 0)) && allow_processor + && SUCCEEDED(hr = IMFTransform_GetOutputAvailableType(transform, 0, 0, &media_type))) { - if (SUCCEEDED(source_reader_get_source_type_handler(reader, index, &type_handler))) - { - IMFMediaTypeHandler_SetCurrentMediaType(type_handler, input_type); - IMFMediaTypeHandler_Release(type_handler); - } + struct transform_entry *converter; - if (FAILED(hr = IMFMediaType_CopyAllItems(type, (IMFAttributes *)reader->streams[index].current))) - WARN("Failed to copy attributes, hr %#lx.\n", hr); - if (SUCCEEDED(IMFMediaType_GetMajorType(type, &major)) && IsEqualGUID(&major, &MFMediaType_Audio)) - IMFMediaType_GetUINT32(type, &MF_MT_AUDIO_BLOCK_ALIGNMENT, &block_alignment); - IMFMediaType_Release(type); + if (SUCCEEDED(hr = 
IMFTransform_SetOutputType(transform, 0, media_type, 0)) + && SUCCEEDED(hr = update_media_type_from_upstream(output_type, media_type)) + && SUCCEEDED(hr = source_reader_create_transform(reader, FALSE, FALSE, media_type, output_type, &converter))) + list_add_tail(&entry->entry, &converter->entry); - if (reader->streams[index].decoder.transform) - IMFTransform_Release(reader->streams[index].decoder.transform); - - reader->streams[index].decoder.transform = transform; - reader->streams[index].decoder.min_buffer_size = block_alignment; + IMFMediaType_Release(media_type); + } - return S_OK; + if (SUCCEEDED(hr)) + { + entry->transform = transform; + *out = entry; + break; } } - } - IMFMediaType_Release(type); - } - - WARN("Failed to find suitable decoder output type.\n"); - - IMFTransform_Release(transform); - - return MF_E_TOPO_CODEC_NOT_FOUND; -} - -static HRESULT source_reader_add_passthrough_transform(struct source_reader *reader, DWORD index, IMFMediaType *type) -{ - IMFTransform *transform; - HRESULT hr; - - if (FAILED(hr = create_passthrough_transform(&transform))) - return hr; - - if (FAILED(hr = IMFTransform_SetInputType(transform, 0, type, 0))) - { - WARN("Failed to set decoder input type, hr %#lx.\n", hr); - IMFTransform_Release(transform); - return hr; - } + IMFTransform_Release(transform); + } - if (FAILED(hr = IMFTransform_SetOutputType(transform, 0, type, 0))) - { - WARN("Failed to set decoder input type, hr %#lx.\n", hr); - IMFTransform_Release(transform); - return hr; + for (i = 0; i < count; ++i) + IMFActivate_Release(activates[i]); + CoTaskMemFree(activates); } - if (reader->streams[index].decoder.transform) - IMFTransform_Release(reader->streams[index].decoder.transform); - reader->streams[index].decoder.transform = transform; - reader->streams[index].decoder.min_buffer_size = 0; - - return S_OK; + if (FAILED(hr)) + free(entry); + return hr; } static HRESULT source_reader_create_decoder_for_stream(struct source_reader *reader, DWORD index, 
IMFMediaType *output_type) { - MFT_REGISTER_TYPE_INFO in_type, out_type; - CLSID *clsids, mft_clsid, category; - unsigned int i = 0, count; + BOOL enable_advanced = FALSE, allow_processor = TRUE; + struct media_stream *stream = &reader->streams[index]; IMFMediaType *input_type; + unsigned int i = 0; + GUID major; HRESULT hr; - /* TODO: should we check if the source type is compressed? */ - - if (FAILED(hr = IMFMediaType_GetMajorType(output_type, &out_type.guidMajorType))) - return hr; + if (SUCCEEDED(IMFMediaType_GetMajorType(output_type, &major)) && IsEqualGUID(&major, &MFMediaType_Video)) + allow_processor = source_reader_allow_video_processor(reader, &enable_advanced); - if (IsEqualGUID(&out_type.guidMajorType, &MFMediaType_Video)) - { - category = MFT_CATEGORY_VIDEO_DECODER; - } - else if (IsEqualGUID(&out_type.guidMajorType, &MFMediaType_Audio)) + while (SUCCEEDED(hr = source_reader_get_native_media_type(reader, index, i++, &input_type))) { - category = MFT_CATEGORY_AUDIO_DECODER; - } - else - { - WARN("Unhandled major type %s.\n", debugstr_guid(&out_type.guidMajorType)); - return MF_E_TOPO_CODEC_NOT_FOUND; - } - - if (FAILED(hr = IMFMediaType_GetGUID(output_type, &MF_MT_SUBTYPE, &out_type.guidSubtype))) - return hr; - - in_type.guidMajorType = out_type.guidMajorType; + struct transform_entry *entry; - while (source_reader_get_native_media_type(reader, index, i++, &input_type) == S_OK) - { - if (SUCCEEDED(IMFMediaType_GetGUID(input_type, &MF_MT_SUBTYPE, &in_type.guidSubtype))) + /* first, try to append a single processor, then try again with a decoder and a processor */ + if ((allow_processor && SUCCEEDED(hr = source_reader_create_transform(reader, FALSE, FALSE, input_type, output_type, &entry))) + || SUCCEEDED(hr = source_reader_create_transform(reader, TRUE, allow_processor, input_type, output_type, &entry))) { - count = 0; - if (SUCCEEDED(hr = MFTEnum(category, 0, &in_type, &out_type, NULL, &clsids, &count)) && count) + struct list *ptr = 
list_head(&entry->entry); + struct transform_entry *service = ptr ? LIST_ENTRY(ptr, struct transform_entry, entry) : entry; + IMFMediaTypeHandler *type_handler; + + if (enable_advanced) + { + /* when advanced video processing is enabled, converters are exposed as stream transform service */ + stream->transform_service = service->transform; + IMFTransform_AddRef(stream->transform_service); + } + else { - mft_clsid = clsids[0]; - CoTaskMemFree(clsids); + /* when advanced video processing is disabled, only decoders are exposed as stream transform service */ + if (IsEqualGUID(&entry->category, &MFT_CATEGORY_AUDIO_DECODER) + || IsEqualGUID(&entry->category, &MFT_CATEGORY_VIDEO_DECODER)) + { + stream->transform_service = entry->transform; + IMFTransform_AddRef(stream->transform_service); - /* TODO: Should we iterate over all of them? */ - if (SUCCEEDED(source_reader_configure_decoder(reader, index, &mft_clsid, input_type, output_type))) + /* converters are hidden from the stream transforms */ + if (service != entry) + service->hidden = TRUE; + } + else { - IMFMediaType_Release(input_type); - return S_OK; + /* converters are hidden from the stream transforms */ + entry->hidden = TRUE; } + } + + /* move any additional transforms that have been created */ + list_move_head(&stream->transforms, &entry->entry); + list_add_head(&stream->transforms, &entry->entry); + + if (SUCCEEDED(source_reader_get_source_type_handler(reader, index, &type_handler))) + { + if (FAILED(hr = IMFMediaTypeHandler_SetCurrentMediaType(type_handler, input_type))) + WARN("Failed to set current input media type, hr %#lx\n", hr); + IMFMediaTypeHandler_Release(type_handler); + } + if (FAILED(hr = IMFTransform_GetOutputCurrentType(service->transform, 0, &output_type))) + WARN("Failed to get decoder output media type, hr %#lx\n", hr); + else + { + IMFMediaType_CopyAllItems(output_type, (IMFAttributes *)stream->current); + IMFMediaType_Release(output_type); } + + IMFMediaType_Release(input_type); + return 
S_OK; } IMFMediaType_Release(input_type); @@ -2359,6 +2103,7 @@ static HRESULT WINAPI src_reader_SetCurrentMediaType(IMFSourceReaderEx *iface, D IMFMediaType *type) { struct source_reader *reader = impl_from_IMFSourceReaderEx(iface); + IMFMediaType *output_type; HRESULT hr; TRACE("%p, %#lx, %p, %p.\n", iface, index, reserved, type); @@ -2378,24 +2123,25 @@ static HRESULT WINAPI src_reader_SetCurrentMediaType(IMFSourceReaderEx *iface, D if (index >= reader->stream_count) return MF_E_INVALIDSTREAMNUMBER; + if (FAILED(hr = MFCreateMediaType(&output_type))) + return hr; + if (FAILED(IMFMediaType_CopyAllItems(type, (IMFAttributes *)output_type))) + { + IMFMediaType_Release(output_type); + return hr; + } + /* FIXME: setting the output type while streaming should trigger a flush */ EnterCriticalSection(&reader->cs); - hr = source_reader_set_compatible_media_type(reader, index, type); + hr = source_reader_set_compatible_media_type(reader, index, output_type); if (hr == S_FALSE) - hr = source_reader_create_decoder_for_stream(reader, index, type); - else if (hr == S_OK) - hr = source_reader_add_passthrough_transform(reader, index, reader->streams[index].current); - - if (reader->streams[index].allocator) - { - IMFVideoSampleAllocatorEx_Release(reader->streams[index].allocator); - reader->streams[index].allocator = NULL; - } + hr = source_reader_create_decoder_for_stream(reader, index, output_type); LeaveCriticalSection(&reader->cs); + IMFMediaType_Release(output_type); return hr; } @@ -2491,15 +2237,7 @@ static HRESULT source_reader_read_sample(struct source_reader *reader, DWORD ind stream = &reader->streams[stream_index]; - if (!stream->allocator) - { - hr = source_reader_setup_sample_allocator(reader, stream_index); - - if (FAILED(hr)) - WARN("Failed to setup the sample allocator, hr %#lx.\n", hr); - } - - if (SUCCEEDED(hr) && !source_reader_get_read_result(reader, stream, flags, &hr, actual_index, stream_flags, + if (!source_reader_get_read_result(reader, stream, flags, 
&hr, actual_index, stream_flags, timestamp, sample)) { while (!source_reader_got_response_for_stream(reader, stream) && stream->state != STREAM_STATE_EOS) @@ -2675,11 +2413,8 @@ static HRESULT WINAPI src_reader_GetServiceForStream(IMFSourceReaderEx *iface, D if (index >= reader->stream_count) hr = MF_E_INVALIDSTREAMNUMBER; - else - { - obj = (IUnknown *)reader->streams[index].decoder.transform; - if (!obj) hr = E_NOINTERFACE; - } + else if (!(obj = (IUnknown *)reader->streams[index].transform_service)) + hr = E_NOINTERFACE; break; } @@ -2784,12 +2519,52 @@ static HRESULT WINAPI src_reader_RemoveAllTransformsForStream(IMFSourceReaderEx return E_NOTIMPL; } +static struct transform_entry *get_transform_at_index(struct media_stream *stream, UINT index) +{ + struct transform_entry *entry; + + LIST_FOR_EACH_ENTRY(entry, &stream->transforms, struct transform_entry, entry) + if (!entry->hidden && !index--) + return entry; + + return NULL; +} + static HRESULT WINAPI src_reader_GetTransformForStream(IMFSourceReaderEx *iface, DWORD stream_index, DWORD transform_index, GUID *category, IMFTransform **transform) { - FIXME("%p, %#lx, %#lx, %p, %p.\n", iface, stream_index, transform_index, category, transform); + struct source_reader *reader = impl_from_IMFSourceReaderEx(iface); + struct transform_entry *entry; + HRESULT hr; - return E_NOTIMPL; + TRACE("%p, %#lx, %#lx, %p, %p.\n", iface, stream_index, transform_index, category, transform); + + if (!transform) + return E_POINTER; + + EnterCriticalSection(&reader->cs); + + if (stream_index == MF_SOURCE_READER_FIRST_VIDEO_STREAM) + stream_index = reader->first_video_stream_index; + else if (stream_index == MF_SOURCE_READER_FIRST_AUDIO_STREAM) + stream_index = reader->first_audio_stream_index; + + if (stream_index >= reader->stream_count) + hr = MF_E_INVALIDSTREAMNUMBER; + else if (!(entry = get_transform_at_index(&reader->streams[stream_index], transform_index))) + hr = MF_E_INVALIDINDEX; + else + { + if (category) + *category = 
entry->category; + *transform = entry->transform; + IMFTransform_AddRef(*transform); + hr = S_OK; + } + + LeaveCriticalSection(&reader->cs); + + return hr; } static const IMFSourceReaderExVtbl srcreader_vtbl = @@ -2900,6 +2675,8 @@ static HRESULT create_source_reader_from_source(IMFMediaSource *source, IMFAttri IMFMediaType *src_type; BOOL selected; + list_init(&object->streams[i].transforms); + if (FAILED(hr = MFCreateMediaType(&object->streams[i].current))) break; diff --git a/dlls/mfreadwrite/tests/Makefile.in b/dlls/mfreadwrite/tests/Makefile.in index cada1bf22ff..c6262c01009 100644 --- a/dlls/mfreadwrite/tests/Makefile.in +++ b/dlls/mfreadwrite/tests/Makefile.in @@ -1,5 +1,5 @@ TESTDLL = mfreadwrite.dll -IMPORTS = ole32 user32 d3d9 dxva2 mfplat mfreadwrite mfuuid propsys +IMPORTS = ole32 user32 d3d11 d3d9 dxva2 mfplat mf mfreadwrite mfuuid propsys SOURCES = \ mfplat.c \ diff --git a/dlls/mfreadwrite/tests/mfplat.c b/dlls/mfreadwrite/tests/mfplat.c index 2095d2f054d..d262516056b 100644 --- a/dlls/mfreadwrite/tests/mfplat.c +++ b/dlls/mfreadwrite/tests/mfplat.c @@ -39,11 +39,47 @@ DEFINE_GUID(GUID_NULL, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0); #include "mferror.h" #include "mfreadwrite.h" #include "propvarutil.h" +#include "initguid.h" #include "d3d9.h" #include "dxva2api.h" +#include "d3d11_4.h" +#include "evr.h" #include "wine/test.h" +DEFINE_MEDIATYPE_GUID(MFVideoFormat_TEST,MAKEFOURCC('T','E','S','T')); +DEFINE_MEDIATYPE_GUID(MFVideoFormat_ABGR32,D3DFMT_A8B8G8R8); + +#define check_interface(a, b, c) check_interface_(__LINE__, a, b, c) +static void check_interface_(unsigned int line, void *iface_ptr, REFIID iid, BOOL supported) +{ + IUnknown *iface = iface_ptr; + HRESULT hr, expected_hr; + IUnknown *unk; + + expected_hr = supported ? 
S_OK : E_NOINTERFACE; + + hr = IUnknown_QueryInterface(iface, iid, (void **)&unk); + ok_(__FILE__, line)(hr == expected_hr, "Got hr %#lx, expected %#lx.\n", hr, expected_hr); + if (SUCCEEDED(hr)) + IUnknown_Release(unk); +} + +#define check_service_interface(a, b, c, d) check_service_interface_(__LINE__, a, b, c, d) +static void check_service_interface_(unsigned int line, void *iface_ptr, REFGUID service, REFIID iid, BOOL supported) +{ + IUnknown *iface = iface_ptr; + HRESULT hr, expected_hr; + IUnknown *unk; + + expected_hr = supported ? S_OK : E_NOINTERFACE; + + hr = MFGetService(iface, service, iid, (void **)&unk); + ok_(__FILE__, line)(hr == expected_hr, "Got hr %#lx, expected %#lx.\n", hr, expected_hr); + if (SUCCEEDED(hr)) + IUnknown_Release(unk); +} + struct attribute_desc { const GUID *key; @@ -165,6 +201,7 @@ static IDirect3DDevice9 *create_d3d9_device(IDirect3D9 *d3d9, HWND focus_window) } static HRESULT (WINAPI *pMFCreateMFByteStreamOnStream)(IStream *stream, IMFByteStream **bytestream); +static HRESULT (WINAPI *pMFCreateDXGIDeviceManager)(UINT *token, IMFDXGIDeviceManager **manager); static void init_functions(void) { @@ -172,6 +209,7 @@ static void init_functions(void) #define X(f) if (!(p##f = (void*)GetProcAddress(mod, #f))) return; X(MFCreateMFByteStreamOnStream); + X(MFCreateDXGIDeviceManager); #undef X } @@ -520,9 +558,15 @@ static HRESULT WINAPI test_source_Shutdown(IMFMediaSource *iface) { struct test_source *source = impl_from_IMFMediaSource(iface); HRESULT hr; + UINT i; hr = IMFMediaEventQueue_Shutdown(source->event_queue); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + for (i = 0; i < source->stream_count; ++i) + { + hr = IMFMediaEventQueue_Shutdown(source->streams[i]->event_queue); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + } return S_OK; } @@ -909,7 +953,7 @@ static void test_source_reader(const char *filename, bool video) hr = IMFMediaType_SetGUID(mediatype, &MF_MT_SUBTYPE, &MFVideoFormat_RGB32); ok(hr == S_OK, "Unexpected hr 
%#lx.\n", hr); hr = IMFSourceReader_SetCurrentMediaType(reader, MF_SOURCE_READER_FIRST_VIDEO_STREAM, NULL, mediatype); - todo_wine ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); IMFMediaType_Release(mediatype); if (hr == S_OK) @@ -922,7 +966,7 @@ static void test_source_reader(const char *filename, bool video) ok(IsEqualGUID(&subtype, &MFVideoFormat_RGB32), "Got subtype %s.\n", debugstr_guid(&subtype)); hr = IMFMediaType_GetUINT32(mediatype, &MF_MT_DEFAULT_STRIDE, &stride); - todo_wine ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); todo_wine ok(stride == 160 * 4, "Got stride %u.\n", stride); IMFMediaType_Release(mediatype); @@ -1687,7 +1731,7 @@ static void test_source_reader_transforms(BOOL enable_processing, BOOL enable_ad static const struct attribute_desc nv12_expect_advanced_desc[] = { ATTR_GUID(MF_MT_MAJOR_TYPE, MFMediaType_Video), - ATTR_GUID(MF_MT_SUBTYPE, MFVideoFormat_NV12, .todo_value = TRUE), + ATTR_GUID(MF_MT_SUBTYPE, MFVideoFormat_NV12), ATTR_RATIO(MF_MT_FRAME_SIZE, 96, 96), ATTR_UINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, 1, .todo = TRUE), ATTR_UINT32(MF_MT_COMPRESSED, 0, .todo = TRUE), @@ -1721,7 +1765,7 @@ static void test_source_reader_transforms(BOOL enable_processing, BOOL enable_ad static const struct attribute_desc yuy2_expect_advanced_desc[] = { ATTR_GUID(MF_MT_MAJOR_TYPE, MFMediaType_Video), - ATTR_GUID(MF_MT_SUBTYPE, MFVideoFormat_YUY2, .todo_value = TRUE), + ATTR_GUID(MF_MT_SUBTYPE, MFVideoFormat_YUY2), ATTR_RATIO(MF_MT_FRAME_SIZE, 96, 96), ATTR_UINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, 1, .todo = TRUE), ATTR_UINT32(MF_MT_COMPRESSED, 0, .todo = TRUE), @@ -1738,7 +1782,7 @@ static void test_source_reader_transforms(BOOL enable_processing, BOOL enable_ad static const struct attribute_desc rgb32_expect_desc[] = { ATTR_GUID(MF_MT_MAJOR_TYPE, MFMediaType_Video), - ATTR_GUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32, .todo_value = TRUE), + ATTR_GUID(MF_MT_SUBTYPE, 
MFVideoFormat_RGB32), ATTR_RATIO(MF_MT_FRAME_SIZE, 96, 96), ATTR_UINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, 1, .todo = TRUE), ATTR_UINT32(MF_MT_DEFAULT_STRIDE, 384, .todo = TRUE), @@ -1746,15 +1790,24 @@ static void test_source_reader_transforms(BOOL enable_processing, BOOL enable_ad ATTR_UINT32(MF_MT_SAMPLE_SIZE, 36864, .todo = TRUE), {0}, }; - static const struct attribute_desc rgb32_expect_advanced_desc[] = + static const struct attribute_desc rgb32_expect_advanced_desc_todo1[] = { ATTR_GUID(MF_MT_MAJOR_TYPE, MFMediaType_Video), - ATTR_GUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32, .todo_value = TRUE), + ATTR_GUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32), ATTR_RATIO(MF_MT_FRAME_SIZE, 96, 96), ATTR_UINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, 1, .todo = TRUE), ATTR_UINT32(MF_MT_COMPRESSED, 0, .todo = TRUE), ATTR_UINT32(MF_MT_INTERLACE_MODE, 2, .todo = TRUE), }; + static const struct attribute_desc rgb32_expect_advanced_desc_todo2[] = + { + ATTR_GUID(MF_MT_MAJOR_TYPE, MFMediaType_Video), + ATTR_GUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32), + ATTR_RATIO(MF_MT_FRAME_SIZE, 96, 96), + ATTR_UINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, 1), + ATTR_UINT32(MF_MT_COMPRESSED, 0, .todo = TRUE), + ATTR_UINT32(MF_MT_INTERLACE_MODE, 2, .todo_value = TRUE), + }; IMFStreamDescriptor *video_stream; IMFSourceReaderEx *reader_ex; IMFAttributes *attributes; @@ -1840,16 +1893,21 @@ static void test_source_reader_transforms(BOOL enable_processing, BOOL enable_ad init_media_type(media_type, nv12_stream_type_desc, 2); /* doesn't need the frame size */ hr = IMFSourceReader_SetCurrentMediaType(reader, 0, NULL, media_type); if (enable_advanced) - todo_wine ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); else + { + todo_wine_if(enable_processing) /* Wine enables advanced video processing in all cases */ ok(hr == MF_E_TOPO_CODEC_NOT_FOUND, "Unexpected hr %#lx.\n", hr); + } IMFMediaType_Release(media_type); hr = IMFSourceReader_GetCurrentMediaType(reader, 0, &media_type); ok(hr == 
S_OK, "Unexpected hr %#lx.\n", hr); if (enable_advanced) check_media_type(media_type, nv12_expect_advanced_desc, -1); - else + else if (!enable_processing) + check_media_type(media_type, rgb32_stream_type_desc, -1); + else if (!winetest_platform_is_wine) /* Wine enables advanced video processing in all cases */ check_media_type(media_type, rgb32_stream_type_desc, -1); IMFMediaType_Release(media_type); @@ -1858,7 +1916,7 @@ static void test_source_reader_transforms(BOOL enable_processing, BOOL enable_ad if (!enable_advanced) ok(hr == E_NOINTERFACE, "Unexpected hr %#lx.\n", hr); else - todo_wine ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); if (hr == S_OK) { hr = IMFTransform_GetInputCurrentType(transform, 0, &media_type); @@ -1912,7 +1970,7 @@ static void test_source_reader_transforms(BOOL enable_processing, BOOL enable_ad init_media_type(media_type, rgb32_stream_type_desc, 2); /* doesn't need the frame size */ hr = IMFSourceReader_SetCurrentMediaType(reader, 0, NULL, media_type); if (enable_processing || enable_advanced) - todo_wine ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); else ok(hr == MF_E_TOPO_CODEC_NOT_FOUND, "Unexpected hr %#lx.\n", hr); IMFMediaType_Release(media_type); @@ -1920,7 +1978,7 @@ static void test_source_reader_transforms(BOOL enable_processing, BOOL enable_ad hr = IMFSourceReader_GetCurrentMediaType(reader, 0, &media_type); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); if (enable_advanced) - check_media_type(media_type, rgb32_expect_advanced_desc, -1); + check_media_type(media_type, rgb32_expect_advanced_desc_todo1, -1); else if (enable_processing) check_media_type(media_type, rgb32_expect_desc, -1); else @@ -1932,7 +1990,7 @@ static void test_source_reader_transforms(BOOL enable_processing, BOOL enable_ad if (!enable_advanced) ok(hr == E_NOINTERFACE, "Unexpected hr %#lx.\n", hr); else - todo_wine ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(hr == 
S_OK, "Unexpected hr %#lx.\n", hr); if (hr == S_OK) { hr = IMFTransform_GetInputCurrentType(transform, 0, &media_type); @@ -1954,19 +2012,22 @@ static void test_source_reader_transforms(BOOL enable_processing, BOOL enable_ad init_media_type(media_type, yuy2_stream_type_desc, 2); /* doesn't need the frame size */ hr = IMFSourceReader_SetCurrentMediaType(reader, 0, NULL, media_type); if (enable_advanced) - todo_wine ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); else + { + todo_wine_if(enable_processing) /* Wine enables advanced video processing in all cases */ ok(hr == MF_E_TOPO_CODEC_NOT_FOUND, "Unexpected hr %#lx.\n", hr); + } IMFMediaType_Release(media_type); hr = IMFSourceReader_GetCurrentMediaType(reader, 0, &media_type); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); if (enable_advanced) check_media_type(media_type, yuy2_expect_advanced_desc, -1); - else if (enable_processing) - check_media_type(media_type, rgb32_expect_desc, -1); - else + else if (!enable_processing) check_media_type(media_type, nv12_stream_type_desc, -1); + else if (!winetest_platform_is_wine) /* Wine enables advanced video processing in all cases */ + check_media_type(media_type, rgb32_expect_desc, -1); IMFMediaType_Release(media_type); /* convert transform is only exposed with MF_SOURCE_READER_ENABLE_ADVANCED_VIDEO_PROCESSING */ @@ -1974,7 +2035,7 @@ static void test_source_reader_transforms(BOOL enable_processing, BOOL enable_ad if (!enable_advanced) ok(hr == E_NOINTERFACE, "Unexpected hr %#lx.\n", hr); else - todo_wine ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); if (hr == S_OK) { hr = IMFTransform_GetInputCurrentType(transform, 0, &media_type); @@ -1994,9 +2055,9 @@ static void test_source_reader_transforms(BOOL enable_processing, BOOL enable_ad ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); hr = IMFSourceReaderEx_GetTransformForStream(reader_ex, 0, 0, &category, &transform); if (!enable_advanced) - 
todo_wine ok(hr == MF_E_INVALIDINDEX, "Unexpected hr %#lx.\n", hr); + ok(hr == MF_E_INVALIDINDEX, "Unexpected hr %#lx.\n", hr); else - todo_wine ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); if (hr == S_OK) { hr = IMFTransform_GetInputCurrentType(transform, 0, &media_type); @@ -2013,7 +2074,7 @@ static void test_source_reader_transforms(BOOL enable_processing, BOOL enable_ad } hr = IMFSourceReaderEx_GetTransformForStream(reader_ex, 0, 1, &category, &transform); - todo_wine ok(hr == MF_E_INVALIDINDEX, "Unexpected hr %#lx.\n", hr); + ok(hr == MF_E_INVALIDINDEX, "Unexpected hr %#lx.\n", hr); IMFSourceReaderEx_Release(reader_ex); IMFSourceReader_Release(reader); @@ -2058,7 +2119,7 @@ static void test_source_reader_transforms(BOOL enable_processing, BOOL enable_ad init_media_type(media_type, rgb32_stream_type_desc, 2); /* doesn't need the frame size */ hr = IMFSourceReader_SetCurrentMediaType(reader, 0, NULL, media_type); if (enable_processing || enable_advanced) - todo_wine ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); else todo_wine ok(hr == MF_E_INVALIDMEDIATYPE, "Unexpected hr %#lx.\n", hr); IMFMediaType_Release(media_type); @@ -2066,11 +2127,11 @@ static void test_source_reader_transforms(BOOL enable_processing, BOOL enable_ad hr = IMFSourceReader_GetCurrentMediaType(reader, 0, &media_type); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); if (enable_advanced) - check_media_type(media_type, rgb32_expect_advanced_desc, -1); - else if (enable_processing) - check_media_type(media_type, rgb32_expect_desc, -1); - else + check_media_type(media_type, rgb32_expect_advanced_desc_todo2, -1); + else if (!enable_processing) check_media_type(media_type, h264_stream_type_desc, -1); + else if (!winetest_platform_is_wine) /* Wine enables advanced video processing in all cases */ + check_media_type(media_type, rgb32_expect_desc, -1); IMFMediaType_Release(media_type); /* the exposed transform is 
the H264 decoder or the converter with MF_SOURCE_READER_ENABLE_ADVANCED_VIDEO_PROCESSING */ @@ -2078,7 +2139,7 @@ static void test_source_reader_transforms(BOOL enable_processing, BOOL enable_ad if (!enable_processing && !enable_advanced) ok(hr == E_NOINTERFACE, "Unexpected hr %#lx.\n", hr); else - todo_wine ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); if (hr == S_OK) { hr = IMFTransform_GetInputCurrentType(transform, 0, &media_type); @@ -2106,9 +2167,9 @@ static void test_source_reader_transforms(BOOL enable_processing, BOOL enable_ad ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); hr = IMFSourceReaderEx_GetTransformForStream(reader_ex, 0, 0, &category, &transform); if (!enable_processing && !enable_advanced) - todo_wine ok(hr == MF_E_INVALIDINDEX, "Unexpected hr %#lx.\n", hr); + ok(hr == MF_E_INVALIDINDEX, "Unexpected hr %#lx.\n", hr); else - todo_wine ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); if (hr == S_OK) { hr = IMFTransform_GetInputCurrentType(transform, 0, &media_type); @@ -2127,9 +2188,9 @@ static void test_source_reader_transforms(BOOL enable_processing, BOOL enable_ad /* the video processor can be accessed at index 1 with MF_SOURCE_READER_ENABLE_ADVANCED_VIDEO_PROCESSING */ hr = IMFSourceReaderEx_GetTransformForStream(reader_ex, 0, 1, &category, &transform); if (!enable_advanced) - todo_wine ok(hr == MF_E_INVALIDINDEX, "Unexpected hr %#lx.\n", hr); + ok(hr == MF_E_INVALIDINDEX, "Unexpected hr %#lx.\n", hr); else - todo_wine ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); if (hr == S_OK) { hr = IMFTransform_GetInputCurrentType(transform, 0, &media_type); @@ -2146,7 +2207,7 @@ static void test_source_reader_transforms(BOOL enable_processing, BOOL enable_ad } hr = IMFSourceReaderEx_GetTransformForStream(reader_ex, 0, 2, &category, &transform); - todo_wine ok(hr == MF_E_INVALIDINDEX, "Unexpected hr %#lx.\n", hr); + ok(hr 
== MF_E_INVALIDINDEX, "Unexpected hr %#lx.\n", hr); IMFSourceReaderEx_Release(reader_ex); /* H264 -> NV12 conversion */ @@ -2195,7 +2256,7 @@ static void test_source_reader_transforms(BOOL enable_processing, BOOL enable_ad hr = IMFSourceReader_QueryInterface(reader, &IID_IMFSourceReaderEx, (void **)&reader_ex); ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); hr = IMFSourceReaderEx_GetTransformForStream(reader_ex, 0, 0, &category, &transform); - todo_wine ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); if (hr == S_OK) { hr = IMFTransform_GetInputCurrentType(transform, 0, &media_type); @@ -2225,7 +2286,7 @@ static void test_source_reader_transforms(BOOL enable_processing, BOOL enable_ad } hr = IMFSourceReaderEx_GetTransformForStream(reader_ex, 0, 1, &category, &transform); - todo_wine ok(hr == MF_E_INVALIDINDEX, "Unexpected hr %#lx.\n", hr); + ok(hr == MF_E_INVALIDINDEX, "Unexpected hr %#lx.\n", hr); IMFSourceReaderEx_Release(reader_ex); skip_tests: @@ -2238,6 +2299,1239 @@ static void test_source_reader_transforms(BOOL enable_processing, BOOL enable_ad winetest_pop_context(); } +static BOOL test_decoder_d3d_aware; +static BOOL test_decoder_d3d11_aware; +static BOOL test_decoder_got_d3d_manager; +static BOOL test_decoder_allocate_samples; +static IDirect3DDeviceManager9 *expect_d3d9_manager; +static IMFDXGIDeviceManager *expect_dxgi_manager; + +struct test_decoder +{ + IMFTransform IMFTransform_iface; + LONG refcount; + + IMFAttributes *attributes; + IMFMediaType *input_type; + IMFMediaType *output_type; + + MFVIDEOFORMAT output_format; + HRESULT next_output; +}; + +static struct test_decoder *test_decoder_from_IMFTransform(IMFTransform *iface) +{ + return CONTAINING_RECORD(iface, struct test_decoder, IMFTransform_iface); +} + +static HRESULT WINAPI test_decoder_QueryInterface(IMFTransform *iface, REFIID iid, void **out) +{ + struct test_decoder *decoder = test_decoder_from_IMFTransform(iface); + + if (IsEqualGUID(iid, 
&IID_IUnknown) || + IsEqualGUID(iid, &IID_IMFTransform)) + { + IMFTransform_AddRef(&decoder->IMFTransform_iface); + *out = &decoder->IMFTransform_iface; + return S_OK; + } + + *out = NULL; + return E_NOINTERFACE; +} + +static ULONG WINAPI test_decoder_AddRef(IMFTransform *iface) +{ + struct test_decoder *decoder = test_decoder_from_IMFTransform(iface); + ULONG refcount = InterlockedIncrement(&decoder->refcount); + return refcount; +} + +static ULONG WINAPI test_decoder_Release(IMFTransform *iface) +{ + struct test_decoder *decoder = test_decoder_from_IMFTransform(iface); + ULONG refcount = InterlockedDecrement(&decoder->refcount); + + if (!refcount) + { + if (decoder->input_type) + IMFMediaType_Release(decoder->input_type); + if (decoder->output_type) + IMFMediaType_Release(decoder->output_type); + free(decoder); + } + + return refcount; +} + +static HRESULT WINAPI test_decoder_GetStreamLimits(IMFTransform *iface, DWORD *input_minimum, + DWORD *input_maximum, DWORD *output_minimum, DWORD *output_maximum) +{ + ok(0, "Unexpected call.\n"); + return E_NOTIMPL; +} + +static HRESULT WINAPI test_decoder_GetStreamCount(IMFTransform *iface, DWORD *inputs, DWORD *outputs) +{ + *inputs = *outputs = 1; + return S_OK; +} + +static HRESULT WINAPI test_decoder_GetStreamIDs(IMFTransform *iface, DWORD input_size, DWORD *inputs, + DWORD output_size, DWORD *outputs) +{ + return E_NOTIMPL; +} + +static HRESULT WINAPI test_decoder_GetInputStreamInfo(IMFTransform *iface, DWORD id, MFT_INPUT_STREAM_INFO *info) +{ + ok(0, "Unexpected call.\n"); + return E_NOTIMPL; +} + +static HRESULT WINAPI test_decoder_GetOutputStreamInfo(IMFTransform *iface, DWORD id, MFT_OUTPUT_STREAM_INFO *info) +{ + struct test_decoder *decoder = test_decoder_from_IMFTransform(iface); + UINT64 frame_size; + GUID subtype; + + if (!decoder->output_type || FAILED(IMFMediaType_GetUINT64(decoder->output_type, &MF_MT_FRAME_SIZE, &frame_size))) + frame_size = (UINT64)96 << 32 | 96; + if (!decoder->output_type || 
FAILED(IMFMediaType_GetGUID(decoder->output_type, &MF_MT_SUBTYPE, &subtype))) + subtype = MFVideoFormat_YUY2; + + memset(info, 0, sizeof(*info)); + if (test_decoder_allocate_samples) + info->dwFlags |= MFT_OUTPUT_STREAM_PROVIDES_SAMPLES; + return MFCalculateImageSize(&MFVideoFormat_RGB32, (UINT32)frame_size, frame_size >> 32, (UINT32 *)&info->cbSize); +} + +static HRESULT WINAPI test_decoder_GetAttributes(IMFTransform *iface, IMFAttributes **attributes) +{ + struct test_decoder *decoder = test_decoder_from_IMFTransform(iface); + if (!(*attributes = decoder->attributes)) + return E_NOTIMPL; + IMFAttributes_AddRef(*attributes); + return S_OK; +} + +static HRESULT WINAPI test_decoder_GetInputStreamAttributes(IMFTransform *iface, DWORD id, IMFAttributes **attributes) +{ + return E_NOTIMPL; +} + +static HRESULT WINAPI test_decoder_GetOutputStreamAttributes(IMFTransform *iface, DWORD id, IMFAttributes **attributes) +{ + return E_NOTIMPL; +} + +static HRESULT WINAPI test_decoder_DeleteInputStream(IMFTransform *iface, DWORD id) +{ + ok(0, "Unexpected call.\n"); + return E_NOTIMPL; +} + +static HRESULT WINAPI test_decoder_AddInputStreams(IMFTransform *iface, DWORD streams, DWORD *ids) +{ + ok(0, "Unexpected call.\n"); + return E_NOTIMPL; +} + +static HRESULT WINAPI test_decoder_GetInputAvailableType(IMFTransform *iface, DWORD id, DWORD index, + IMFMediaType **type) +{ + ok(0, "Unexpected call.\n"); + return E_NOTIMPL; +} + +static void test_decoder_set_output_format(IMFTransform *iface, const MFVIDEOFORMAT *output_format) +{ + struct test_decoder *decoder = test_decoder_from_IMFTransform(iface); + decoder->output_format = *output_format; +} + +static HRESULT WINAPI test_decoder_GetOutputAvailableType(IMFTransform *iface, DWORD id, + DWORD index, IMFMediaType **type) +{ + struct test_decoder *decoder = test_decoder_from_IMFTransform(iface); + const GUID subtypes[] = + { + MFVideoFormat_NV12, + MFVideoFormat_YUY2, + }; + MFVIDEOFORMAT format = + { + .dwSize = sizeof(format), 
+ .videoInfo = + { + .dwWidth = 96, + .dwHeight = 96, + }, + }; + HRESULT hr; + + *type = NULL; + if (index >= ARRAY_SIZE(subtypes)) + return MF_E_NO_MORE_TYPES; + + if (decoder->output_format.dwSize) + format = decoder->output_format; + format.guidFormat = subtypes[index]; + + hr = MFCreateMediaType(type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = MFInitMediaTypeFromMFVideoFormat(*type, &format, sizeof(format)); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + return hr; +} + +static HRESULT WINAPI test_decoder_SetInputType(IMFTransform *iface, DWORD id, IMFMediaType *type, DWORD flags) +{ + struct test_decoder *decoder = test_decoder_from_IMFTransform(iface); + if (flags & MFT_SET_TYPE_TEST_ONLY) + return S_OK; + if (decoder->input_type) + IMFMediaType_Release(decoder->input_type); + if ((decoder->input_type = type)) + IMFMediaType_AddRef(decoder->input_type); + return S_OK; +} + +static HRESULT WINAPI test_decoder_SetOutputType(IMFTransform *iface, DWORD id, IMFMediaType *type, DWORD flags) +{ + struct test_decoder *decoder = test_decoder_from_IMFTransform(iface); + GUID subtype; + HRESULT hr; + + if (type && SUCCEEDED(hr = IMFMediaType_GetGUID(type, &MF_MT_SUBTYPE, &subtype)) + && (IsEqualGUID(&subtype, &MFVideoFormat_RGB32) + || IsEqualGUID(&subtype, &MFVideoFormat_ABGR32))) + return MF_E_INVALIDMEDIATYPE; + + if (flags & MFT_SET_TYPE_TEST_ONLY) + return S_OK; + if (decoder->output_type) + IMFMediaType_Release(decoder->output_type); + if ((decoder->output_type = type)) + IMFMediaType_AddRef(decoder->output_type); + return S_OK; +} + +static HRESULT WINAPI test_decoder_GetInputCurrentType(IMFTransform *iface, DWORD id, IMFMediaType **type) +{ + struct test_decoder *decoder = test_decoder_from_IMFTransform(iface); + if (!(*type = decoder->input_type)) + return MF_E_TRANSFORM_TYPE_NOT_SET; + IMFMediaType_AddRef(*type); + return S_OK; +} + +static HRESULT WINAPI test_decoder_GetOutputCurrentType(IMFTransform *iface, DWORD id, IMFMediaType **type) +{ + 
struct test_decoder *decoder = test_decoder_from_IMFTransform(iface); + if (!(*type = decoder->output_type)) + return MF_E_TRANSFORM_TYPE_NOT_SET; + IMFMediaType_AddRef(*type); + return S_OK; +} + +static HRESULT WINAPI test_decoder_GetInputStatus(IMFTransform *iface, DWORD id, DWORD *flags) +{ + ok(0, "Unexpected call.\n"); + return E_NOTIMPL; +} + +static HRESULT WINAPI test_decoder_GetOutputStatus(IMFTransform *iface, DWORD *flags) +{ + ok(0, "Unexpected call.\n"); + return E_NOTIMPL; +} + +static HRESULT WINAPI test_decoder_SetOutputBounds(IMFTransform *iface, LONGLONG lower, LONGLONG upper) +{ + ok(0, "Unexpected call.\n"); + return E_NOTIMPL; +} + +static HRESULT WINAPI test_decoder_ProcessEvent(IMFTransform *iface, DWORD id, IMFMediaEvent *event) +{ + ok(0, "Unexpected call.\n"); + return E_NOTIMPL; +} + +static HRESULT WINAPI test_decoder_ProcessMessage(IMFTransform *iface, MFT_MESSAGE_TYPE message, ULONG_PTR param) +{ + switch (message) + { + case MFT_MESSAGE_COMMAND_FLUSH: + case MFT_MESSAGE_NOTIFY_BEGIN_STREAMING: + case MFT_MESSAGE_NOTIFY_END_STREAMING: + case MFT_MESSAGE_NOTIFY_END_OF_STREAM: + case MFT_MESSAGE_NOTIFY_START_OF_STREAM: + return S_OK; + + case MFT_MESSAGE_SET_D3D_MANAGER: + ok(test_decoder_d3d_aware || test_decoder_d3d11_aware, "Unexpected call.\n"); + if (!param) + return S_OK; + + if (test_decoder_d3d_aware) + { + IDirect3DDeviceManager9 *manager; + HRESULT hr; + + hr = IUnknown_QueryInterface((IUnknown *)param, &IID_IDirect3DDeviceManager9, (void **)&manager); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(manager == expect_d3d9_manager, "got manager %p\n", manager); + IDirect3DDeviceManager9_Release(manager); + + test_decoder_got_d3d_manager = TRUE; + } + if (test_decoder_d3d11_aware) + { + IMFDXGIDeviceManager *manager; + HRESULT hr; + + hr = IUnknown_QueryInterface((IUnknown *)param, &IID_IMFDXGIDeviceManager, (void **)&manager); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(manager == expect_dxgi_manager, "got manager 
%p\n", manager); + IMFDXGIDeviceManager_Release(manager); + + test_decoder_got_d3d_manager = TRUE; + } + return S_OK; + + default: + ok(0, "Unexpected call.\n"); + return E_NOTIMPL; + } +} + +static HRESULT WINAPI test_decoder_ProcessInput(IMFTransform *iface, DWORD id, IMFSample *sample, DWORD flags) +{ + return S_OK; +} + +static void test_decoder_set_next_output(IMFTransform *iface, HRESULT hr) +{ + struct test_decoder *decoder = test_decoder_from_IMFTransform(iface); + decoder->next_output = hr; +} + +static HRESULT WINAPI test_decoder_ProcessOutput(IMFTransform *iface, DWORD flags, DWORD count, + MFT_OUTPUT_DATA_BUFFER *data, DWORD *status) +{ + struct test_decoder *decoder = test_decoder_from_IMFTransform(iface); + IMFMediaBuffer *buffer; + IUnknown *unknown; + HRESULT hr; + + if (test_decoder_allocate_samples) + { + ok(!data->pSample, "Unexpected sample\n"); + + hr = MFCreateSample(&data->pSample); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = MFCreateMemoryBuffer(96 * 96 * 4, &buffer); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFSample_AddBuffer(data->pSample, buffer); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + IMFMediaBuffer_Release(buffer); + } + else + { + ok(!!data->pSample, "Missing sample\n"); + + hr = IMFSample_GetBufferByIndex(data->pSample, 0, &buffer); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + check_interface(buffer, &IID_IMF2DBuffer2, TRUE); + check_interface(buffer, &IID_IMFGetService, TRUE); + check_interface(buffer, &IID_IMFDXGIBuffer, FALSE); + hr = MFGetService((IUnknown *)buffer, &MR_BUFFER_SERVICE, &IID_IDirect3DSurface9, (void **)&unknown); + ok(hr == E_NOTIMPL, "Unexpected hr %#lx.\n", hr); + IMFMediaBuffer_Release(buffer); + } + + if (decoder->next_output == MF_E_TRANSFORM_STREAM_CHANGE) + { + data[0].dwStatus = MFT_OUTPUT_DATA_BUFFER_FORMAT_CHANGE; + decoder->next_output = S_OK; + return MF_E_TRANSFORM_STREAM_CHANGE; + } + + if (decoder->next_output == S_OK) + { + decoder->next_output = 
MF_E_TRANSFORM_NEED_MORE_INPUT; + hr = IMFSample_SetSampleTime(data->pSample, 0); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + return S_OK; + } + + return decoder->next_output; +} + +static const IMFTransformVtbl test_decoder_vtbl = +{ + test_decoder_QueryInterface, + test_decoder_AddRef, + test_decoder_Release, + test_decoder_GetStreamLimits, + test_decoder_GetStreamCount, + test_decoder_GetStreamIDs, + test_decoder_GetInputStreamInfo, + test_decoder_GetOutputStreamInfo, + test_decoder_GetAttributes, + test_decoder_GetInputStreamAttributes, + test_decoder_GetOutputStreamAttributes, + test_decoder_DeleteInputStream, + test_decoder_AddInputStreams, + test_decoder_GetInputAvailableType, + test_decoder_GetOutputAvailableType, + test_decoder_SetInputType, + test_decoder_SetOutputType, + test_decoder_GetInputCurrentType, + test_decoder_GetOutputCurrentType, + test_decoder_GetInputStatus, + test_decoder_GetOutputStatus, + test_decoder_SetOutputBounds, + test_decoder_ProcessEvent, + test_decoder_ProcessMessage, + test_decoder_ProcessInput, + test_decoder_ProcessOutput, +}; + +static HRESULT WINAPI test_mft_factory_QueryInterface(IClassFactory *iface, REFIID riid, void **obj) +{ + if (IsEqualIID(riid, &IID_IClassFactory) || + IsEqualIID(riid, &IID_IUnknown)) + { + *obj = iface; + IClassFactory_AddRef(iface); + return S_OK; + } + + *obj = NULL; + return E_NOINTERFACE; +} + +static ULONG WINAPI test_mft_factory_AddRef(IClassFactory *iface) +{ + return 2; +} + +static ULONG WINAPI test_mft_factory_Release(IClassFactory *iface) +{ + return 1; +} + +static HRESULT WINAPI test_mft_factory_CreateInstance(IClassFactory *iface, IUnknown *outer, REFIID riid, void **obj) +{ + struct test_decoder *decoder; + HRESULT hr; + + if (!(decoder = calloc(1, sizeof(*decoder)))) + return E_OUTOFMEMORY; + decoder->IMFTransform_iface.lpVtbl = &test_decoder_vtbl; + decoder->refcount = 1; + + if (test_decoder_d3d_aware || test_decoder_d3d11_aware) + { + hr = 
MFCreateAttributes(&decoder->attributes, 1); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + } + if (test_decoder_d3d_aware) + { + hr = IMFAttributes_SetUINT32(decoder->attributes, &MF_SA_D3D_AWARE, 1); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + } + if (test_decoder_d3d11_aware) + { + hr = IMFAttributes_SetUINT32(decoder->attributes, &MF_SA_D3D11_AWARE, 1); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + } + + *obj = &decoder->IMFTransform_iface; + return S_OK; +} + +static HRESULT WINAPI test_mft_factory_LockServer(IClassFactory *iface, BOOL fLock) +{ + return S_OK; +} + +static const IClassFactoryVtbl test_mft_factory_vtbl = +{ + test_mft_factory_QueryInterface, + test_mft_factory_AddRef, + test_mft_factory_Release, + test_mft_factory_CreateInstance, + test_mft_factory_LockServer, +}; + +static void test_source_reader_transform_stream_change(void) +{ + static const struct attribute_desc test_stream_type_desc[] = + { + ATTR_GUID(MF_MT_MAJOR_TYPE, MFMediaType_Video), + ATTR_GUID(MF_MT_SUBTYPE, MFVideoFormat_TEST), + ATTR_RATIO(MF_MT_FRAME_SIZE, 96, 96), + {0}, + }; + static const struct attribute_desc yuy2_stream_type_desc[] = + { + ATTR_GUID(MF_MT_MAJOR_TYPE, MFMediaType_Video), + ATTR_GUID(MF_MT_SUBTYPE, MFVideoFormat_YUY2), + {0}, + }; + static const struct attribute_desc yuy2_expect_desc[] = + { + ATTR_GUID(MF_MT_MAJOR_TYPE, MFMediaType_Video), + ATTR_GUID(MF_MT_SUBTYPE, MFVideoFormat_YUY2), + ATTR_RATIO(MF_MT_FRAME_SIZE, 96, 96), + ATTR_UINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, 1, .todo = TRUE), + ATTR_UINT32(MF_MT_FIXED_SIZE_SAMPLES, 1, .todo = TRUE), + ATTR_UINT32(MF_MT_DEFAULT_STRIDE, 96 * 2, .todo = TRUE), + ATTR_UINT32(MF_MT_SAMPLE_SIZE, 96 * 96 * 2, .todo = TRUE), + {0}, + }; + static const struct attribute_desc yuy2_expect_new_desc[] = + { + ATTR_GUID(MF_MT_MAJOR_TYPE, MFMediaType_Video), + ATTR_GUID(MF_MT_SUBTYPE, MFVideoFormat_YUY2), + ATTR_RATIO(MF_MT_FRAME_SIZE, 128, 128), + ATTR_UINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, 1), + 
ATTR_UINT32(MF_MT_FIXED_SIZE_SAMPLES, 1), + ATTR_UINT32(MF_MT_DEFAULT_STRIDE, 128 * 2), + ATTR_UINT32(MF_MT_SAMPLE_SIZE, 128 * 128 * 2), + {0}, + }; + const MFT_REGISTER_TYPE_INFO output_info[] = + { + {MFMediaType_Video, MFVideoFormat_NV12}, + {MFMediaType_Video, MFVideoFormat_YUY2}, + }; + const MFT_REGISTER_TYPE_INFO input_info[] = + { + {MFMediaType_Video, MFVideoFormat_TEST}, + }; + MFVIDEOFORMAT output_format = {.dwSize = sizeof(output_format)}; + IClassFactory factory = {.lpVtbl = &test_mft_factory_vtbl}; + IMFStreamDescriptor *video_stream; + IMFSourceReaderEx *reader_ex; + IMFTransform *test_decoder; + IMFMediaType *media_type; + IMFSourceReader *reader; + IMFMediaSource *source; + LONGLONG timestamp; + DWORD index, flags; + IMFSample *sample; + GUID category; + HRESULT hr; + + + hr = MFTRegisterLocal(&factory, &MFT_CATEGORY_VIDEO_DECODER, L"Test Decoder", 0, + ARRAY_SIZE(input_info), input_info, ARRAY_SIZE(output_info), output_info); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + + /* test source reader with a custom source */ + + hr = MFCreateMediaType(&media_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + init_media_type(media_type, test_stream_type_desc, -1); + hr = MFCreateStreamDescriptor(0, 1, &media_type, &video_stream); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + IMFMediaType_Release(media_type); + + source = create_test_source(&video_stream, 1); + ok(!!source, "Failed to create test source.\n"); + IMFStreamDescriptor_Release(video_stream); + + hr = MFCreateSourceReaderFromMediaSource(source, NULL, &reader); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + IMFMediaSource_Release(source); + + /* skip tests on Win7 which misses IMFSourceReaderEx */ + hr = IMFSourceReader_QueryInterface(reader, &IID_IMFSourceReaderEx, (void **)&reader_ex); + ok(hr == S_OK || broken(hr == E_NOINTERFACE) /* Win7 */, "Unexpected hr %#lx.\n", hr); + if (broken(hr == E_NOINTERFACE)) + { + win_skip("missing IMFSourceReaderEx interface, skipping tests on 
Win7\n"); + goto skip_tests; + } + IMFSourceReaderEx_Release(reader_ex); + + hr = IMFSourceReader_SetStreamSelection(reader, 0, TRUE); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + + + hr = IMFSourceReader_GetNativeMediaType(reader, 0, 0, &media_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + check_media_type(media_type, test_stream_type_desc, -1); + IMFMediaType_Release(media_type); + + hr = IMFSourceReader_GetCurrentMediaType(reader, 0, &media_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + check_media_type(media_type, test_stream_type_desc, -1); + IMFMediaType_Release(media_type); + + hr = MFCreateMediaType(&media_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + init_media_type(media_type, yuy2_stream_type_desc, -1); + hr = IMFSourceReader_SetCurrentMediaType(reader, 0, NULL, media_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + IMFMediaType_Release(media_type); + + hr = IMFSourceReader_GetCurrentMediaType(reader, 0, &media_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + check_media_type(media_type, yuy2_expect_desc, -1); + IMFMediaType_Release(media_type); + + + + hr = IMFSourceReader_QueryInterface(reader, &IID_IMFSourceReaderEx, (void **)&reader_ex); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFSourceReaderEx_GetTransformForStream(reader_ex, 0, 0, &category, NULL); + ok(hr == E_POINTER, "Unexpected hr %#lx.\n", hr); + hr = IMFSourceReaderEx_GetTransformForStream(reader_ex, 0, 0, NULL, &test_decoder); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(test_decoder->lpVtbl == &test_decoder_vtbl, "got unexpected transform\n"); + IMFSourceReaderEx_Release(reader_ex); + + fail_request_sample = FALSE; + + test_decoder_set_next_output(test_decoder, MF_E_TRANSFORM_STREAM_CHANGE); + + output_format.videoInfo.dwHeight = 128; + output_format.videoInfo.dwWidth = 128; + test_decoder_set_output_format(test_decoder, &output_format); + + sample = (void *)0xdeadbeef; + index = flags = timestamp = 0xdeadbeef; + hr = 
IMFSourceReader_ReadSample(reader, 0, 0, &index, &flags, &timestamp, &sample); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(index == 0, "got %lu.\n", index); + ok(flags == MF_SOURCE_READERF_CURRENTMEDIATYPECHANGED, "got %lu.\n", flags); + ok(timestamp == 0, "got %I64d.\n", timestamp); + ok(sample != (void *)0xdeadbeef, "got %p.\n", sample); + IMFSample_Release(sample); + + hr = IMFSourceReader_GetCurrentMediaType(reader, 0, &media_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + check_media_type(media_type, yuy2_expect_new_desc, -1); + IMFMediaType_Release(media_type); + + fail_request_sample = TRUE; + + IMFTransform_Release(test_decoder); + +skip_tests: + IMFSourceReader_Release(reader); + + hr = MFTUnregisterLocal(&factory); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); +} + +static void test_source_reader_transforms_d3d9(void) +{ + static const struct attribute_desc test_stream_type_desc[] = + { + ATTR_GUID(MF_MT_MAJOR_TYPE, MFMediaType_Video), + ATTR_GUID(MF_MT_SUBTYPE, MFVideoFormat_TEST), + ATTR_RATIO(MF_MT_FRAME_SIZE, 96, 96), + {0}, + }; + static const struct attribute_desc rgb32_stream_type_desc[] = + { + ATTR_GUID(MF_MT_MAJOR_TYPE, MFMediaType_Video), + ATTR_GUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32), + {0}, + }; + static const struct attribute_desc rgb32_expect_desc[] = + { + ATTR_GUID(MF_MT_MAJOR_TYPE, MFMediaType_Video), + ATTR_GUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32), + ATTR_RATIO(MF_MT_FRAME_SIZE, 96, 96), + ATTR_UINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, 1), + ATTR_UINT32(MF_MT_COMPRESSED, 0, .todo = TRUE), + ATTR_UINT32(MF_MT_INTERLACE_MODE, 2, .todo = TRUE), + {0}, + }; + const MFT_REGISTER_TYPE_INFO output_info[] = + { + {MFMediaType_Video, MFVideoFormat_NV12}, + {MFMediaType_Video, MFVideoFormat_YUY2}, + }; + const MFT_REGISTER_TYPE_INFO input_info[] = + { + {MFMediaType_Video, MFVideoFormat_TEST}, + }; + IClassFactory factory = {.lpVtbl = &test_mft_factory_vtbl}; + IMFTransform *test_decoder, *video_processor; + IDirect3DDeviceManager9 
*d3d9_manager; + IMFStreamDescriptor *video_stream; + IDirect3DDevice9 *d3d9_device; + IMFSourceReaderEx *reader_ex; + IMFAttributes *attributes; + IMFMediaType *media_type; + IMFSourceReader *reader; + IMFMediaBuffer *buffer; + IMFMediaSource *source; + LONGLONG timestamp; + DWORD index, flags; + IMFSample *sample; + IDirect3D9 *d3d9; + UINT32 value; + HWND window; + UINT token; + HRESULT hr; + + d3d9 = Direct3DCreate9(D3D_SDK_VERSION); + if (!d3d9) + { + skip("Failed to create a D3D9 object, skipping tests.\n"); + return; + } + + window = create_window(); + if (!(d3d9_device = create_d3d9_device(d3d9, window))) + { + skip("Failed to create a D3D9 device, skipping tests.\n"); + IDirect3D9_Release(d3d9); + DestroyWindow(window); + return; + } + IDirect3D9_Release(d3d9); + + test_decoder_d3d_aware = TRUE; + + hr = MFTRegisterLocal(&factory, &MFT_CATEGORY_VIDEO_DECODER, L"Test Decoder", 0, + ARRAY_SIZE(input_info), input_info, ARRAY_SIZE(output_info), output_info); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + + hr = MFCreateAttributes(&attributes, 1); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFAttributes_SetUINT32(attributes, &MF_SOURCE_READER_ENABLE_ADVANCED_VIDEO_PROCESSING, 1); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + + hr = DXVA2CreateDirect3DDeviceManager9(&token, &d3d9_manager); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IDirect3DDeviceManager9_ResetDevice(d3d9_manager, d3d9_device, token); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + IDirect3DDevice9_Release(d3d9_device); + + hr = IMFAttributes_SetUnknown(attributes, &MF_SOURCE_READER_D3D_MANAGER, (IUnknown *)d3d9_manager); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + expect_d3d9_manager = d3d9_manager; + + + /* test d3d aware decoder that doesn't allocate buffers */ + + hr = MFCreateMediaType(&media_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + init_media_type(media_type, test_stream_type_desc, -1); + hr = MFCreateStreamDescriptor(0, 1, &media_type, 
&video_stream); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + IMFMediaType_Release(media_type); + + source = create_test_source(&video_stream, 1); + ok(!!source, "Failed to create test source.\n"); + IMFStreamDescriptor_Release(video_stream); + + hr = MFCreateSourceReaderFromMediaSource(source, attributes, &reader); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + IMFAttributes_Release(attributes); + IMFMediaSource_Release(source); + + /* skip tests on Win7 which misses IMFSourceReaderEx */ + hr = IMFSourceReader_QueryInterface(reader, &IID_IMFSourceReaderEx, (void **)&reader_ex); + ok(hr == S_OK || broken(hr == E_NOINTERFACE) /* Win7 */, "Unexpected hr %#lx.\n", hr); + if (broken(hr == E_NOINTERFACE)) + { + win_skip("missing IMFSourceReaderEx interface, skipping tests on Win7\n"); + IMFSourceReader_Release(reader); + goto skip_tests; + } + + hr = IMFSourceReader_SetStreamSelection(reader, 0, TRUE); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + + hr = IMFSourceReader_GetNativeMediaType(reader, 0, 0, &media_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + check_media_type(media_type, test_stream_type_desc, -1); + IMFMediaType_Release(media_type); + + hr = IMFSourceReader_GetCurrentMediaType(reader, 0, &media_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + check_media_type(media_type, test_stream_type_desc, -1); + IMFMediaType_Release(media_type); + ok(!test_decoder_got_d3d_manager, "d3d manager received\n"); + + hr = MFCreateMediaType(&media_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + init_media_type(media_type, rgb32_stream_type_desc, -1); + hr = IMFSourceReader_SetCurrentMediaType(reader, 0, NULL, media_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + IMFMediaType_Release(media_type); + ok(!!test_decoder_got_d3d_manager, "d3d manager not received\n"); + + hr = IMFSourceReader_GetCurrentMediaType(reader, 0, &media_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + check_media_type(media_type, rgb32_expect_desc, -1); + 
IMFMediaType_Release(media_type); + + + /* video processor transform is not D3D9 aware on more recent Windows */ + + hr = IMFSourceReaderEx_GetTransformForStream(reader_ex, 0, 1, NULL, &video_processor); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(video_processor->lpVtbl != &test_decoder_vtbl, "got unexpected transform\n"); + hr = IMFTransform_GetAttributes(video_processor, &attributes); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFAttributes_GetUINT32(attributes, &MF_SA_D3D_AWARE, &value); + todo_wine /* Wine exposes MF_SA_D3D_AWARE on the video processor, as Win7 */ + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + hr = IMFAttributes_GetUINT32(attributes, &MF_SA_D3D11_AWARE, &value); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value == 1, "got %u.\n", value); + hr = IMFAttributes_GetUINT32(attributes, &MF_SA_MINIMUM_OUTPUT_SAMPLE_COUNT, &value); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + IMFAttributes_Release(attributes); + + hr = IMFTransform_GetOutputStreamAttributes(video_processor, 0, &attributes); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFAttributes_GetCount(attributes, &value); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value == 0, "got %u.\n", value); + IMFAttributes_Release(attributes); + + hr = IMFTransform_GetOutputCurrentType(video_processor, 0, &media_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + check_media_type(media_type, rgb32_expect_desc, -1); + IMFMediaType_Release(media_type); + + IMFTransform_Release(video_processor); + + hr = IMFSourceReaderEx_GetTransformForStream(reader_ex, 0, 0, NULL, &test_decoder); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(test_decoder->lpVtbl == &test_decoder_vtbl, "got unexpected transform\n"); + hr = IMFTransform_GetAttributes(test_decoder, &attributes); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFAttributes_GetUINT32(attributes, &MF_SA_D3D_AWARE, &value); + ok(hr == S_OK, "Unexpected hr 
%#lx.\n", hr); + ok(value == 1, "got %u\n", value); + IMFAttributes_Release(attributes); + + + fail_request_sample = FALSE; + test_decoder_set_next_output(test_decoder, S_OK); + + sample = (void *)0xdeadbeef; + index = flags = timestamp = 0xdeadbeef; + hr = IMFSourceReader_ReadSample(reader, 0, 0, &index, &flags, &timestamp, &sample); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(index == 0, "got %lu.\n", index); + ok(flags == 0, "got %lu.\n", flags); + ok(timestamp == 0, "got %I64d.\n", timestamp); + ok(sample != (void *)0xdeadbeef, "got %p.\n", sample); + + hr = IMFSample_GetBufferByIndex(sample, 0, &buffer); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + check_interface(buffer, &IID_IMF2DBuffer2, TRUE); + check_interface(buffer, &IID_IMFGetService, TRUE); + check_interface(buffer, &IID_IMFDXGIBuffer, FALSE); + check_service_interface(buffer, &MR_BUFFER_SERVICE, &IID_IDirect3DSurface9, TRUE); + IMFMediaBuffer_Release(buffer); + + IMFSample_Release(sample); + + fail_request_sample = TRUE; + + + /* video processor output stream attributes are left empty in D3D9 mode */ + + hr = IMFSourceReaderEx_GetTransformForStream(reader_ex, 0, 1, NULL, &video_processor); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(video_processor->lpVtbl != &test_decoder_vtbl, "got unexpected transform\n"); + + hr = IMFTransform_GetAttributes(video_processor, &attributes); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFAttributes_GetUINT32(attributes, &MF_SA_MINIMUM_OUTPUT_SAMPLE_COUNT, &value); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value == 6, "got %u.\n", value); + IMFAttributes_Release(attributes); + + hr = IMFTransform_GetOutputStreamAttributes(video_processor, 0, &attributes); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFAttributes_GetCount(attributes, &value); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value == 0, "got %u.\n", value); + IMFAttributes_Release(attributes); + + IMFTransform_Release(video_processor); + + + 
IMFTransform_Release(test_decoder); + IMFSourceReaderEx_Release(reader_ex); + IMFSourceReader_Release(reader); + + + /* test d3d aware decoder that allocates buffers */ + + test_decoder_allocate_samples = TRUE; + + hr = MFCreateAttributes(&attributes, 1); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFAttributes_SetUINT32(attributes, &MF_SOURCE_READER_ENABLE_ADVANCED_VIDEO_PROCESSING, 1); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFAttributes_SetUnknown(attributes, &MF_SOURCE_READER_D3D_MANAGER, (IUnknown *)d3d9_manager); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + + hr = MFCreateMediaType(&media_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + init_media_type(media_type, test_stream_type_desc, -1); + hr = MFCreateStreamDescriptor(0, 1, &media_type, &video_stream); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + IMFMediaType_Release(media_type); + + source = create_test_source(&video_stream, 1); + ok(!!source, "Failed to create test source.\n"); + IMFStreamDescriptor_Release(video_stream); + + hr = MFCreateSourceReaderFromMediaSource(source, attributes, &reader); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + IMFAttributes_Release(attributes); + IMFMediaSource_Release(source); + + hr = IMFSourceReader_SetStreamSelection(reader, 0, TRUE); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + + hr = MFCreateMediaType(&media_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + init_media_type(media_type, rgb32_stream_type_desc, -1); + hr = IMFSourceReader_SetCurrentMediaType(reader, 0, NULL, media_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + IMFMediaType_Release(media_type); + ok(!!test_decoder_got_d3d_manager, "d3d manager not received\n"); + + + hr = IMFSourceReader_QueryInterface(reader, &IID_IMFSourceReaderEx, (void **)&reader_ex); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFSourceReaderEx_GetTransformForStream(reader_ex, 0, 0, NULL, &test_decoder); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + 
ok(test_decoder->lpVtbl == &test_decoder_vtbl, "got unexpected transform\n"); + IMFSourceReaderEx_Release(reader_ex); + + fail_request_sample = FALSE; + test_decoder_set_next_output(test_decoder, S_OK); + + sample = (void *)0xdeadbeef; + index = flags = timestamp = 0xdeadbeef; + hr = IMFSourceReader_ReadSample(reader, 0, 0, &index, &flags, &timestamp, &sample); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(index == 0, "got %lu.\n", index); + ok(flags == 0, "got %lu.\n", flags); + ok(timestamp == 0, "got %I64d.\n", timestamp); + ok(sample != (void *)0xdeadbeef, "got %p.\n", sample); + + /* the buffer we received is a D3D buffer nonetheless */ + hr = IMFSample_GetBufferByIndex(sample, 0, &buffer); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + check_interface(buffer, &IID_IMF2DBuffer2, TRUE); + check_interface(buffer, &IID_IMFGetService, TRUE); + check_interface(buffer, &IID_IMFDXGIBuffer, FALSE); + check_service_interface(buffer, &MR_BUFFER_SERVICE, &IID_IDirect3DSurface9, TRUE); + IMFMediaBuffer_Release(buffer); + + IMFSample_Release(sample); + + fail_request_sample = TRUE; + + IMFTransform_Release(test_decoder); + IMFSourceReader_Release(reader); + + test_decoder_allocate_samples = FALSE; + + +skip_tests: + hr = MFTUnregisterLocal(&factory); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + + IDirect3DDeviceManager9_Release(d3d9_manager); + DestroyWindow(window); + + test_decoder_got_d3d_manager = FALSE; + test_decoder_d3d_aware = FALSE; + expect_d3d9_manager = NULL; +} + +static void test_source_reader_transforms_d3d11(void) +{ + static const struct attribute_desc test_stream_type_desc[] = + { + ATTR_GUID(MF_MT_MAJOR_TYPE, MFMediaType_Video), + ATTR_GUID(MF_MT_SUBTYPE, MFVideoFormat_TEST), + ATTR_RATIO(MF_MT_FRAME_SIZE, 96, 96), + {0}, + }; + static const struct attribute_desc rgb32_stream_type_desc[] = + { + ATTR_GUID(MF_MT_MAJOR_TYPE, MFMediaType_Video), + ATTR_GUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32), + {0}, + }; + static const struct attribute_desc 
rgb32_expect_desc[] = + { + ATTR_GUID(MF_MT_MAJOR_TYPE, MFMediaType_Video), + ATTR_GUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32), + ATTR_RATIO(MF_MT_FRAME_SIZE, 96, 96), + ATTR_UINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, 1), + ATTR_UINT32(MF_MT_COMPRESSED, 0, .todo = TRUE), + ATTR_UINT32(MF_MT_INTERLACE_MODE, 2, .todo = TRUE), + {0}, + }; + static const struct attribute_desc abgr32_stream_type_desc[] = + { + ATTR_GUID(MF_MT_MAJOR_TYPE, MFMediaType_Video), + ATTR_GUID(MF_MT_SUBTYPE, MFVideoFormat_ABGR32), + {0}, + }; + static const struct attribute_desc abgr32_expect_desc[] = + { + ATTR_GUID(MF_MT_MAJOR_TYPE, MFMediaType_Video), + ATTR_GUID(MF_MT_SUBTYPE, MFVideoFormat_ABGR32), + ATTR_RATIO(MF_MT_FRAME_SIZE, 96, 96), + ATTR_UINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, 1), + ATTR_UINT32(MF_MT_COMPRESSED, 0, .todo = TRUE), + ATTR_UINT32(MF_MT_INTERLACE_MODE, 2, .todo = TRUE), + {0}, + }; + const MFT_REGISTER_TYPE_INFO output_info[] = + { + {MFMediaType_Video, MFVideoFormat_NV12}, + {MFMediaType_Video, MFVideoFormat_YUY2}, + }; + const MFT_REGISTER_TYPE_INFO input_info[] = + { + {MFMediaType_Video, MFVideoFormat_TEST}, + }; + IClassFactory factory = {.lpVtbl = &test_mft_factory_vtbl}; + IMFTransform *test_decoder, *video_processor; + IMFStreamDescriptor *video_stream; + ID3D11Multithread *multithread; + IMFDXGIDeviceManager *manager; + IMFSourceReaderEx *reader_ex; + IMFAttributes *attributes; + IMFMediaType *media_type; + IMFSourceReader *reader; + IMFMediaBuffer *buffer; + IMFMediaSource *source; + UINT32 value, token; + ID3D11Device *d3d11; + LONGLONG timestamp; + DWORD index, flags; + IMFSample *sample; + HRESULT hr; + + if (!pMFCreateDXGIDeviceManager) + { + win_skip("MFCreateDXGIDeviceManager() is not available, skipping tests.\n"); + return; + } + + hr = D3D11CreateDevice(NULL, D3D_DRIVER_TYPE_HARDWARE, NULL, D3D11_CREATE_DEVICE_VIDEO_SUPPORT, NULL, 0, + D3D11_SDK_VERSION, &d3d11, NULL, NULL); + if (FAILED(hr)) + { + skip("D3D11 device creation failed, skipping tests.\n"); + 
return; + } + + hr = ID3D11Device_QueryInterface(d3d11, &IID_ID3D11Multithread, (void **)&multithread); + ok(hr == S_OK, "got %#lx\n", hr); + ID3D11Multithread_SetMultithreadProtected(multithread, TRUE); + ID3D11Multithread_Release(multithread); + + hr = pMFCreateDXGIDeviceManager(&token, &manager); + ok(hr == S_OK, "got %#lx\n", hr); + hr = IMFDXGIDeviceManager_ResetDevice(manager, (IUnknown *)d3d11, token); + ok(hr == S_OK, "got %#lx\n", hr); + ID3D11Device_Release(d3d11); + + + test_decoder_d3d11_aware = TRUE; + + hr = MFTRegisterLocal(&factory, &MFT_CATEGORY_VIDEO_DECODER, L"Test Decoder", 0, + ARRAY_SIZE(input_info), input_info, ARRAY_SIZE(output_info), output_info); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + + hr = MFCreateAttributes(&attributes, 1); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFAttributes_SetUINT32(attributes, &MF_SOURCE_READER_ENABLE_ADVANCED_VIDEO_PROCESSING, 1); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFAttributes_SetUnknown(attributes, &MF_SOURCE_READER_D3D_MANAGER, (IUnknown *)manager); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + expect_dxgi_manager = manager; + + + hr = MFCreateMediaType(&media_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + init_media_type(media_type, test_stream_type_desc, -1); + hr = MFCreateStreamDescriptor(0, 1, &media_type, &video_stream); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + IMFMediaType_Release(media_type); + + source = create_test_source(&video_stream, 1); + ok(!!source, "Failed to create test source.\n"); + IMFStreamDescriptor_Release(video_stream); + + hr = MFCreateSourceReaderFromMediaSource(source, attributes, &reader); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + IMFAttributes_Release(attributes); + IMFMediaSource_Release(source); + + /* skip tests on Win7 which misses IMFSourceReaderEx */ + hr = IMFSourceReader_QueryInterface(reader, &IID_IMFSourceReaderEx, (void **)&reader_ex); + ok(hr == S_OK || broken(hr == E_NOINTERFACE) /* Win7 */, 
"Unexpected hr %#lx.\n", hr); + if (broken(hr == E_NOINTERFACE)) + { + win_skip("missing IMFSourceReaderEx interface, skipping tests on Win7\n"); + IMFSourceReader_Release(reader); + goto skip_tests; + } + + hr = IMFSourceReader_SetStreamSelection(reader, 0, TRUE); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + + hr = IMFSourceReader_GetNativeMediaType(reader, 0, 0, &media_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + check_media_type(media_type, test_stream_type_desc, -1); + IMFMediaType_Release(media_type); + + hr = IMFSourceReader_GetCurrentMediaType(reader, 0, &media_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + check_media_type(media_type, test_stream_type_desc, -1); + IMFMediaType_Release(media_type); + ok(!test_decoder_got_d3d_manager, "d3d manager received\n"); + + hr = MFCreateMediaType(&media_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + init_media_type(media_type, abgr32_stream_type_desc, -1); + hr = IMFSourceReader_SetCurrentMediaType(reader, 0, NULL, media_type); + ok(hr == S_OK || broken(hr == MF_E_INVALIDMEDIATYPE) /* needs a GPU */, "Unexpected hr %#lx.\n", hr); + IMFMediaType_Release(media_type); + + if (hr == S_OK) + { + hr = IMFSourceReader_GetCurrentMediaType(reader, 0, &media_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + check_media_type(media_type, abgr32_expect_desc, -1); + IMFMediaType_Release(media_type); + } + + hr = MFCreateMediaType(&media_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + init_media_type(media_type, rgb32_stream_type_desc, -1); + hr = IMFSourceReader_SetCurrentMediaType(reader, 0, NULL, media_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + IMFMediaType_Release(media_type); + + hr = IMFSourceReader_GetCurrentMediaType(reader, 0, &media_type); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + check_media_type(media_type, rgb32_expect_desc, -1); + IMFMediaType_Release(media_type); + + + /* video processor output stream attributes are still empty */ + + hr = 
IMFSourceReaderEx_GetTransformForStream(reader_ex, 0, 1, NULL, &video_processor); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(video_processor->lpVtbl != &test_decoder_vtbl, "got unexpected transform\n"); + + hr = IMFTransform_GetAttributes(video_processor, &attributes); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFAttributes_GetUINT32(attributes, &MF_SA_MINIMUM_OUTPUT_SAMPLE_COUNT, &value); + ok(hr == MF_E_ATTRIBUTENOTFOUND, "Unexpected hr %#lx.\n", hr); + IMFAttributes_Release(attributes); + + hr = IMFTransform_GetOutputStreamAttributes(video_processor, 0, &attributes); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + hr = IMFAttributes_GetCount(attributes, &value); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value == 0, "got %u.\n", value); + IMFAttributes_Release(attributes); + + IMFTransform_Release(video_processor); + + + hr = IMFSourceReaderEx_GetTransformForStream(reader_ex, 0, 0, NULL, &test_decoder); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(test_decoder->lpVtbl == &test_decoder_vtbl, "got unexpected transform\n"); + + fail_request_sample = FALSE; + test_decoder_set_next_output(test_decoder, S_OK); + + sample = (void *)0xdeadbeef; + index = flags = timestamp = 0xdeadbeef; + hr = IMFSourceReader_ReadSample(reader, 0, 0, &index, &flags, &timestamp, &sample); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(index == 0, "got %lu.\n", index); + ok(flags == 0, "got %lu.\n", flags); + ok(timestamp == 0, "got %I64d.\n", timestamp); + ok(sample != (void *)0xdeadbeef, "got %p.\n", sample); + + hr = IMFSample_GetBufferByIndex(sample, 0, &buffer); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + check_interface(buffer, &IID_IMF2DBuffer2, TRUE); + check_interface(buffer, &IID_IMFGetService, FALSE); + check_interface(buffer, &IID_IMFDXGIBuffer, TRUE); + IMFMediaBuffer_Release(buffer); + + IMFSample_Release(sample); + + fail_request_sample = TRUE; + + + /* video processor output stream attributes are now set with some defaults */ + + 
hr = IMFSourceReaderEx_GetTransformForStream(reader_ex, 0, 1, NULL, &video_processor); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(video_processor->lpVtbl != &test_decoder_vtbl, "got unexpected transform\n"); + + hr = IMFTransform_GetAttributes(video_processor, &attributes); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + value = 0xdeadbeef; + hr = IMFAttributes_GetUINT32(attributes, &MF_SA_MINIMUM_OUTPUT_SAMPLE_COUNT, &value); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value == 6, "got %u.\n", value); + IMFAttributes_Release(attributes); + + hr = IMFTransform_GetOutputStreamAttributes(video_processor, 0, &attributes); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + value = 0xdeadbeef; + hr = IMFAttributes_GetUINT32(attributes, &MF_SA_D3D11_BINDFLAGS, &value); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + ok(value == 1024, "got %u.\n", value); + IMFAttributes_Release(attributes); + + IMFTransform_Release(video_processor); + + + IMFSourceReaderEx_Release(reader_ex); + IMFSourceReader_Release(reader); + IMFTransform_Release(test_decoder); + + +skip_tests: + hr = MFTUnregisterLocal(&factory); + ok(hr == S_OK, "Unexpected hr %#lx.\n", hr); + + IMFDXGIDeviceManager_Release(manager); + + test_decoder_got_d3d_manager = FALSE; + test_decoder_d3d11_aware = FALSE; + expect_dxgi_manager = NULL; +} + START_TEST(mfplat) { HRESULT hr; @@ -2255,6 +3549,9 @@ START_TEST(mfplat) test_source_reader_transforms(FALSE, FALSE); test_source_reader_transforms(TRUE, FALSE); test_source_reader_transforms(FALSE, TRUE); + test_source_reader_transform_stream_change(); + test_source_reader_transforms_d3d9(); + test_source_reader_transforms_d3d11(); test_reader_d3d9(); test_sink_writer_create(); test_sink_writer_mp4(); diff --git a/dlls/mmdevapi/client.c b/dlls/mmdevapi/client.c index c3bb7018bd4..41a9655aa44 100644 --- a/dlls/mmdevapi/client.c +++ b/dlls/mmdevapi/client.c @@ -904,6 +904,9 @@ static HRESULT WINAPI client_GetService(IAudioClient3 *iface, REFIID riid, void if 
(!new_session) IUnknown_AddRef((IUnknown *)*ppv); + } else if (IsEqualIID(riid, &IID_IAudioClockAdjustment)) { + IAudioClockAdjustment_AddRef(&This->IAudioClockAdjustment_iface); + *ppv = &This->IAudioClockAdjustment_iface; } else { FIXME("stub %s\n", debugstr_guid(riid)); hr = E_NOINTERFACE; diff --git a/dlls/ntdll/unix/fsync.c b/dlls/ntdll/unix/fsync.c index c3da44e4f26..675f20c775e 100644 --- a/dlls/ntdll/unix/fsync.c +++ b/dlls/ntdll/unix/fsync.c @@ -171,7 +171,7 @@ int do_fsync(void) if (do_fsync_cached == -1) { syscall( __NR_futex_waitv, NULL, 0, 0, NULL, 0 ); - do_fsync_cached = getenv("WINEFSYNC") && atoi(getenv("WINEFSYNC")) && errno != ENOSYS; + do_fsync_cached = getenv("WINEFSYNC") && atoi(getenv("WINEFSYNC")) && errno != ENOSYS && errno != EPERM; } return do_fsync_cached; diff --git a/dlls/ntdll/unix/socket.c b/dlls/ntdll/unix/socket.c index 7f0512da627..329da8cd4fd 100644 --- a/dlls/ntdll/unix/socket.c +++ b/dlls/ntdll/unix/socket.c @@ -1004,6 +1004,14 @@ static NTSTATUS try_send( int fd, struct async_send_ioctl *async ) ERR( "failed to convert address\n" ); return STATUS_ACCESS_VIOLATION; } + if (sock_type == SOCK_DGRAM && ((unix_addr.addr.sa_family == AF_INET && !unix_addr.in.sin_port) + || (unix_addr.addr.sa_family == AF_INET6 && !unix_addr.in6.sin6_port))) + { + /* Sending to port 0 succeeds on Windows. Use 'discard' service instead so sendmsg() works on Unix + * while still going through other parameter validation. 
*/ + WARN( "Trying to use destination port 0, substituting 9.\n" ); + unix_addr.in.sin_port = htons( 9 ); + } #if defined(HAS_IPX) && defined(SOL_IPX) if (async->addr->sa_family == WS_AF_IPX) @@ -2526,6 +2534,34 @@ NTSTATUS sock_ioctl( HANDLE handle, HANDLE event, PIO_APC_ROUTINE apc, void *apc case IOCTL_AFD_WINE_SET_TCP_NODELAY: return do_setsockopt( handle, io, IPPROTO_TCP, TCP_NODELAY, in_buffer, in_size ); +#if defined(TCP_KEEPIDLE) + /* TCP_KEEPALIVE on Windows is often called TCP_KEEPIDLE on Unix */ + case IOCTL_AFD_WINE_GET_TCP_KEEPALIVE: + return do_getsockopt( handle, io, IPPROTO_TCP, TCP_KEEPIDLE, out_buffer, out_size ); + + case IOCTL_AFD_WINE_SET_TCP_KEEPALIVE: + return do_setsockopt( handle, io, IPPROTO_TCP, TCP_KEEPIDLE, in_buffer, in_size ); +#elif defined(TCP_KEEPALIVE) + /* Mac */ + case IOCTL_AFD_WINE_GET_TCP_KEEPALIVE: + return do_getsockopt( handle, io, IPPROTO_TCP, TCP_KEEPALIVE, out_buffer, out_size ); + + case IOCTL_AFD_WINE_SET_TCP_KEEPALIVE: + return do_setsockopt( handle, io, IPPROTO_TCP, TCP_KEEPALIVE, in_buffer, in_size ); +#endif + + case IOCTL_AFD_WINE_GET_TCP_KEEPINTVL: + return do_getsockopt( handle, io, IPPROTO_TCP, TCP_KEEPINTVL, out_buffer, out_size ); + + case IOCTL_AFD_WINE_SET_TCP_KEEPINTVL: + return do_setsockopt( handle, io, IPPROTO_TCP, TCP_KEEPINTVL, in_buffer, in_size ); + + case IOCTL_AFD_WINE_GET_TCP_KEEPCNT: + return do_getsockopt( handle, io, IPPROTO_TCP, TCP_KEEPCNT, out_buffer, out_size ); + + case IOCTL_AFD_WINE_SET_TCP_KEEPCNT: + return do_setsockopt( handle, io, IPPROTO_TCP, TCP_KEEPCNT, in_buffer, in_size ); + default: { if ((code >> 16) == FILE_DEVICE_NETWORK) diff --git a/dlls/quartz/tests/mpegaudio.c b/dlls/quartz/tests/mpegaudio.c index 804cbbcc3d1..9fa6796e096 100644 --- a/dlls/quartz/tests/mpegaudio.c +++ b/dlls/quartz/tests/mpegaudio.c @@ -905,6 +905,7 @@ struct testfilter unsigned int got_sample, got_new_segment, got_eos, got_begin_flush, got_end_flush; REFERENCE_TIME expected_start_time; REFERENCE_TIME 
expected_stop_time; + BOOL todo_time; }; static inline struct testfilter *impl_from_strmbase_filter(struct strmbase_filter *iface) @@ -1012,7 +1013,9 @@ static HRESULT WINAPI testsink_Receive(struct strmbase_sink *iface, IMediaSample ok(hr == S_OK, "Got hr %#lx.\n", hr); if (filter->got_sample == 1) { + todo_wine_if(filter->todo_time) ok(start == filter->expected_start_time, "Got start time %s.\n", wine_dbgstr_longlong(start)); + todo_wine_if(filter->todo_time) ok(stop == filter->expected_stop_time, "Got stop time %s.\n", wine_dbgstr_longlong(stop)); } @@ -1510,6 +1513,7 @@ static void test_streaming_events(IMediaControl *control, IPin *sink, testsink->expected_start_time = 0; testsink->expected_stop_time = 120000; + testsink->todo_time = TRUE; hr = IMemInputPin_Receive(input, sample); ok(hr == S_OK, "Got hr %#lx.\n", hr); hr = IMemInputPin_Receive(input, sample); diff --git a/dlls/quartz/tests/mpegvideo.c b/dlls/quartz/tests/mpegvideo.c index 1f17ddbe636..835bc5bec42 100644 --- a/dlls/quartz/tests/mpegvideo.c +++ b/dlls/quartz/tests/mpegvideo.c @@ -834,7 +834,6 @@ struct testfilter unsigned int got_sample, got_new_segment, got_eos, got_begin_flush, got_end_flush; REFERENCE_TIME expected_start_time; REFERENCE_TIME expected_stop_time; - BOOL todo_stop_time; }; static inline struct testfilter *impl_from_strmbase_filter(struct strmbase_filter *iface) @@ -942,7 +941,7 @@ static HRESULT WINAPI testsink_Receive(struct strmbase_sink *iface, IMediaSample { ok(start == filter->expected_start_time, "Got start time %s, expected %s.\n", wine_dbgstr_longlong(start), wine_dbgstr_longlong(filter->expected_start_time)); - todo_wine_if(filter->todo_stop_time) + todo_wine ok(stop == filter->expected_stop_time, "Got stop time %s, expected %s.\n", wine_dbgstr_longlong(stop), wine_dbgstr_longlong(filter->expected_stop_time)); } @@ -1202,7 +1201,7 @@ static void test_quality_control(IFilterGraph2 *graph, IBaseFilter *filter, testsource->qc = NULL; } -static void 
test_send_sample(IMemInputPin *input, IMediaSample *sample, const BYTE *data, LONG len, BOOL todo) +static void test_send_sample(IMemInputPin *input, IMediaSample *sample, const BYTE *data, LONG len) { BYTE *target_data; HRESULT hr; @@ -1217,8 +1216,7 @@ static void test_send_sample(IMemInputPin *input, IMediaSample *sample, const BY ok(hr == S_OK, "Got hr %#lx.\n", hr); hr = IMemInputPin_Receive(input, sample); - todo_wine_if(todo) /* 0xc00d6d61 is MF_E_TRANSFORM_STREAM_CHANGE */ - ok(hr == S_OK, "Got hr %#lx.\n", hr); + ok(hr == S_OK, "Got hr %#lx.\n", hr); } static void test_send_video(IMemInputPin *input, IMediaSample *sample) @@ -1226,7 +1224,7 @@ static void test_send_video(IMemInputPin *input, IMediaSample *sample) /* gst-launch-1.0 -v videotestsrc pattern=black num-buffers=10 ! video/x-raw,width=32,height=24 ! mpeg2enc ! filesink location=empty-es2.mpg */ /* then truncate to taste */ /* each 00 00 01 b3 or 00 00 01 00 starts a new frame, except the first 00 00 01 00 after a 00 00 01 b3 */ - static const BYTE empty_mpg_frame1[] = { + static const BYTE empty_mpg_frames[] = { 0x00, 0x00, 0x01, 0xb3, 0x02, 0x00, 0x18, 0x15, 0x02, 0xbf, 0x60, 0x9c, 0x00, 0x00, 0x01, 0xb8, 0x00, 0x08, 0x00, 0x40, 0x00, 0x00, 0x01, 0x00, 0x00, 0x0f, 0xff, 0xf8, @@ -1235,27 +1233,22 @@ static void test_send_video(IMemInputPin *input, IMediaSample *sample) 0x41, 0x28, 0x88, 0x13, 0xb9, 0x6f, 0xcf, 0xc1, 0x04, 0x03, 0xa0, 0x11, 0xb1, 0x41, 0x28, 0x88, 0x13, 0xb9, 0x6f, 0xa1, 0x4b, 0x9f, 0x48, 0x04, 0x10, 0x0e, 0x80, 0x46, 0xc5, 0x04, 0xa2, 0x20, 0x4e, 0xe5, 0x80, 0x41, 0x00, 0xe8, 0x04, 0x6c, 0x50, 0x4a, 0x22, 0x04, 0xee, 0x58, - }; - static const BYTE empty_mpg_frame2[] = { 0x00, 0x00, 0x01, 0x00, 0x00, 0x57, 0xff, 0xf9, 0x80, 0x00, 0x00, 0x01, 0x01, 0x0a, 0x79, 0xc0, 0x00, 0x00, 0x01, 0x02, 0x0a, 0x79, 0xc0, - }; - static const BYTE empty_mpg_frame3[] = { 0x00, 0x00, 0x01, 0x00, 0x00, 0x97, 0xff, 0xf9, 0x80, 0x00, 0x00, 0x01, 0x01, 0x0a, 0x79, 0xc0, - 0x00, 0x00, 0x01, 0x02, 0x0a, 
0x79, 0xc0, 0x00, 0x00, 0x01, 0xb7, + 0x00, 0x00, 0x01, 0x02, 0x0a, 0x79, 0xc0, + }; + static const BYTE empty_mpg_eos[] = { + 0x00, 0x00, 0x01, 0xb7, }; HRESULT hr; IPin *pin; - /* frame 1 - it's a complete frame, but due to how MPEG framing works, the decoder doesn't know that */ - /* frame 2 - new frame starts, frame 1 can be emitted - but Wine gets confused by colorimetry and returns an error */ - /* frame 3 - Wine emits frames 1 and 2 */ - /* meanwhile, native won't emit anything until an unknown-sized internal buffer is filled, or EOS is announced */ - test_send_sample(input, sample, empty_mpg_frame1, ARRAY_SIZE(empty_mpg_frame1), FALSE); - test_send_sample(input, sample, empty_mpg_frame2, ARRAY_SIZE(empty_mpg_frame2), TRUE); - test_send_sample(input, sample, empty_mpg_frame3, ARRAY_SIZE(empty_mpg_frame3), FALSE); + /* native won't emit anything until an unknown-sized internal buffer is filled, or EOS is announced */ + test_send_sample(input, sample, empty_mpg_frames, ARRAY_SIZE(empty_mpg_frames)); + test_send_sample(input, sample, empty_mpg_eos, ARRAY_SIZE(empty_mpg_eos)); hr = IMemInputPin_QueryInterface(input, &IID_IPin, (void **)&pin); ok(hr == S_OK, "Got hr %#lx.\n", hr); @@ -1304,7 +1297,6 @@ static void test_sample_processing(IMediaControl *control, IMemInputPin *input, sink->expected_start_time = 0; sink->expected_stop_time = 0; - sink->todo_stop_time = FALSE; hr = IMediaSample_SetTime(sample, &sink->expected_start_time, &sink->expected_stop_time); ok(hr == S_OK, "Got hr %#lx.\n", hr); @@ -1342,7 +1334,6 @@ static void test_sample_processing(IMediaControl *control, IMemInputPin *input, sink->expected_start_time = 22222; sink->expected_stop_time = 22222; - sink->todo_stop_time = TRUE; test_send_video(input, sample); ok(sink->got_sample >= 1, "Got %u calls to Receive().\n", sink->got_sample); ok(sink->got_eos == 1, "Got %u calls to EndOfStream().\n", sink->got_eos); @@ -1412,7 +1403,6 @@ static void test_streaming_events(IMediaControl *control, IPin 
*sink, testsink->expected_start_time = 0; testsink->expected_stop_time = 0; - testsink->todo_stop_time = TRUE; test_send_video(input, sample); ok(testsink->got_sample >= 1, "Got %u calls to Receive().\n", testsink->got_sample); testsink->got_sample = 0; @@ -1440,7 +1430,6 @@ static void test_streaming_events(IMediaControl *control, IPin *sink, testsink->expected_start_time = 0; testsink->expected_stop_time = 0; - testsink->todo_stop_time = TRUE; test_send_video(input, sample); ok(testsink->got_sample >= 1, "Got %u calls to Receive().\n", testsink->got_sample); testsink->got_sample = 0; diff --git a/dlls/winegstreamer/Makefile.in b/dlls/winegstreamer/Makefile.in index a26ac0bff93..ee1bff4b085 100644 --- a/dlls/winegstreamer/Makefile.in +++ b/dlls/winegstreamer/Makefile.in @@ -23,6 +23,7 @@ SOURCES = \ video_processor.c \ wg_allocator.c \ wg_format.c \ + wg_media_type.c \ wg_muxer.c \ wg_parser.c \ wg_sample.c \ diff --git a/dlls/winegstreamer/audio_decoder.c b/dlls/winegstreamer/audio_decoder.c index 771eae465fe..734622a64ad 100644 --- a/dlls/winegstreamer/audio_decoder.c +++ b/dlls/winegstreamer/audio_decoder.c @@ -1,4 +1,4 @@ -/* AAC Decoder Transform +/* Audio Decoder Transform * * Copyright 2022 Rémi Bernon for CodeWeavers * @@ -36,8 +36,10 @@ WINE_DECLARE_DEBUG_CHANNEL(winediag); static WAVEFORMATEXTENSIBLE const audio_decoder_output_types[] = { - {.Format = {.wFormatTag = WAVE_FORMAT_IEEE_FLOAT, .wBitsPerSample = 32, .nSamplesPerSec = 48000, .nChannels = 2}}, - {.Format = {.wFormatTag = WAVE_FORMAT_PCM, .wBitsPerSample = 16, .nSamplesPerSec = 48000, .nChannels = 2}}, + {.Format = {.wFormatTag = WAVE_FORMAT_IEEE_FLOAT, .wBitsPerSample = 32, .nSamplesPerSec = 48000, .nChannels = 2, + .cbSize = sizeof(WAVEFORMATEXTENSIBLE) - sizeof(WAVEFORMATEX)}}, + {.Format = {.wFormatTag = WAVE_FORMAT_PCM, .wBitsPerSample = 16, .nSamplesPerSec = 48000, .nChannels = 2, + .cbSize = sizeof(WAVEFORMATEXTENSIBLE) - sizeof(WAVEFORMATEX)}}, }; static const UINT32 
default_channel_mask[7] = @@ -73,25 +75,15 @@ static struct audio_decoder *impl_from_IMFTransform(IMFTransform *iface) static HRESULT try_create_wg_transform(struct audio_decoder *decoder) { - struct wg_format input_format, output_format; struct wg_transform_attrs attrs = {0}; if (decoder->wg_transform) + { wg_transform_destroy(decoder->wg_transform); - decoder->wg_transform = 0; - - mf_media_type_to_wg_format(decoder->input_type, &input_format); - if (input_format.major_type == WG_MAJOR_TYPE_UNKNOWN) - return MF_E_INVALIDMEDIATYPE; - - mf_media_type_to_wg_format(decoder->output_type, &output_format); - if (output_format.major_type == WG_MAJOR_TYPE_UNKNOWN) - return MF_E_INVALIDMEDIATYPE; - - if (!(decoder->wg_transform = wg_transform_create(&input_format, &output_format, &attrs))) - return E_FAIL; + decoder->wg_transform = 0; + } - return S_OK; + return wg_transform_create_mf(decoder->input_type, decoder->output_type, &attrs, &decoder->wg_transform); } static HRESULT WINAPI transform_QueryInterface(IMFTransform *iface, REFIID iid, void **out) @@ -292,6 +284,7 @@ static HRESULT WINAPI transform_GetOutputAvailableType(IMFTransform *iface, DWOR wfx.SubFormat = MFAudioFormat_Base; wfx.SubFormat.Data1 = wfx.Format.wFormatTag; wfx.Format.wFormatTag = WAVE_FORMAT_EXTENSIBLE; + wfx.Format.cbSize = sizeof(WAVEFORMATEXTENSIBLE) - sizeof(WAVEFORMATEX); wfx.dwChannelMask = default_channel_mask[wfx.Format.nChannels]; } @@ -391,10 +384,17 @@ static HRESULT WINAPI transform_SetOutputType(IMFTransform *iface, DWORD id, IMF if (flags & MFT_SET_TYPE_TEST_ONLY) return S_OK; - if (!decoder->output_type && FAILED(hr = MFCreateMediaType(&decoder->output_type))) - return hr; + if (!wfx.Format.nBlockAlign) + wfx.Format.nBlockAlign = wfx.Format.wBitsPerSample * wfx.Format.nChannels / 8; + if (!wfx.Format.nAvgBytesPerSec) + wfx.Format.nAvgBytesPerSec = wfx.Format.nBlockAlign * wfx.Format.nSamplesPerSec; - if (FAILED(hr = IMFMediaType_CopyAllItems(type, (IMFAttributes 
*)decoder->output_type))) + if (decoder->output_type) + { + IMFMediaType_Release(decoder->output_type); + decoder->output_type = NULL; + } + if (FAILED(hr = MFCreateAudioMediaType(&wfx.Format, (IMFAudioMediaType **)&decoder->output_type))) return hr; if (FAILED(hr = try_create_wg_transform(decoder))) @@ -489,8 +489,22 @@ static HRESULT WINAPI transform_ProcessEvent(IMFTransform *iface, DWORD id, IMFM static HRESULT WINAPI transform_ProcessMessage(IMFTransform *iface, MFT_MESSAGE_TYPE message, ULONG_PTR param) { - FIXME("iface %p, message %#x, param %p stub!\n", iface, message, (void *)param); - return S_OK; + struct audio_decoder *decoder = impl_from_IMFTransform(iface); + + TRACE("iface %p, message %#x, param %Ix.\n", iface, message, param); + + switch (message) + { + case MFT_MESSAGE_COMMAND_DRAIN: + return wg_transform_drain(decoder->wg_transform); + + case MFT_MESSAGE_COMMAND_FLUSH: + return wg_transform_flush(decoder->wg_transform); + + default: + FIXME("Ignoring message %#x.\n", message); + return S_OK; + } } static HRESULT WINAPI transform_ProcessInput(IMFTransform *iface, DWORD id, IMFSample *sample, DWORD flags) @@ -528,7 +542,7 @@ static HRESULT WINAPI transform_ProcessOutput(IMFTransform *iface, DWORD flags, return hr; if (SUCCEEDED(hr = wg_transform_read_mf(decoder->wg_transform, samples->pSample, - info.cbSize, NULL, &samples->dwStatus))) + info.cbSize, &samples->dwStatus))) wg_sample_queue_flush(decoder->wg_sample_queue, false); else samples->dwStatus = MFT_OUTPUT_DATA_BUFFER_NO_SAMPLE; @@ -584,31 +598,16 @@ static HEAACWAVEINFO aac_decoder_input_types[] = HRESULT aac_decoder_create(REFIID riid, void **ret) { - static const struct wg_format output_format = - { - .major_type = WG_MAJOR_TYPE_AUDIO, - .u.audio = - { - .format = WG_AUDIO_FORMAT_F32LE, - .channel_mask = 1, - .channels = 1, - .rate = 44100, - }, - }; - static const struct wg_format input_format = {.major_type = WG_MAJOR_TYPE_AUDIO_MPEG4}; - struct wg_transform_attrs attrs = {0}; - 
wg_transform_t transform; struct audio_decoder *decoder; HRESULT hr; TRACE("riid %s, ret %p.\n", debugstr_guid(riid), ret); - if (!(transform = wg_transform_create(&input_format, &output_format, &attrs))) + if (FAILED(hr = check_audio_transform_support(&aac_decoder_input_types[0].wfx, &audio_decoder_output_types[0].Format))) { - ERR_(winediag)("GStreamer doesn't support WMA decoding, please install appropriate plugins\n"); - return E_FAIL; + ERR_(winediag)("GStreamer doesn't support AAC decoding, please install appropriate plugins\n"); + return hr; } - wg_transform_destroy(transform); if (!(decoder = calloc(1, sizeof(*decoder)))) return E_OUTOFMEMORY; diff --git a/dlls/winegstreamer/color_convert.c b/dlls/winegstreamer/color_convert.c index 41e5bcf3b60..4b60628e8ba 100644 --- a/dlls/winegstreamer/color_convert.c +++ b/dlls/winegstreamer/color_convert.c @@ -97,25 +97,15 @@ static inline struct color_convert *impl_from_IUnknown(IUnknown *iface) static HRESULT try_create_wg_transform(struct color_convert *impl) { - struct wg_format input_format, output_format; struct wg_transform_attrs attrs = {0}; if (impl->wg_transform) + { wg_transform_destroy(impl->wg_transform); - impl->wg_transform = 0; - - mf_media_type_to_wg_format(impl->input_type, &input_format); - if (input_format.major_type == WG_MAJOR_TYPE_UNKNOWN) - return MF_E_INVALIDMEDIATYPE; - - mf_media_type_to_wg_format(impl->output_type, &output_format); - if (output_format.major_type == WG_MAJOR_TYPE_UNKNOWN) - return MF_E_INVALIDMEDIATYPE; - - if (!(impl->wg_transform = wg_transform_create(&input_format, &output_format, &attrs))) - return E_FAIL; + impl->wg_transform = 0; + } - return S_OK; + return wg_transform_create_mf(impl->input_type, impl->output_type, &attrs, &impl->wg_transform); } static HRESULT WINAPI unknown_QueryInterface(IUnknown *iface, REFIID iid, void **out) @@ -592,7 +582,7 @@ static HRESULT WINAPI transform_ProcessOutput(IMFTransform *iface, DWORD flags, return hr; if (SUCCEEDED(hr = 
wg_transform_read_mf(impl->wg_transform, samples->pSample, - info.cbSize, NULL, &samples->dwStatus))) + info.cbSize, &samples->dwStatus))) wg_sample_queue_flush(impl->wg_sample_queue, false); return hr; @@ -917,39 +907,28 @@ static const IPropertyStoreVtbl property_store_vtbl = HRESULT color_convert_create(IUnknown *outer, IUnknown **out) { - static const struct wg_format input_format = + const MFVIDEOFORMAT input_format = { - .major_type = WG_MAJOR_TYPE_VIDEO, - .u.video = - { - .format = WG_VIDEO_FORMAT_I420, - .width = 1920, - .height = 1080, - }, + .dwSize = sizeof(MFVIDEOFORMAT), + .videoInfo = {.dwWidth = 1920, .dwHeight = 1080}, + .guidFormat = MFVideoFormat_I420, }; - static const struct wg_format output_format = + const MFVIDEOFORMAT output_format = { - .major_type = WG_MAJOR_TYPE_VIDEO, - .u.video = - { - .format = WG_VIDEO_FORMAT_NV12, - .width = 1920, - .height = 1080, - }, + .dwSize = sizeof(MFVIDEOFORMAT), + .videoInfo = {.dwWidth = 1920, .dwHeight = 1080}, + .guidFormat = MFVideoFormat_NV12, }; - struct wg_transform_attrs attrs = {0}; - wg_transform_t transform; struct color_convert *impl; HRESULT hr; TRACE("outer %p, out %p.\n", outer, out); - if (!(transform = wg_transform_create(&input_format, &output_format, &attrs))) + if (FAILED(hr = check_video_transform_support(&input_format, &output_format))) { ERR_(winediag)("GStreamer doesn't support video conversion, please install appropriate plugins.\n"); - return E_FAIL; + return hr; } - wg_transform_destroy(transform); if (!(impl = calloc(1, sizeof(*impl)))) return E_OUTOFMEMORY; diff --git a/dlls/winegstreamer/gst_private.h b/dlls/winegstreamer/gst_private.h index 88804c08a4c..adefeaba15c 100644 --- a/dlls/winegstreamer/gst_private.h +++ b/dlls/winegstreamer/gst_private.h @@ -82,10 +82,11 @@ void wg_parser_push_data(wg_parser_t parser, const void *data, uint32_t size); uint32_t wg_parser_get_stream_count(wg_parser_t parser); wg_parser_stream_t wg_parser_get_stream(wg_parser_t parser, uint32_t index); 
-void wg_parser_stream_get_preferred_format(wg_parser_stream_t stream, struct wg_format *format); +void wg_parser_stream_get_current_format(wg_parser_stream_t stream, struct wg_format *format); +HRESULT wg_parser_stream_get_current_type_mf(wg_parser_stream_t stream, IMFMediaType **media_type); void wg_parser_stream_get_codec_format(wg_parser_stream_t stream, struct wg_format *format); -void wg_parser_stream_enable(wg_parser_stream_t stream, const struct wg_format *format, - uint32_t flags); +HRESULT wg_parser_stream_enable_mf(wg_parser_stream_t stream, IMFMediaType *media_type); +void wg_parser_stream_enable(wg_parser_stream_t stream, const struct wg_format *format); void wg_parser_stream_disable(wg_parser_stream_t stream); bool wg_parser_stream_get_buffer(wg_parser_t parser, wg_parser_stream_t stream, @@ -111,26 +112,29 @@ HRESULT wg_source_get_stream_count(wg_source_t source, uint32_t *stream_count); HRESULT wg_source_get_duration(wg_source_t source, uint64_t *duration); HRESULT wg_source_set_position(wg_source_t source, uint64_t time); HRESULT wg_source_get_position(wg_source_t source, uint64_t *read_offset); -HRESULT wg_source_push_data(wg_source_t source, const void *data, uint32_t size); +HRESULT wg_source_push_data(wg_source_t source, UINT64 offset, const void *data, uint32_t size); HRESULT wg_source_read_data(wg_source_t source, UINT32 index, IMFSample **out); -bool wg_source_get_stream_format(wg_source_t source, UINT32 index, - struct wg_format *format); +HRESULT wg_source_get_stream_type(wg_source_t source, UINT32 index, IMFMediaType **media_type); char *wg_source_get_stream_tag(wg_source_t source, UINT32 index, wg_parser_tag tag); void wg_source_set_stream_flags(wg_source_t source, UINT32 index, BOOL select); -wg_transform_t wg_transform_create(const struct wg_format *input_format, - const struct wg_format *output_format, const struct wg_transform_attrs *attrs); +HRESULT wg_transform_create_mf(IMFMediaType *input_type, IMFMediaType *output_type, + const 
struct wg_transform_attrs *attrs, wg_transform_t *transform); HRESULT wg_transform_create_quartz(const AM_MEDIA_TYPE *input_format, const AM_MEDIA_TYPE *output_format, const struct wg_transform_attrs *attrs, wg_transform_t *transform); void wg_transform_destroy(wg_transform_t transform); -bool wg_transform_set_output_format(wg_transform_t transform, struct wg_format *format); +HRESULT wg_transform_get_output_type(wg_transform_t transform, IMFMediaType **media_type); +HRESULT wg_transform_set_output_type(wg_transform_t transform, IMFMediaType *media_type); bool wg_transform_get_status(wg_transform_t transform, bool *accepts_input); HRESULT wg_transform_drain(wg_transform_t transform); HRESULT wg_transform_flush(wg_transform_t transform); void wg_transform_notify_qos(wg_transform_t transform, bool underflow, double proportion, int64_t diff, uint64_t timestamp); +HRESULT check_audio_transform_support(const WAVEFORMATEX *input, const WAVEFORMATEX *output); +HRESULT check_video_transform_support(const MFVIDEOFORMAT *input, const MFVIDEOFORMAT *output); + HRESULT wg_muxer_create(const char *format, wg_muxer_t *muxer); void wg_muxer_destroy(wg_muxer_t muxer); HRESULT wg_muxer_add_stream(wg_muxer_t muxer, UINT32 stream_id, const struct wg_format *format); @@ -179,10 +183,16 @@ HRESULT wg_transform_push_quartz(wg_transform_t transform, struct wg_sample *sam HRESULT wg_transform_push_dmo(wg_transform_t transform, IMediaBuffer *media_buffer, DWORD flags, REFERENCE_TIME time_stamp, REFERENCE_TIME time_length, struct wg_sample_queue *queue); HRESULT wg_transform_read_mf(wg_transform_t transform, IMFSample *sample, - DWORD sample_size, struct wg_format *format, DWORD *flags); + DWORD sample_size, DWORD *flags); HRESULT wg_transform_read_quartz(wg_transform_t transform, struct wg_sample *sample); HRESULT wg_transform_read_dmo(wg_transform_t transform, DMO_OUTPUT_DATA_BUFFER *buffer); +/* These unixlib entry points should not be used directly, they assume samples + * to be queued 
and zero-copy support, use the helpers below instead. + */ +HRESULT wg_transform_push_data(wg_transform_t transform, struct wg_sample *sample); +HRESULT wg_transform_read_data(wg_transform_t transform, struct wg_sample *sample); + HRESULT gstreamer_byte_stream_handler_create(REFIID riid, void **obj); HRESULT gstreamer_byte_stream_handler_2_create(REFIID riid, void **obj); diff --git a/dlls/winegstreamer/h264_decoder.c b/dlls/winegstreamer/h264_decoder.c deleted file mode 100644 index fae4ede3b54..00000000000 --- a/dlls/winegstreamer/h264_decoder.c +++ /dev/null @@ -1,882 +0,0 @@ -/* H264 Decoder Transform - * - * Copyright 2022 Rémi Bernon for CodeWeavers - * - * This library is free software; you can redistribute it and/or - * modify it under the terms of the GNU Lesser General Public - * License as published by the Free Software Foundation; either - * version 2.1 of the License, or (at your option) any later version. - * - * This library is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Lesser General Public License for more details. 
- * - * You should have received a copy of the GNU Lesser General Public - * License along with this library; if not, write to the Free Software - * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA - */ - -#include "gst_private.h" - -#include "mfapi.h" -#include "mferror.h" -#include "mfobjects.h" -#include "mftransform.h" - -#include "wine/debug.h" - -#include "initguid.h" - -#include "codecapi.h" - -WINE_DEFAULT_DEBUG_CHANNEL(mfplat); -WINE_DECLARE_DEBUG_CHANNEL(winediag); - -static const GUID *const video_decoder_output_types[] = -{ - &MFVideoFormat_NV12, - &MFVideoFormat_YV12, - &MFVideoFormat_IYUV, - &MFVideoFormat_I420, - &MFVideoFormat_YUY2, -}; - -struct h264_decoder -{ - IMFTransform IMFTransform_iface; - LONG refcount; - - IMFAttributes *attributes; - IMFAttributes *output_attributes; - - UINT input_type_count; - const GUID *const *input_types; - UINT output_type_count; - const GUID *const *output_types; - - UINT64 sample_time; - IMFMediaType *input_type; - MFT_INPUT_STREAM_INFO input_info; - IMFMediaType *output_type; - MFT_OUTPUT_STREAM_INFO output_info; - IMFMediaType *stream_type; - - wg_transform_t wg_transform; - struct wg_sample_queue *wg_sample_queue; - - IMFVideoSampleAllocatorEx *allocator; - BOOL allocator_initialized; - IMFTransform *copier; - IMFMediaBuffer *temp_buffer; -}; - -static struct h264_decoder *impl_from_IMFTransform(IMFTransform *iface) -{ - return CONTAINING_RECORD(iface, struct h264_decoder, IMFTransform_iface); -} - -static HRESULT try_create_wg_transform(struct h264_decoder *decoder) -{ - /* Call of Duty: Black Ops 3 doesn't care about the ProcessInput/ProcessOutput - * return values, it calls them in a specific order and expects the decoder - * transform to be able to queue its input buffers. We need to use a buffer list - * to match its expectations. 
- */ - struct wg_transform_attrs attrs = - { - .output_plane_align = 15, - .input_queue_length = 15, - .allow_size_change = TRUE, - }; - struct wg_format input_format; - struct wg_format output_format; - UINT32 low_latency; - - if (decoder->wg_transform) - wg_transform_destroy(decoder->wg_transform); - decoder->wg_transform = 0; - - mf_media_type_to_wg_format(decoder->input_type, &input_format); - if (input_format.major_type == WG_MAJOR_TYPE_UNKNOWN) - return MF_E_INVALIDMEDIATYPE; - - mf_media_type_to_wg_format(decoder->output_type, &output_format); - if (output_format.major_type == WG_MAJOR_TYPE_UNKNOWN) - return MF_E_INVALIDMEDIATYPE; - - if (SUCCEEDED(IMFAttributes_GetUINT32(decoder->attributes, &MF_LOW_LATENCY, &low_latency))) - attrs.low_latency = !!low_latency; - - if (!(decoder->wg_transform = wg_transform_create(&input_format, &output_format, &attrs))) - return E_FAIL; - - return S_OK; -} - -static HRESULT create_output_media_type(struct h264_decoder *decoder, const GUID *subtype, - IMFMediaType *output_type, IMFMediaType **media_type) -{ - IMFMediaType *default_type = decoder->output_type, *stream_type = output_type ? 
output_type : decoder->stream_type; - IMFVideoMediaType *video_type; - UINT32 value, width, height; - MFVideoArea aperture; - UINT64 ratio; - HRESULT hr; - - if (FAILED(hr = MFCreateVideoMediaTypeFromSubtype(subtype, &video_type))) - return hr; - - if (FAILED(IMFMediaType_GetUINT64(stream_type, &MF_MT_FRAME_SIZE, &ratio))) - ratio = (UINT64)1920 << 32 | 1080; - if (FAILED(hr = IMFVideoMediaType_SetUINT64(video_type, &MF_MT_FRAME_SIZE, ratio))) - goto done; - width = ratio >> 32; - height = ratio; - - if (FAILED(IMFMediaType_GetUINT64(stream_type, &MF_MT_FRAME_RATE, &ratio))) - ratio = (UINT64)30000 << 32 | 1001; - if (FAILED(hr = IMFVideoMediaType_SetUINT64(video_type, &MF_MT_FRAME_RATE, ratio))) - goto done; - - if (FAILED(IMFMediaType_GetUINT64(stream_type, &MF_MT_PIXEL_ASPECT_RATIO, &ratio))) - ratio = (UINT64)1 << 32 | 1; - if (FAILED(hr = IMFVideoMediaType_SetUINT64(video_type, &MF_MT_PIXEL_ASPECT_RATIO, ratio))) - goto done; - - if (!output_type || FAILED(IMFMediaType_GetUINT32(output_type, &MF_MT_SAMPLE_SIZE, &value))) - hr = MFCalculateImageSize(subtype, width, height, &value); - if (FAILED(hr) || FAILED(hr = IMFVideoMediaType_SetUINT32(video_type, &MF_MT_SAMPLE_SIZE, value))) - goto done; - - if (!output_type || FAILED(IMFMediaType_GetUINT32(output_type, &MF_MT_DEFAULT_STRIDE, &value))) - hr = MFGetStrideForBitmapInfoHeader(subtype->Data1, width, (LONG *)&value); - if (FAILED(hr) || FAILED(hr = IMFVideoMediaType_SetUINT32(video_type, &MF_MT_DEFAULT_STRIDE, value))) - goto done; - - if (!default_type || FAILED(IMFMediaType_GetUINT32(default_type, &MF_MT_INTERLACE_MODE, &value))) - value = MFVideoInterlace_MixedInterlaceOrProgressive; - if (FAILED(hr = IMFVideoMediaType_SetUINT32(video_type, &MF_MT_INTERLACE_MODE, value))) - goto done; - - if (!default_type || FAILED(IMFMediaType_GetUINT32(default_type, &MF_MT_ALL_SAMPLES_INDEPENDENT, &value))) - value = 1; - if (FAILED(hr = IMFVideoMediaType_SetUINT32(video_type, &MF_MT_ALL_SAMPLES_INDEPENDENT, value))) - 
goto done; - - if (!default_type || FAILED(IMFMediaType_GetUINT32(default_type, &MF_MT_VIDEO_ROTATION, &value))) - value = 0; - if (FAILED(hr = IMFVideoMediaType_SetUINT32(video_type, &MF_MT_VIDEO_ROTATION, value))) - goto done; - - if (!default_type || FAILED(IMFMediaType_GetUINT32(default_type, &MF_MT_FIXED_SIZE_SAMPLES, &value))) - value = 1; - if (FAILED(hr = IMFVideoMediaType_SetUINT32(video_type, &MF_MT_FIXED_SIZE_SAMPLES, value))) - goto done; - - if (SUCCEEDED(IMFMediaType_GetBlob(stream_type, &MF_MT_MINIMUM_DISPLAY_APERTURE, - (BYTE *)&aperture, sizeof(aperture), &value))) - { - if (FAILED(hr = IMFVideoMediaType_SetBlob(video_type, &MF_MT_MINIMUM_DISPLAY_APERTURE, - (BYTE *)&aperture, sizeof(aperture)))) - goto done; - } - -done: - if (SUCCEEDED(hr)) - *media_type = (IMFMediaType *)video_type; - else - { - IMFVideoMediaType_Release(video_type); - *media_type = NULL; - } - - return hr; -} - -static HRESULT init_allocator(struct h264_decoder *decoder) -{ - HRESULT hr; - - if (decoder->allocator_initialized) - return S_OK; - - if (FAILED(hr = IMFTransform_SetInputType(decoder->copier, 0, decoder->output_type, 0))) - return hr; - if (FAILED(hr = IMFTransform_SetOutputType(decoder->copier, 0, decoder->output_type, 0))) - return hr; - - if (FAILED(hr = IMFVideoSampleAllocatorEx_InitializeSampleAllocatorEx(decoder->allocator, 10, 10, - decoder->attributes, decoder->output_type))) - return hr; - decoder->allocator_initialized = TRUE; - return S_OK; -} - -static void uninit_allocator(struct h264_decoder *decoder) -{ - IMFVideoSampleAllocatorEx_UninitializeSampleAllocator(decoder->allocator); - decoder->allocator_initialized = FALSE; -} - -static HRESULT WINAPI transform_QueryInterface(IMFTransform *iface, REFIID iid, void **out) -{ - struct h264_decoder *decoder = impl_from_IMFTransform(iface); - - TRACE("iface %p, iid %s, out %p.\n", iface, debugstr_guid(iid), out); - - if (IsEqualGUID(iid, &IID_IUnknown) || - IsEqualGUID(iid, &IID_IMFTransform)) - *out = 
&decoder->IMFTransform_iface; - else - { - *out = NULL; - WARN("%s not implemented, returning E_NOINTERFACE.\n", debugstr_guid(iid)); - return E_NOINTERFACE; - } - - IUnknown_AddRef((IUnknown *)*out); - return S_OK; -} - -static ULONG WINAPI transform_AddRef(IMFTransform *iface) -{ - struct h264_decoder *decoder = impl_from_IMFTransform(iface); - ULONG refcount = InterlockedIncrement(&decoder->refcount); - - TRACE("iface %p increasing refcount to %lu.\n", decoder, refcount); - - return refcount; -} - -static ULONG WINAPI transform_Release(IMFTransform *iface) -{ - struct h264_decoder *decoder = impl_from_IMFTransform(iface); - ULONG refcount = InterlockedDecrement(&decoder->refcount); - - TRACE("iface %p decreasing refcount to %lu.\n", decoder, refcount); - - if (!refcount) - { - IMFTransform_Release(decoder->copier); - IMFVideoSampleAllocatorEx_Release(decoder->allocator); - if (decoder->temp_buffer) - IMFMediaBuffer_Release(decoder->temp_buffer); - if (decoder->wg_transform) - wg_transform_destroy(decoder->wg_transform); - if (decoder->input_type) - IMFMediaType_Release(decoder->input_type); - if (decoder->output_type) - IMFMediaType_Release(decoder->output_type); - if (decoder->output_attributes) - IMFAttributes_Release(decoder->output_attributes); - if (decoder->attributes) - IMFAttributes_Release(decoder->attributes); - wg_sample_queue_destroy(decoder->wg_sample_queue); - free(decoder); - } - - return refcount; -} - -static HRESULT WINAPI transform_GetStreamLimits(IMFTransform *iface, DWORD *input_minimum, - DWORD *input_maximum, DWORD *output_minimum, DWORD *output_maximum) -{ - TRACE("iface %p, input_minimum %p, input_maximum %p, output_minimum %p, output_maximum %p.\n", - iface, input_minimum, input_maximum, output_minimum, output_maximum); - *input_minimum = *input_maximum = *output_minimum = *output_maximum = 1; - return S_OK; -} - -static HRESULT WINAPI transform_GetStreamCount(IMFTransform *iface, DWORD *inputs, DWORD *outputs) -{ - TRACE("iface %p, 
inputs %p, outputs %p.\n", iface, inputs, outputs); - *inputs = *outputs = 1; - return S_OK; -} - -static HRESULT WINAPI transform_GetStreamIDs(IMFTransform *iface, DWORD input_size, DWORD *inputs, - DWORD output_size, DWORD *outputs) -{ - TRACE("iface %p, input_size %lu, inputs %p, output_size %lu, outputs %p.\n", iface, - input_size, inputs, output_size, outputs); - return E_NOTIMPL; -} - -static HRESULT WINAPI transform_GetInputStreamInfo(IMFTransform *iface, DWORD id, MFT_INPUT_STREAM_INFO *info) -{ - struct h264_decoder *decoder = impl_from_IMFTransform(iface); - - TRACE("iface %p, id %#lx, info %p.\n", iface, id, info); - - *info = decoder->input_info; - return S_OK; -} - -static HRESULT WINAPI transform_GetOutputStreamInfo(IMFTransform *iface, DWORD id, MFT_OUTPUT_STREAM_INFO *info) -{ - struct h264_decoder *decoder = impl_from_IMFTransform(iface); - - TRACE("iface %p, id %#lx, info %p.\n", iface, id, info); - - *info = decoder->output_info; - return S_OK; -} - -static HRESULT WINAPI transform_GetAttributes(IMFTransform *iface, IMFAttributes **attributes) -{ - struct h264_decoder *decoder = impl_from_IMFTransform(iface); - - FIXME("iface %p, attributes %p semi-stub!\n", iface, attributes); - - if (!attributes) - return E_POINTER; - - IMFAttributes_AddRef((*attributes = decoder->attributes)); - return S_OK; -} - -static HRESULT WINAPI transform_GetInputStreamAttributes(IMFTransform *iface, DWORD id, IMFAttributes **attributes) -{ - TRACE("iface %p, id %#lx, attributes %p.\n", iface, id, attributes); - return E_NOTIMPL; -} - -static HRESULT WINAPI transform_GetOutputStreamAttributes(IMFTransform *iface, DWORD id, IMFAttributes **attributes) -{ - struct h264_decoder *decoder = impl_from_IMFTransform(iface); - - FIXME("iface %p, id %#lx, attributes %p semi-stub!\n", iface, id, attributes); - - if (!attributes) - return E_POINTER; - if (id) - return MF_E_INVALIDSTREAMNUMBER; - - IMFAttributes_AddRef((*attributes = decoder->output_attributes)); - return S_OK; -} - 
-static HRESULT WINAPI transform_DeleteInputStream(IMFTransform *iface, DWORD id) -{ - TRACE("iface %p, id %#lx.\n", iface, id); - return E_NOTIMPL; -} - -static HRESULT WINAPI transform_AddInputStreams(IMFTransform *iface, DWORD streams, DWORD *ids) -{ - TRACE("iface %p, streams %lu, ids %p.\n", iface, streams, ids); - return E_NOTIMPL; -} - -static HRESULT WINAPI transform_GetInputAvailableType(IMFTransform *iface, DWORD id, DWORD index, - IMFMediaType **type) -{ - struct h264_decoder *decoder = impl_from_IMFTransform(iface); - - TRACE("iface %p, id %#lx, index %#lx, type %p.\n", iface, id, index, type); - - *type = NULL; - if (index >= decoder->input_type_count) - return MF_E_NO_MORE_TYPES; - return MFCreateVideoMediaTypeFromSubtype(decoder->input_types[index], (IMFVideoMediaType **)type); -} - -static HRESULT WINAPI transform_GetOutputAvailableType(IMFTransform *iface, DWORD id, - DWORD index, IMFMediaType **type) -{ - struct h264_decoder *decoder = impl_from_IMFTransform(iface); - - TRACE("iface %p, id %#lx, index %#lx, type %p.\n", iface, id, index, type); - - *type = NULL; - if (!decoder->input_type) - return MF_E_TRANSFORM_TYPE_NOT_SET; - if (index >= decoder->output_type_count) - return MF_E_NO_MORE_TYPES; - return create_output_media_type(decoder, decoder->output_types[index], NULL, type); -} - -static HRESULT WINAPI transform_SetInputType(IMFTransform *iface, DWORD id, IMFMediaType *type, DWORD flags) -{ - struct h264_decoder *decoder = impl_from_IMFTransform(iface); - GUID major, subtype; - UINT64 frame_size; - HRESULT hr; - ULONG i; - - TRACE("iface %p, id %#lx, type %p, flags %#lx.\n", iface, id, type, flags); - - if (FAILED(hr = IMFMediaType_GetGUID(type, &MF_MT_MAJOR_TYPE, &major)) || - FAILED(hr = IMFMediaType_GetGUID(type, &MF_MT_SUBTYPE, &subtype))) - return E_INVALIDARG; - - if (!IsEqualGUID(&major, &MFMediaType_Video)) - return MF_E_INVALIDMEDIATYPE; - - for (i = 0; i < decoder->input_type_count; ++i) - if (IsEqualGUID(&subtype, 
decoder->input_types[i])) - break; - if (i == decoder->input_type_count) - return MF_E_INVALIDMEDIATYPE; - if (flags & MFT_SET_TYPE_TEST_ONLY) - return S_OK; - - if (decoder->output_type) - { - IMFMediaType_Release(decoder->output_type); - decoder->output_type = NULL; - } - - if (decoder->input_type) - IMFMediaType_Release(decoder->input_type); - IMFMediaType_AddRef((decoder->input_type = type)); - - if (SUCCEEDED(IMFMediaType_GetUINT64(type, &MF_MT_FRAME_SIZE, &frame_size))) - { - if (FAILED(hr = IMFMediaType_SetUINT64(decoder->stream_type, &MF_MT_FRAME_SIZE, frame_size))) - WARN("Failed to update stream type frame size, hr %#lx\n", hr); - decoder->output_info.cbSize = (frame_size >> 32) * (UINT32)frame_size * 2; - } - - return S_OK; -} - -static HRESULT WINAPI transform_SetOutputType(IMFTransform *iface, DWORD id, IMFMediaType *type, DWORD flags) -{ - struct h264_decoder *decoder = impl_from_IMFTransform(iface); - UINT64 frame_size, stream_frame_size; - GUID major, subtype; - HRESULT hr; - ULONG i; - - TRACE("iface %p, id %#lx, type %p, flags %#lx.\n", iface, id, type, flags); - - if (!decoder->input_type) - return MF_E_TRANSFORM_TYPE_NOT_SET; - - if (FAILED(hr = IMFMediaType_GetGUID(type, &MF_MT_MAJOR_TYPE, &major)) || - FAILED(hr = IMFMediaType_GetGUID(type, &MF_MT_SUBTYPE, &subtype))) - return hr; - - if (!IsEqualGUID(&major, &MFMediaType_Video)) - return MF_E_INVALIDMEDIATYPE; - - for (i = 0; i < decoder->output_type_count; ++i) - if (IsEqualGUID(&subtype, decoder->output_types[i])) - break; - if (i == decoder->output_type_count) - return MF_E_INVALIDMEDIATYPE; - - if (FAILED(hr = IMFMediaType_GetUINT64(type, &MF_MT_FRAME_SIZE, &frame_size))) - return MF_E_INVALIDMEDIATYPE; - if (SUCCEEDED(IMFMediaType_GetUINT64(decoder->stream_type, &MF_MT_FRAME_SIZE, &stream_frame_size)) - && frame_size != stream_frame_size) - return MF_E_INVALIDMEDIATYPE; - if (flags & MFT_SET_TYPE_TEST_ONLY) - return S_OK; - - if (decoder->output_type) - 
IMFMediaType_Release(decoder->output_type); - IMFMediaType_AddRef((decoder->output_type = type)); - - if (decoder->wg_transform) - { - struct wg_format output_format; - mf_media_type_to_wg_format(decoder->output_type, &output_format); - - if (output_format.major_type == WG_MAJOR_TYPE_UNKNOWN - || !wg_transform_set_output_format(decoder->wg_transform, &output_format)) - { - IMFMediaType_Release(decoder->output_type); - decoder->output_type = NULL; - return MF_E_INVALIDMEDIATYPE; - } - } - else if (FAILED(hr = try_create_wg_transform(decoder))) - { - IMFMediaType_Release(decoder->output_type); - decoder->output_type = NULL; - } - - return hr; -} - -static HRESULT WINAPI transform_GetInputCurrentType(IMFTransform *iface, DWORD id, IMFMediaType **type) -{ - struct h264_decoder *decoder = impl_from_IMFTransform(iface); - HRESULT hr; - - TRACE("iface %p, id %#lx, type %p\n", iface, id, type); - - if (!decoder->input_type) - return MF_E_TRANSFORM_TYPE_NOT_SET; - - if (FAILED(hr = MFCreateMediaType(type))) - return hr; - - return IMFMediaType_CopyAllItems(decoder->input_type, (IMFAttributes *)*type); -} - -static HRESULT WINAPI transform_GetOutputCurrentType(IMFTransform *iface, DWORD id, IMFMediaType **type) -{ - struct h264_decoder *decoder = impl_from_IMFTransform(iface); - GUID subtype; - HRESULT hr; - - TRACE("iface %p, id %#lx, type %p\n", iface, id, type); - - if (!decoder->output_type) - return MF_E_TRANSFORM_TYPE_NOT_SET; - if (FAILED(hr = IMFMediaType_GetGUID(decoder->output_type, &MF_MT_SUBTYPE, &subtype))) - return hr; - return create_output_media_type(decoder, &subtype, decoder->output_type, type); -} - -static HRESULT WINAPI transform_GetInputStatus(IMFTransform *iface, DWORD id, DWORD *flags) -{ - struct h264_decoder *decoder = impl_from_IMFTransform(iface); - - TRACE("iface %p, id %#lx, flags %p.\n", iface, id, flags); - - if (!decoder->wg_transform) - return MF_E_TRANSFORM_TYPE_NOT_SET; - - *flags = MFT_INPUT_STATUS_ACCEPT_DATA; - return S_OK; -} - -static 
HRESULT WINAPI transform_GetOutputStatus(IMFTransform *iface, DWORD *flags) -{ - FIXME("iface %p, flags %p stub!\n", iface, flags); - return E_NOTIMPL; -} - -static HRESULT WINAPI transform_SetOutputBounds(IMFTransform *iface, LONGLONG lower, LONGLONG upper) -{ - TRACE("iface %p, lower %I64d, upper %I64d.\n", iface, lower, upper); - return E_NOTIMPL; -} - -static HRESULT WINAPI transform_ProcessEvent(IMFTransform *iface, DWORD id, IMFMediaEvent *event) -{ - FIXME("iface %p, id %#lx, event %p stub!\n", iface, id, event); - return E_NOTIMPL; -} - -static HRESULT WINAPI transform_ProcessMessage(IMFTransform *iface, MFT_MESSAGE_TYPE message, ULONG_PTR param) -{ - struct h264_decoder *decoder = impl_from_IMFTransform(iface); - HRESULT hr; - - TRACE("iface %p, message %#x, param %Ix.\n", iface, message, param); - - switch (message) - { - case MFT_MESSAGE_SET_D3D_MANAGER: - if (FAILED(hr = IMFVideoSampleAllocatorEx_SetDirectXManager(decoder->allocator, (IUnknown *)param))) - return hr; - - uninit_allocator(decoder); - if (param) - decoder->output_info.dwFlags |= MFT_OUTPUT_STREAM_PROVIDES_SAMPLES; - else - decoder->output_info.dwFlags &= ~MFT_OUTPUT_STREAM_PROVIDES_SAMPLES; - return S_OK; - - case MFT_MESSAGE_COMMAND_DRAIN: - return wg_transform_drain(decoder->wg_transform); - - case MFT_MESSAGE_COMMAND_FLUSH: - return wg_transform_flush(decoder->wg_transform); - - default: - FIXME("Ignoring message %#x.\n", message); - return S_OK; - } -} - -static HRESULT WINAPI transform_ProcessInput(IMFTransform *iface, DWORD id, IMFSample *sample, DWORD flags) -{ - struct h264_decoder *decoder = impl_from_IMFTransform(iface); - - TRACE("iface %p, id %#lx, sample %p, flags %#lx.\n", iface, id, sample, flags); - - if (!decoder->wg_transform) - return MF_E_TRANSFORM_TYPE_NOT_SET; - - return wg_transform_push_mf(decoder->wg_transform, sample, decoder->wg_sample_queue); -} - -static HRESULT output_sample(struct h264_decoder *decoder, IMFSample **out, IMFSample *src_sample) -{ - 
MFT_OUTPUT_DATA_BUFFER output[1]; - IMFSample *sample; - DWORD status; - HRESULT hr; - - if (FAILED(hr = init_allocator(decoder))) - { - ERR("Failed to initialize allocator, hr %#lx.\n", hr); - return hr; - } - if (FAILED(hr = IMFVideoSampleAllocatorEx_AllocateSample(decoder->allocator, &sample))) - return hr; - - if (FAILED(hr = IMFTransform_ProcessInput(decoder->copier, 0, src_sample, 0))) - { - IMFSample_Release(sample); - return hr; - } - output[0].pSample = sample; - if (FAILED(hr = IMFTransform_ProcessOutput(decoder->copier, 0, 1, output, &status))) - { - IMFSample_Release(sample); - return hr; - } - *out = sample; - return S_OK; -} - -static HRESULT handle_stream_type_change(struct h264_decoder *decoder, const struct wg_format *format) -{ - UINT64 frame_size, frame_rate; - HRESULT hr; - - if (decoder->stream_type) - IMFMediaType_Release(decoder->stream_type); - if (!(decoder->stream_type = mf_media_type_from_wg_format(format))) - return E_OUTOFMEMORY; - - if (SUCCEEDED(IMFMediaType_GetUINT64(decoder->output_type, &MF_MT_FRAME_RATE, &frame_rate)) - && FAILED(hr = IMFMediaType_SetUINT64(decoder->stream_type, &MF_MT_FRAME_RATE, frame_rate))) - WARN("Failed to update stream type frame size, hr %#lx\n", hr); - - if (FAILED(hr = IMFMediaType_GetUINT64(decoder->stream_type, &MF_MT_FRAME_SIZE, &frame_size))) - return hr; - decoder->output_info.cbSize = (frame_size >> 32) * (UINT32)frame_size * 2; - uninit_allocator(decoder); - - return MF_E_TRANSFORM_STREAM_CHANGE; -} - -static HRESULT WINAPI transform_ProcessOutput(IMFTransform *iface, DWORD flags, DWORD count, - MFT_OUTPUT_DATA_BUFFER *samples, DWORD *status) -{ - struct h264_decoder *decoder = impl_from_IMFTransform(iface); - struct wg_format wg_format; - UINT32 sample_size; - LONGLONG duration; - IMFSample *sample; - UINT64 frame_size, frame_rate; - GUID subtype; - DWORD size; - HRESULT hr; - - TRACE("iface %p, flags %#lx, count %lu, samples %p, status %p.\n", iface, flags, count, samples, status); - - if (count 
!= 1) - return E_INVALIDARG; - - if (!decoder->wg_transform) - return MF_E_TRANSFORM_TYPE_NOT_SET; - - *status = samples->dwStatus = 0; - if (!(sample = samples->pSample) && !(decoder->output_info.dwFlags & MFT_OUTPUT_STREAM_PROVIDES_SAMPLES)) - return E_INVALIDARG; - - if (FAILED(hr = IMFMediaType_GetGUID(decoder->output_type, &MF_MT_SUBTYPE, &subtype))) - return hr; - if (FAILED(hr = IMFMediaType_GetUINT64(decoder->output_type, &MF_MT_FRAME_SIZE, &frame_size))) - return hr; - if (FAILED(hr = MFCalculateImageSize(&subtype, frame_size >> 32, (UINT32)frame_size, &sample_size))) - return hr; - - if (decoder->output_info.dwFlags & MFT_OUTPUT_STREAM_PROVIDES_SAMPLES) - { - if (decoder->temp_buffer) - { - if (FAILED(IMFMediaBuffer_GetMaxLength(decoder->temp_buffer, &size)) || size < sample_size) - { - IMFMediaBuffer_Release(decoder->temp_buffer); - decoder->temp_buffer = NULL; - } - } - if (!decoder->temp_buffer && FAILED(hr = MFCreateMemoryBuffer(sample_size, &decoder->temp_buffer))) - return hr; - if (FAILED(hr = MFCreateSample(&sample))) - return hr; - if (FAILED(hr = IMFSample_AddBuffer(sample, decoder->temp_buffer))) - { - IMFSample_Release(sample); - return hr; - } - } - - if (SUCCEEDED(hr = wg_transform_read_mf(decoder->wg_transform, sample, - sample_size, &wg_format, &samples->dwStatus))) - { - wg_sample_queue_flush(decoder->wg_sample_queue, false); - - if (FAILED(IMFMediaType_GetUINT64(decoder->input_type, &MF_MT_FRAME_RATE, &frame_rate))) - frame_rate = (UINT64)30000 << 32 | 1001; - - duration = (UINT64)10000000 * (UINT32)frame_rate / (frame_rate >> 32); - if (FAILED(IMFSample_SetSampleTime(sample, decoder->sample_time))) - WARN("Failed to set sample time\n"); - if (FAILED(IMFSample_SetSampleDuration(sample, duration))) - WARN("Failed to set sample duration\n"); - decoder->sample_time += duration; - } - - if (hr == MF_E_TRANSFORM_STREAM_CHANGE) - { - samples[0].dwStatus |= MFT_OUTPUT_DATA_BUFFER_FORMAT_CHANGE; - *status |= MFT_OUTPUT_DATA_BUFFER_FORMAT_CHANGE; 
- hr = handle_stream_type_change(decoder, &wg_format); - } - - if (decoder->output_info.dwFlags & MFT_OUTPUT_STREAM_PROVIDES_SAMPLES) - { - if (hr == S_OK && FAILED(hr = output_sample(decoder, &samples->pSample, sample))) - ERR("Failed to output sample, hr %#lx.\n", hr); - IMFSample_Release(sample); - } - - return hr; -} - -static const IMFTransformVtbl transform_vtbl = -{ - transform_QueryInterface, - transform_AddRef, - transform_Release, - transform_GetStreamLimits, - transform_GetStreamCount, - transform_GetStreamIDs, - transform_GetInputStreamInfo, - transform_GetOutputStreamInfo, - transform_GetAttributes, - transform_GetInputStreamAttributes, - transform_GetOutputStreamAttributes, - transform_DeleteInputStream, - transform_AddInputStreams, - transform_GetInputAvailableType, - transform_GetOutputAvailableType, - transform_SetInputType, - transform_SetOutputType, - transform_GetInputCurrentType, - transform_GetOutputCurrentType, - transform_GetInputStatus, - transform_GetOutputStatus, - transform_SetOutputBounds, - transform_ProcessEvent, - transform_ProcessMessage, - transform_ProcessInput, - transform_ProcessOutput, -}; - -static HRESULT video_decoder_create_with_types(const GUID *const *input_types, UINT input_type_count, - const GUID *const *output_types, UINT output_type_count, IMFTransform **ret) -{ - struct h264_decoder *decoder; - HRESULT hr; - - if (!(decoder = calloc(1, sizeof(*decoder)))) - return E_OUTOFMEMORY; - - decoder->IMFTransform_iface.lpVtbl = &transform_vtbl; - decoder->refcount = 1; - - decoder->input_type_count = input_type_count; - decoder->input_types = input_types; - decoder->output_type_count = output_type_count; - decoder->output_types = output_types; - - decoder->input_info.dwFlags = MFT_INPUT_STREAM_WHOLE_SAMPLES | MFT_INPUT_STREAM_SINGLE_SAMPLE_PER_BUFFER - | MFT_INPUT_STREAM_FIXED_SAMPLE_SIZE; - decoder->input_info.cbSize = 0x1000; - decoder->output_info.dwFlags = MFT_OUTPUT_STREAM_WHOLE_SAMPLES | 
MFT_OUTPUT_STREAM_SINGLE_SAMPLE_PER_BUFFER - | MFT_OUTPUT_STREAM_FIXED_SAMPLE_SIZE; - decoder->output_info.cbSize = 1920 * 1088 * 2; - - if (FAILED(hr = MFCreateMediaType(&decoder->stream_type))) - goto failed; - if (FAILED(hr = MFCreateAttributes(&decoder->attributes, 16))) - goto failed; - if (FAILED(hr = IMFAttributes_SetUINT32(decoder->attributes, &MF_LOW_LATENCY, 0))) - goto failed; - if (FAILED(hr = IMFAttributes_SetUINT32(decoder->attributes, &MF_SA_D3D11_AWARE, TRUE))) - goto failed; - if (FAILED(hr = IMFAttributes_SetUINT32(decoder->attributes, &AVDecVideoAcceleration_H264, TRUE))) - goto failed; - - if (FAILED(hr = MFCreateAttributes(&decoder->output_attributes, 0))) - goto failed; - if (FAILED(hr = wg_sample_queue_create(&decoder->wg_sample_queue))) - goto failed; - if (FAILED(hr = MFCreateVideoSampleAllocatorEx(&IID_IMFVideoSampleAllocatorEx, (void **)&decoder->allocator))) - goto failed; - if (FAILED(hr = MFCreateSampleCopierMFT(&decoder->copier))) - goto failed; - - *ret = &decoder->IMFTransform_iface; - TRACE("Created decoder %p\n", *ret); - return S_OK; - -failed: - if (decoder->allocator) - IMFVideoSampleAllocatorEx_Release(decoder->allocator); - if (decoder->wg_sample_queue) - wg_sample_queue_destroy(decoder->wg_sample_queue); - if (decoder->output_attributes) - IMFAttributes_Release(decoder->output_attributes); - if (decoder->attributes) - IMFAttributes_Release(decoder->attributes); - if (decoder->stream_type) - IMFMediaType_Release(decoder->stream_type); - free(decoder); - return hr; -} - -static const GUID *const h264_decoder_input_types[] = -{ - &MFVideoFormat_H264, - &MFVideoFormat_H264_ES, -}; - -HRESULT h264_decoder_create(REFIID riid, void **out) -{ - static const struct wg_format output_format = - { - .major_type = WG_MAJOR_TYPE_VIDEO, - .u.video = - { - .format = WG_VIDEO_FORMAT_I420, - .width = 1920, - .height = 1080, - }, - }; - static const struct wg_format input_format = {.major_type = WG_MAJOR_TYPE_VIDEO_H264}; - struct 
wg_transform_attrs attrs = {0}; - wg_transform_t transform; - IMFTransform *iface; - HRESULT hr; - - TRACE("riid %s, out %p.\n", debugstr_guid(riid), out); - - if (!(transform = wg_transform_create(&input_format, &output_format, &attrs))) - { - ERR_(winediag)("GStreamer doesn't support H.264 decoding, please install appropriate plugins\n"); - return E_FAIL; - } - wg_transform_destroy(transform); - - if (FAILED(hr = video_decoder_create_with_types(h264_decoder_input_types, ARRAY_SIZE(h264_decoder_input_types), - video_decoder_output_types, ARRAY_SIZE(video_decoder_output_types), &iface))) - return hr; - - hr = IMFTransform_QueryInterface(iface, riid, out); - IMFTransform_Release(iface); - return hr; -} diff --git a/dlls/winegstreamer/main.c b/dlls/winegstreamer/main.c index f7733fcbcfb..374924a3e6a 100644 --- a/dlls/winegstreamer/main.c +++ b/dlls/winegstreamer/main.c @@ -31,6 +31,8 @@ #include "dmoreg.h" #include "gst_guids.h" #include "wmcodecdsp.h" +#include "mferror.h" +#include "mfapi.h" WINE_DEFAULT_DEBUG_CHANNEL(quartz); WINE_DECLARE_DEBUG_CHANNEL(mfplat); @@ -68,6 +70,101 @@ bool array_reserve(void **elements, size_t *capacity, size_t count, size_t size) return TRUE; } +static HRESULT video_format_from_media_type(IMFMediaType *media_type, MFVIDEOFORMAT **format, UINT32 *format_size) +{ + GUID subtype; + HRESULT hr; + + if (FAILED(hr = IMFMediaType_GetGUID(media_type, &MF_MT_SUBTYPE, &subtype))) + return hr; + if (FAILED(hr = MFCreateMFVideoFormatFromMFMediaType(media_type, format, format_size))) + return hr; + + /* fixup MPEG video formats here, so we can consistently use MFVIDEOFORMAT internally */ + if (IsEqualGUID(&subtype, &MEDIASUBTYPE_MPEG1Payload) + || IsEqualGUID(&subtype, &MEDIASUBTYPE_MPEG1Packet) + || IsEqualGUID(&subtype, &MEDIASUBTYPE_MPEG2_VIDEO)) + { + struct mpeg_video_format *mpeg; + UINT32 mpeg_size, len; + + if (FAILED(IMFMediaType_GetBlobSize(media_type, &MF_MT_MPEG_SEQUENCE_HEADER, &len))) + len = 0; + mpeg_size = offsetof(struct 
mpeg_video_format, sequence_header[len]); + + if ((mpeg = CoTaskMemAlloc(mpeg_size))) + { + memset(mpeg, 0, mpeg_size); + mpeg->hdr = **format; + + IMFMediaType_GetBlob(media_type, &MF_MT_MPEG_SEQUENCE_HEADER, mpeg->sequence_header, len, NULL); + IMFMediaType_GetUINT32(media_type, &MF_MT_MPEG_START_TIME_CODE, (UINT32 *)&mpeg->start_time_code); + IMFMediaType_GetUINT32(media_type, &MF_MT_MPEG2_PROFILE, &mpeg->profile); + IMFMediaType_GetUINT32(media_type, &MF_MT_MPEG2_LEVEL, &mpeg->level); + IMFMediaType_GetUINT32(media_type, &MF_MT_MPEG2_FLAGS, &mpeg->flags); + + CoTaskMemFree(*format); + *format = &mpeg->hdr; + *format_size = mpeg_size; + } + } + + return hr; +} + +static HRESULT wg_media_type_from_mf(IMFMediaType *media_type, struct wg_media_type *wg_media_type) +{ + HRESULT hr; + + if (FAILED(hr = IMFMediaType_GetMajorType(media_type, &wg_media_type->major))) + return hr; + + if (IsEqualGUID(&wg_media_type->major, &MFMediaType_Video)) + return video_format_from_media_type(media_type, &wg_media_type->u.video, + &wg_media_type->format_size); + if (IsEqualGUID(&wg_media_type->major, &MFMediaType_Audio)) + return MFCreateWaveFormatExFromMFMediaType(media_type, &wg_media_type->u.audio, + &wg_media_type->format_size, 0); + + FIXME("Unsupported major type %s\n", debugstr_guid(&wg_media_type->major)); + return E_NOTIMPL; +} + +static HRESULT media_type_from_video_format(const MFVIDEOFORMAT *format, IMFMediaType **media_type) +{ + HRESULT hr; + + if (FAILED(hr = MFCreateVideoMediaType(format, (IMFVideoMediaType **)media_type)) || format->dwSize <= sizeof(*format)) + return hr; + + /* fixup MPEG video formats here, so we can consistently use MFVIDEOFORMAT internally */ + if (IsEqualGUID(&format->guidFormat, &MEDIASUBTYPE_MPEG1Payload) + || IsEqualGUID(&format->guidFormat, &MEDIASUBTYPE_MPEG1Packet) + || IsEqualGUID(&format->guidFormat, &MEDIASUBTYPE_MPEG2_VIDEO)) + { + struct mpeg_video_format *mpeg = (struct mpeg_video_format *)format; + IMFMediaType_SetBlob(*media_type, 
&MF_MT_MPEG_SEQUENCE_HEADER, mpeg->sequence_header, mpeg->sequence_header_count); + IMFMediaType_SetUINT32(*media_type, &MF_MT_MPEG_START_TIME_CODE, mpeg->start_time_code); + IMFMediaType_SetUINT32(*media_type, &MF_MT_MPEG2_PROFILE, mpeg->profile); + IMFMediaType_SetUINT32(*media_type, &MF_MT_MPEG2_LEVEL, mpeg->level); + IMFMediaType_SetUINT32(*media_type, &MF_MT_MPEG2_FLAGS, mpeg->flags); + IMFMediaType_DeleteItem(*media_type, &MF_MT_USER_DATA); + } + + return hr; +} + +static HRESULT wg_media_type_to_mf(const struct wg_media_type *wg_media_type, IMFMediaType **media_type) +{ + if (IsEqualGUID(&wg_media_type->major, &MFMediaType_Video)) + return media_type_from_video_format(wg_media_type->u.video, media_type); + if (IsEqualGUID(&wg_media_type->major, &MFMediaType_Audio)) + return MFCreateAudioMediaType(wg_media_type->u.audio, (IMFAudioMediaType **)media_type); + + FIXME("Unsupported major type %s\n", debugstr_guid(&wg_media_type->major)); + return E_NOTIMPL; +} + wg_parser_t wg_parser_create(enum wg_parser_type type, bool output_compressed, bool use_opengl) { struct wg_parser_create_params params = @@ -176,9 +273,48 @@ wg_parser_stream_t wg_parser_get_stream(wg_parser_t parser, uint32_t index) return params.stream; } -void wg_parser_stream_get_preferred_format(wg_parser_stream_t stream, struct wg_format *format) +static HRESULT wg_get_media_type_mf(enum unix_funcs unix_func, void *params, + struct wg_media_type *wg_media_type, IMFMediaType **media_type) { - struct wg_parser_stream_get_preferred_format_params params = + NTSTATUS status; + HRESULT hr; + + if ((status = WINE_UNIX_CALL(unix_func, params)) + && status == STATUS_BUFFER_TOO_SMALL) + { + if (!(wg_media_type->u.format = CoTaskMemAlloc(wg_media_type->format_size))) + return E_OUTOFMEMORY; + status = WINE_UNIX_CALL(unix_func, params); + } + + if (status) + { + CoTaskMemFree(wg_media_type->u.format); + WARN("Failed to get output media type, status %#lx\n", status); + return HRESULT_FROM_NT(status); + } + + hr 
= wg_media_type_to_mf(wg_media_type, media_type); + CoTaskMemFree(wg_media_type->u.format); + return hr; +} + +HRESULT wg_parser_stream_get_current_type_mf(wg_parser_stream_t stream, IMFMediaType **media_type) +{ + struct wg_parser_stream_get_current_type_params params = + { + .stream = stream, + }; + + TRACE("stream %#I64x, media_type %p.\n", stream, media_type); + + return wg_get_media_type_mf(unix_wg_parser_stream_get_current_type, ¶ms, + ¶ms.media_type, media_type); +} + +void wg_parser_stream_get_current_format(wg_parser_stream_t stream, struct wg_format *format) +{ + struct wg_parser_stream_get_current_format_params params = { .stream = stream, .format = format, @@ -186,7 +322,7 @@ void wg_parser_stream_get_preferred_format(wg_parser_stream_t stream, struct wg_ TRACE("stream %#I64x, format %p.\n", stream, format); - WINE_UNIX_CALL(unix_wg_parser_stream_get_preferred_format, ¶ms); + WINE_UNIX_CALL(unix_wg_parser_stream_get_current_format, ¶ms); } void wg_parser_stream_get_codec_format(wg_parser_stream_t stream, struct wg_format *format) @@ -202,14 +338,12 @@ void wg_parser_stream_get_codec_format(wg_parser_stream_t stream, struct wg_form WINE_UNIX_CALL(unix_wg_parser_stream_get_codec_format, ¶ms); } -void wg_parser_stream_enable(wg_parser_stream_t stream, const struct wg_format *format, - uint32_t flags) +void wg_parser_stream_enable(wg_parser_stream_t stream, const struct wg_format *format) { struct wg_parser_stream_enable_params params = { .stream = stream, .format = format, - .flags = flags, }; TRACE("stream %#I64x, format %p.\n", stream, format); @@ -217,6 +351,29 @@ void wg_parser_stream_enable(wg_parser_stream_t stream, const struct wg_format * WINE_UNIX_CALL(unix_wg_parser_stream_enable, ¶ms); } +HRESULT wg_parser_stream_enable_mf(wg_parser_stream_t stream, IMFMediaType *media_type) +{ + struct wg_parser_stream_enable_type_params params = + { + .stream = stream, + }; + NTSTATUS status; + HRESULT hr; + + TRACE("stream %#I64x, media_type %p.\n", stream, 
media_type); + + if (FAILED(hr = wg_media_type_from_mf(media_type, ¶ms.media_type))) + return hr; + if ((status = WINE_UNIX_CALL(unix_wg_parser_stream_enable_type, ¶ms))) + { + WARN("Failed to enable stream, status %#lx.\n", status); + hr = HRESULT_FROM_NT(status); + } + + CoTaskMemFree(params.media_type.u.format); + return hr; +} + void wg_parser_stream_disable(wg_parser_stream_t stream) { TRACE("stream %#I64x.\n", stream); @@ -456,37 +613,33 @@ HRESULT wg_source_set_position(wg_source_t source, uint64_t time) return HRESULT_FROM_NT(WINE_UNIX_CALL(unix_wg_source_set_position, ¶ms)); } -HRESULT wg_source_push_data(wg_source_t source, const void *data, uint32_t size) +HRESULT wg_source_push_data(wg_source_t source, UINT64 offset, const void *data, uint32_t size) { struct wg_source_push_data_params params = { .source = source, + .offset = offset, .data = data, .size = size, }; - TRACE("source %#I64x, data %p, size %#x\n", source, data, size); + TRACE("source %#I64x, offset %#I64x, data %p, size %#x\n", source, offset, data, size); return HRESULT_FROM_NT(WINE_UNIX_CALL(unix_wg_source_push_data, ¶ms)); } -bool wg_source_get_stream_format(wg_source_t source, UINT32 index, - struct wg_format *format) +HRESULT wg_source_get_stream_type(wg_source_t source, UINT32 index, IMFMediaType **media_type) { - struct wg_source_get_stream_format_params params = + struct wg_source_get_stream_type_params params = { .source = source, .index = index, }; - TRACE("source %#I64x, index %u, format %p\n", source, - index, format); - - if (WINE_UNIX_CALL(unix_wg_source_get_stream_format, ¶ms)) - return false; + TRACE("source %#I64x, index %u, media_type %p\n", source, index, media_type); - *format = params.format; - return true; + return wg_get_media_type_mf(unix_wg_source_get_stream_type, ¶ms, + ¶ms.media_type, media_type); } char *wg_source_get_stream_tag(wg_source_t source, UINT32 index, wg_parser_tag tag) @@ -532,38 +685,59 @@ void wg_source_set_stream_flags(wg_source_t source, UINT32 
index, BOOL select) WINE_UNIX_CALL(unix_wg_source_set_stream_flags, ¶ms); } -wg_transform_t wg_transform_create(const struct wg_format *input_format, - const struct wg_format *output_format, const struct wg_transform_attrs *attrs) +HRESULT wg_transform_create_mf(IMFMediaType *input_type, IMFMediaType *output_type, + const struct wg_transform_attrs *attrs, wg_transform_t *transform) { struct wg_transform_create_params params = { - .input_format = input_format, - .output_format = output_format, - .attrs = attrs, + .attrs = *attrs, }; + NTSTATUS status; + HRESULT hr; - TRACE("input_format %p, output_format %p.\n", input_format, output_format); + TRACE("input_type %p, output_type %p.\n", input_type, output_type); - if (WINE_UNIX_CALL(unix_wg_transform_create, ¶ms)) - return 0; + if (FAILED(hr = wg_media_type_from_mf(input_type, ¶ms.input_type))) + return hr; + if (FAILED(hr = wg_media_type_from_mf(output_type, ¶ms.output_type))) + { + CoTaskMemFree(params.input_type.u.format); + return hr; + } - TRACE("Returning transform %#I64x.\n", params.transform); - return params.transform; + if ((status = WINE_UNIX_CALL(unix_wg_transform_create, ¶ms))) + { + WARN("Failed to create transform, status %#lx\n", status); + hr = HRESULT_FROM_NT(status); + } + + CoTaskMemFree(params.output_type.u.format); + CoTaskMemFree(params.input_type.u.format); + *transform = params.transform; + return hr; } -HRESULT wg_transform_create_quartz(const AM_MEDIA_TYPE *input_type, const AM_MEDIA_TYPE *output_type, +HRESULT wg_transform_create_quartz(const AM_MEDIA_TYPE *input_format, const AM_MEDIA_TYPE *output_format, const struct wg_transform_attrs *attrs, wg_transform_t *transform) { - struct wg_format input_format, output_format; + IMFMediaType *input_type, *output_type; + HRESULT hr; - if (!amt_to_wg_format(input_type, &input_format)) - return E_FAIL; - if (!amt_to_wg_format(output_type, &output_format)) - return E_FAIL; + TRACE("input_format %p, output_format %p.\n", input_format, output_format); 
- if (!(*transform = wg_transform_create(&input_format, &output_format, attrs))) - return E_FAIL; - return S_OK; + /* through IMFMediaType to normalize representation to MFVIDEOFORMAT / WAVEFORMATEX */ + if (FAILED(hr = MFCreateMediaTypeFromRepresentation(AM_MEDIA_TYPE_REPRESENTATION, (void *)input_format, &input_type))) + return 0; + if (FAILED(hr = MFCreateMediaTypeFromRepresentation(AM_MEDIA_TYPE_REPRESENTATION, (void *)output_format, &output_type))) + { + IMFMediaType_Release(input_type); + return 0; + } + + hr = wg_transform_create_mf(input_type, output_type, attrs, transform); + IMFMediaType_Release(output_type); + IMFMediaType_Release(input_type); + return hr; } void wg_transform_destroy(wg_transform_t transform) @@ -590,18 +764,16 @@ HRESULT wg_transform_push_data(wg_transform_t transform, struct wg_sample *sampl return params.result; } -HRESULT wg_transform_read_data(wg_transform_t transform, struct wg_sample *sample, - struct wg_format *format) +HRESULT wg_transform_read_data(wg_transform_t transform, struct wg_sample *sample) { struct wg_transform_read_data_params params = { .transform = transform, .sample = sample, - .format = format, }; NTSTATUS status; - TRACE("transform %#I64x, sample %p, format %p.\n", transform, sample, format); + TRACE("transform %#I64x, sample %p.\n", transform, sample); if ((status = WINE_UNIX_CALL(unix_wg_transform_read_data, ¶ms))) return HRESULT_FROM_NT(status); @@ -625,17 +797,43 @@ bool wg_transform_get_status(wg_transform_t transform, bool *accepts_input) return true; } -bool wg_transform_set_output_format(wg_transform_t transform, struct wg_format *format) +HRESULT wg_transform_get_output_type(wg_transform_t transform, IMFMediaType **media_type) { - struct wg_transform_set_output_format_params params = + struct wg_transform_get_output_type_params params = + { + .transform = transform, + }; + + TRACE("transform %#I64x, media_type %p.\n", transform, media_type); + + return 
wg_get_media_type_mf(unix_wg_transform_get_output_type, ¶ms, + ¶ms.media_type, media_type); +} + +HRESULT wg_transform_set_output_type(wg_transform_t transform, IMFMediaType *media_type) +{ + struct wg_transform_set_output_type_params params = { .transform = transform, - .format = format, }; + NTSTATUS status; + HRESULT hr; - TRACE("transform %#I64x, format %p.\n", transform, format); + TRACE("transform %#I64x, media_type %p.\n", transform, media_type); - return !WINE_UNIX_CALL(unix_wg_transform_set_output_format, ¶ms); + if (FAILED(hr = wg_media_type_from_mf(media_type, ¶ms.media_type))) + { + WARN("Failed to initialize media type, hr %#lx\n", hr); + return hr; + } + if ((status = WINE_UNIX_CALL(unix_wg_transform_set_output_type, ¶ms))) + { + WARN("Failed to set transform output type, status %#lx\n", status); + hr = HRESULT_FROM_NT(status); + } + + CoTaskMemFree(params.media_type.u.format); + return hr; } HRESULT wg_transform_drain(wg_transform_t transform) @@ -811,6 +1009,63 @@ HRESULT wg_muxer_finalize(wg_muxer_t muxer) return S_OK; } +static HRESULT check_transform_support(const struct wg_media_type *input, const struct wg_media_type *output) +{ + IMFMediaType *input_type, *output_type; + struct wg_transform_attrs attrs = {0}; + wg_transform_t transform; + HRESULT hr; + + if (FAILED(hr = wg_media_type_to_mf(input, &input_type))) + return hr; + if (FAILED(hr = wg_media_type_to_mf(output, &output_type))) + { + IMFMediaType_Release(input_type); + return hr; + } + + if (SUCCEEDED(hr = wg_transform_create_mf(input_type, output_type, &attrs, &transform))) + wg_transform_destroy(transform); + + IMFMediaType_Release(output_type); + IMFMediaType_Release(input_type); + return hr; +} + +HRESULT check_audio_transform_support(const WAVEFORMATEX *input, const WAVEFORMATEX *output) +{ + const struct wg_media_type input_type = + { + .major = MFMediaType_Audio, + .format_size = sizeof(*input) + input->cbSize, + .u.audio = (WAVEFORMATEX *)input, + }; + const struct wg_media_type 
output_type = + { + .major = MFMediaType_Audio, + .format_size = sizeof(*output) + output->cbSize, + .u.audio = (WAVEFORMATEX *)output, + }; + return check_transform_support(&input_type, &output_type); +} + +HRESULT check_video_transform_support(const MFVIDEOFORMAT *input, const MFVIDEOFORMAT *output) +{ + const struct wg_media_type input_type = + { + .major = MFMediaType_Video, + .format_size = input->dwSize, + .u.video = (MFVIDEOFORMAT *)input, + }; + const struct wg_media_type output_type = + { + .major = MFMediaType_Video, + .format_size = output->dwSize, + .u.video = (MFVIDEOFORMAT *)output, + }; + return check_transform_support(&input_type, &output_type); +} + #define ALIGN(n, alignment) (((n) + (alignment) - 1) & ~((alignment) - 1)) unsigned int wg_format_get_stride(const struct wg_format *format) @@ -824,6 +1079,7 @@ unsigned int wg_format_get_stride(const struct wg_format *format) case WG_VIDEO_FORMAT_BGRA: case WG_VIDEO_FORMAT_BGRx: + case WG_VIDEO_FORMAT_RGBA: return width * 4; case WG_VIDEO_FORMAT_BGR: @@ -857,9 +1113,9 @@ bool wg_video_format_is_rgb(enum wg_video_format format) case WG_VIDEO_FORMAT_BGRA: case WG_VIDEO_FORMAT_BGRx: case WG_VIDEO_FORMAT_BGR: - case WG_VIDEO_FORMAT_RGBA: case WG_VIDEO_FORMAT_RGB15: case WG_VIDEO_FORMAT_RGB16: + case WG_VIDEO_FORMAT_RGBA: return true; default: diff --git a/dlls/winegstreamer/media-converter/videoconv.c b/dlls/winegstreamer/media-converter/videoconv.c index d3cb624629f..eef5cf85e49 100644 --- a/dlls/winegstreamer/media-converter/videoconv.c +++ b/dlls/winegstreamer/media-converter/videoconv.c @@ -387,6 +387,8 @@ static int video_conv_state_create(struct video_conv_state **out) static void video_conv_state_release(struct video_conv_state *state) { + if ((state->state_flags & VIDEO_CONV_IS_DUMPING)) + pthread_mutex_unlock(&dump_fozdb.mutex); if (state->read_fozdb) fozdb_release(state->read_fozdb); close(state->blank_file); @@ -759,6 +761,23 @@ static gboolean video_conv_push_caps(VideoConv *conv, uint32_t 
transcode_tag) return ret; } +static gboolean video_conv_push_segment(VideoConv *conv) +{ + struct video_conv_state *state; + GstSegment segment; + + gst_segment_init(&segment, GST_FORMAT_BYTES); + if (!(state = video_conv_lock_state(conv))) + { + GST_ERROR("VideoConv not yet in READY state?"); + return false; + } + segment.stop = state->our_duration; + pthread_mutex_unlock(&conv->state_mutex); + + return push_event(conv->src_pad, gst_event_new_segment(&segment)); +} + static gboolean video_conv_sink_event_caps(VideoConv *conv, GstEvent *event) { struct video_conv_state *state; @@ -843,6 +862,8 @@ static gboolean video_conv_sink_event_eos(VideoConv *conv, GstEvent *event) return false; if (!video_conv_push_caps(conv, transcode_tag)) return false; + if (!video_conv_push_segment(conv)) + return false; /* return false to cancel upstream pads EOS event handling and avoid setting EOS flag */ return false; @@ -1259,7 +1280,7 @@ static void video_conv_init(VideoConv *conv) conv->active_mode = GST_PAD_MODE_NONE; } -static bool codec_info_to_wg_format(char *codec_info, struct wg_format *codec_format) +static bool codec_info_to_wg_format(char *codec_info, GstCaps *caps) { char *codec_name = codec_info; @@ -1271,63 +1292,65 @@ static bool codec_info_to_wg_format(char *codec_info, struct wg_format *codec_fo /* FIXME: Get width, height, fps etc. from codec info string. 
*/ if (strcmp(codec_name, "cinepak") == 0) { - codec_format->major_type = WG_MAJOR_TYPE_VIDEO_CINEPAK; + gst_structure_set_name(gst_caps_get_structure(caps, 0), "video/x-cinepak"); } else if (strcmp(codec_name, "h264") == 0) { - codec_format->major_type = WG_MAJOR_TYPE_VIDEO_H264; + gst_structure_set_name(gst_caps_get_structure(caps, 0), "video/x-h264"); } else if (strcmp(codec_name, "wmv1") == 0) { - codec_format->major_type = WG_MAJOR_TYPE_VIDEO_WMV; - codec_format->u.video.format = WG_VIDEO_FORMAT_WMV1; + gst_structure_set_name(gst_caps_get_structure(caps, 0), "video/x-wmv"); + gst_caps_set_simple(caps, "wmvversion", G_TYPE_INT, 1, NULL); + gst_caps_set_simple(caps, "format", G_TYPE_STRING, "WMV1", NULL); } else if (strcmp(codec_name, "wmv2") == 0) { - codec_format->major_type = WG_MAJOR_TYPE_VIDEO_WMV; - codec_format->u.video.format = WG_VIDEO_FORMAT_WMV2; + gst_structure_set_name(gst_caps_get_structure(caps, 0), "video/x-wmv"); + gst_caps_set_simple(caps, "wmvversion", G_TYPE_INT, 2, NULL); + gst_caps_set_simple(caps, "format", G_TYPE_STRING, "WMV2", NULL); } else if (strcmp(codec_name, "wmv3") == 0) { - codec_format->major_type = WG_MAJOR_TYPE_VIDEO_WMV; - codec_format->u.video.format = WG_VIDEO_FORMAT_WMV3; + gst_structure_set_name(gst_caps_get_structure(caps, 0), "video/x-wmv"); + gst_caps_set_simple(caps, "wmvversion", G_TYPE_INT, 3, NULL); + gst_caps_set_simple(caps, "format", G_TYPE_STRING, "WMV3", NULL); } else if (strcmp(codec_name, "vc1") == 0) { - codec_format->major_type = WG_MAJOR_TYPE_VIDEO_WMV; - codec_format->u.video.format = WG_VIDEO_FORMAT_WVC1; + gst_structure_set_name(gst_caps_get_structure(caps, 0), "video/x-wmv"); + gst_caps_set_simple(caps, "wmvversion", G_TYPE_INT, 3, NULL); + gst_caps_set_simple(caps, "format", G_TYPE_STRING, "WVC1", NULL); } else if (strcmp(codec_name, "wmav1") == 0) { - codec_format->major_type = WG_MAJOR_TYPE_AUDIO_WMA; - codec_format->u.audio.version = 1; + gst_structure_set_name(gst_caps_get_structure(caps, 0), 
"audio/x-wma"); + gst_caps_set_simple(caps, "wmaversion", G_TYPE_INT, 1, NULL); } else if (strcmp(codec_name, "wmav2") == 0) { - codec_format->major_type = WG_MAJOR_TYPE_AUDIO_WMA; - codec_format->u.audio.version = 2; + gst_structure_set_name(gst_caps_get_structure(caps, 0), "audio/x-wma"); + gst_caps_set_simple(caps, "wmaversion", G_TYPE_INT, 2, NULL); } else if (strcmp(codec_name, "wmapro") == 0) { - codec_format->major_type = WG_MAJOR_TYPE_AUDIO_WMA; - codec_format->u.audio.version = 3; + gst_structure_set_name(gst_caps_get_structure(caps, 0), "audio/x-wma"); + gst_caps_set_simple(caps, "wmaversion", G_TYPE_INT, 3, NULL); } else if (strcmp(codec_name, "wmalossless") == 0) { - codec_format->major_type = WG_MAJOR_TYPE_AUDIO_WMA; - codec_format->u.audio.version = 4; + gst_structure_set_name(gst_caps_get_structure(caps, 0), "audio/x-wma"); + gst_caps_set_simple(caps, "wmaversion", G_TYPE_INT, 4, NULL); } else if (strcmp(codec_name, "xma1") == 0) { - codec_format->major_type = WG_MAJOR_TYPE_AUDIO_WMA; - codec_format->u.audio.version = 1; - codec_format->u.audio.is_xma = true; + gst_structure_set_name(gst_caps_get_structure(caps, 0), "audio/x-xma"); + gst_caps_set_simple(caps, "xmaversion", G_TYPE_INT, 1, NULL); } else if (strcmp(codec_name, "xma2") == 0) { - codec_format->major_type = WG_MAJOR_TYPE_AUDIO_WMA; - codec_format->u.audio.version = 2; - codec_format->u.audio.is_xma = true; + gst_structure_set_name(gst_caps_get_structure(caps, 0), "audio/x-xma"); + gst_caps_set_simple(caps, "xmaversion", G_TYPE_INT, 2, NULL); } else { @@ -1335,8 +1358,7 @@ static bool codec_info_to_wg_format(char *codec_info, struct wg_format *codec_fo return false; } - GST_INFO("Got codec format major type %u.", codec_format->major_type); - + GST_INFO("Got caps %" GST_PTR_FORMAT, caps); return true; } @@ -1367,7 +1389,7 @@ static GstElement *gst_bin_get_by_type(GstBin * bin, GType type) return element; } -bool get_untranscoded_stream_format(GstElement *container, uint32_t stream_index, 
struct wg_format *codec_format) +bool get_untranscoded_stream_format(GstElement *container, uint32_t stream_index, GstCaps *caps) { struct video_conv_state *state; uint8_t *buffer = NULL; @@ -1410,7 +1432,7 @@ bool get_untranscoded_stream_format(GstElement *container, uint32_t stream_index GST_INFO("Got codec info \"%s\" for stream %d.\n", codec_info, stream_index); - ret = codec_info_to_wg_format(codec_info, codec_format); + ret = codec_info_to_wg_format(codec_info, caps); done: if (buffer) diff --git a/dlls/winegstreamer/media_source.c b/dlls/winegstreamer/media_source.c index a4ac0085dea..eb6ab3d21cd 100644 --- a/dlls/winegstreamer/media_source.c +++ b/dlls/winegstreamer/media_source.c @@ -357,19 +357,6 @@ static HRESULT stream_descriptor_get_media_type(IMFStreamDescriptor *descriptor, return hr; } -static HRESULT wg_format_from_stream_descriptor(IMFStreamDescriptor *descriptor, struct wg_format *format) -{ - IMFMediaType *media_type; - HRESULT hr; - - if (FAILED(hr = stream_descriptor_get_media_type(descriptor, &media_type))) - return hr; - mf_media_type_to_wg_format(media_type, format); - IMFMediaType_Release(media_type); - - return hr; -} - static HRESULT stream_descriptor_set_tag(IMFStreamDescriptor *descriptor, wg_parser_stream_t stream, const GUID *attr, enum wg_parser_tag tag) { @@ -396,151 +383,31 @@ static HRESULT stream_descriptor_set_tag(IMFStreamDescriptor *descriptor, wg_par return hr; } -static HRESULT init_video_media_types(struct wg_format *format, IMFMediaType *types[9], DWORD *types_count) -{ - /* Try to prefer YUV formats over RGB ones. Most decoders output in the - * YUV color space, and it's generally much less expensive for - * videoconvert to do YUV -> YUV transformations. 
*/ - static const enum wg_video_format video_formats[] = - { - WG_VIDEO_FORMAT_NV12, - WG_VIDEO_FORMAT_YV12, - WG_VIDEO_FORMAT_YUY2, - WG_VIDEO_FORMAT_I420, - WG_VIDEO_FORMAT_BGRA, - WG_VIDEO_FORMAT_BGRx, - WG_VIDEO_FORMAT_RGBA, - }; - UINT count = *types_count, i; - GUID base_subtype; - HRESULT hr; - - if (FAILED(hr = IMFMediaType_GetGUID(types[0], &MF_MT_SUBTYPE, &base_subtype))) - return hr; - - for (i = 0; i < ARRAY_SIZE(video_formats); ++i) - { - struct wg_format new_format = *format; - IMFMediaType *new_type; - - new_format.u.video.format = video_formats[i]; - - if (!(new_type = mf_media_type_from_wg_format(&new_format))) - { - hr = E_OUTOFMEMORY; - goto done; - } - types[count++] = new_type; - - if (video_formats[i] == WG_VIDEO_FORMAT_I420) - { - IMFMediaType *iyuv_type; - - if (FAILED(hr = MFCreateMediaType(&iyuv_type))) - goto done; - if (FAILED(hr = IMFMediaType_CopyAllItems(new_type, (IMFAttributes *)iyuv_type))) - goto done; - if (FAILED(hr = IMFMediaType_SetGUID(iyuv_type, &MF_MT_SUBTYPE, &MFVideoFormat_IYUV))) - goto done; - types[count++] = iyuv_type; - } - } - - for (i = 0; i < count; i++) - { - IMFMediaType_SetUINT32(types[i], &MF_MT_VIDEO_NOMINAL_RANGE, - MFNominalRange_Normal); - - { - /* HACK: Remove MF_MT_DEFAULT_STRIDE for games that incorrectly assume it doesn't change, - * workaround to fix 4e2d1f1d2ed6e57de9103c0fd43bce88e3ad4792 until media source stops decoding - * CW-Bug-Id: #23248 - */ - char const *sgi = getenv("SteamGameId"); - if (sgi && (!strcmp(sgi, "399810") || !strcmp(sgi, "851890") || !strcmp(sgi, "544750"))) - IMFMediaType_DeleteItem(types[i], &MF_MT_DEFAULT_STRIDE); - } - } - -done: - *types_count = count; - return hr; -} - -static HRESULT init_audio_media_types(struct wg_format *format, IMFMediaType *types[9], DWORD *types_count) -{ - /* Expose at least one PCM and one floating point type for the - consumer to pick from. 
Moreover, ensure that we expose S16LE first, - as games such as MGSV expect the native media type to be 16 bps. */ - static const enum wg_audio_format audio_types[] = - { - WG_AUDIO_FORMAT_S16LE, - WG_AUDIO_FORMAT_F32LE, - }; - UINT count = *types_count, i; - - BOOL has_native_format = FALSE; - - for (i = 0; i < ARRAY_SIZE(audio_types); i++) - { - struct wg_format new_format; - - new_format = *format; - new_format.u.audio.format = audio_types[i]; - if ((types[count] = mf_media_type_from_wg_format(&new_format))) - { - if (format->u.audio.format == audio_types[i]) - has_native_format = TRUE; - count++; - } - } - - if (!has_native_format && (types[count] = mf_media_type_from_wg_format(format))) - count++; - - *types_count = count; - return S_OK; -} - -static HRESULT stream_descriptor_create(UINT32 id, struct wg_format *format, IMFStreamDescriptor **out) +static HRESULT stream_descriptor_create(UINT32 id, wg_parser_stream_t wg_stream, IMFStreamDescriptor **out) { IMFStreamDescriptor *descriptor; IMFMediaTypeHandler *handler; - IMFMediaType *types[9]; - DWORD count = 0; + IMFMediaType *media_type; HRESULT hr; - if ((types[0] = mf_media_type_from_wg_format(format))) - count = 1; - - if (format->major_type == WG_MAJOR_TYPE_VIDEO) + if (FAILED(hr = wg_parser_stream_get_current_type_mf(wg_stream, &media_type))) + return MF_E_INVALIDMEDIATYPE; + if (FAILED(hr = MFCreateStreamDescriptor(id, 1, &media_type, &descriptor))) { - if (FAILED(hr = init_video_media_types(format, types, &count))) - goto done; - } - else if (format->major_type == WG_MAJOR_TYPE_AUDIO) - { - if (FAILED(hr = init_audio_media_types(format, types, &count))) - goto done; + IMFMediaType_Release(media_type); + return hr; } - assert(count <= ARRAY_SIZE(types)); - - if (FAILED(hr = MFCreateStreamDescriptor(id, count, types, &descriptor))) - goto done; - if (FAILED(hr = IMFStreamDescriptor_GetMediaTypeHandler(descriptor, &handler))) IMFStreamDescriptor_Release(descriptor); else { - hr = 
IMFMediaTypeHandler_SetCurrentMediaType(handler, types[0]); + hr = IMFMediaTypeHandler_SetCurrentMediaType(handler, media_type); IMFMediaTypeHandler_Release(handler); } -done: - while (count--) - IMFMediaType_Release(types[count]); - *out = SUCCEEDED(hr) ? descriptor : NULL; + IMFMediaType_Release(media_type); + *out = descriptor; return hr; } @@ -599,14 +466,18 @@ static void flush_token_queue(struct media_stream *stream, BOOL send) static HRESULT media_stream_start(struct media_stream *stream, BOOL active, BOOL seeking, const PROPVARIANT *position) { struct media_source *source = impl_from_IMFMediaSource(stream->media_source); - struct wg_format format; + IMFMediaType *media_type; HRESULT hr; TRACE("source %p, stream %p\n", source, stream); - if (FAILED(hr = wg_format_from_stream_descriptor(stream->descriptor, &format))) - WARN("Failed to get wg_format from stream descriptor, hr %#lx\n", hr); - wg_parser_stream_enable(stream->wg_stream, &format, 0); + if (SUCCEEDED(hr = stream_descriptor_get_media_type(stream->descriptor, &media_type))) + { + hr = wg_parser_stream_enable_mf(stream->wg_stream, media_type); + IMFMediaType_Release(media_type); + } + if (FAILED(hr)) + WARN("Failed to start media source stream, hr %#lx\n", hr); if (FAILED(hr = IMFMediaEventQueue_QueueEventParamUnk(source->event_queue, active ? 
MEUpdatedStream : MENewStream, &GUID_NULL, S_OK, (IUnknown *)&stream->IMFMediaStream_iface))) @@ -1491,17 +1362,22 @@ static HRESULT WINAPI media_source_CreatePresentationDescriptor(IMFMediaSource * for (i = 0; i < source->stream_count; ++i) { - struct wg_format format; + IMFMediaType *media_type; + GUID major = GUID_NULL; - wg_format_from_stream_descriptor(source->descriptors[i], &format); + if (SUCCEEDED(hr = stream_descriptor_get_media_type(source->descriptors[i], &media_type))) + { + hr = IMFMediaType_GetGUID(media_type, &MF_MT_MAJOR_TYPE, &major); + IMFMediaType_Release(media_type); + } - if (format.major_type >= WG_MAJOR_TYPE_VIDEO) + if (IsEqualGUID(&major, &MFMediaType_Video)) { if (!video_selected && FAILED(hr = IMFPresentationDescriptor_SelectStream(*descriptor, i))) WARN("Failed to select stream %u, hr %#lx\n", i, hr); video_selected = TRUE; } - else if (format.major_type >= WG_MAJOR_TYPE_AUDIO) + else if (IsEqualGUID(&major, &MFMediaType_Audio)) { if (!audio_selected && FAILED(hr = IMFPresentationDescriptor_SelectStream(*descriptor, i))) WARN("Failed to select stream %u, hr %#lx\n", i, hr); @@ -1742,10 +1618,8 @@ static HRESULT media_source_create(struct object_context *context, IMFMediaSourc wg_parser_stream_t wg_stream = wg_parser_get_stream(object->wg_parser, i); IMFStreamDescriptor *descriptor; struct media_stream *stream; - struct wg_format format; - wg_parser_stream_get_preferred_format(wg_stream, &format); - if (FAILED(hr = stream_descriptor_create(i, &format, &descriptor))) + if (FAILED(hr = stream_descriptor_create(i, wg_stream, &descriptor))) goto fail; if (FAILED(hr = media_stream_create(&object->IMFMediaSource_iface, descriptor, wg_stream, &stream))) { diff --git a/dlls/winegstreamer/mfplat.c b/dlls/winegstreamer/mfplat.c index 27218569188..d565a2794a4 100644 --- a/dlls/winegstreamer/mfplat.c +++ b/dlls/winegstreamer/mfplat.c @@ -24,7 +24,6 @@ #include "ks.h" #include "ksmedia.h" #include "wmcodecdsp.h" -#include "initguid.h" #include 
"d3d9types.h" #include "mfapi.h" #include "mmreg.h" @@ -33,6 +32,8 @@ #include "wine/debug.h" #include "wine/list.h" +#include "initguid.h" + WINE_DEFAULT_DEBUG_CHANNEL(mfplat); DEFINE_GUID(DMOVideoFormat_RGB32,D3DFMT_X8R8G8B8,0x524f,0x11ce,0x9f,0x53,0x00,0x20,0xaf,0x0b,0xa7,0x70); @@ -40,15 +41,17 @@ DEFINE_GUID(DMOVideoFormat_RGB24,D3DFMT_R8G8B8,0x524f,0x11ce,0x9f,0x53,0x00,0x20 DEFINE_GUID(DMOVideoFormat_RGB565,D3DFMT_R5G6B5,0x524f,0x11ce,0x9f,0x53,0x00,0x20,0xaf,0x0b,0xa7,0x70); DEFINE_GUID(DMOVideoFormat_RGB555,D3DFMT_X1R5G5B5,0x524f,0x11ce,0x9f,0x53,0x00,0x20,0xaf,0x0b,0xa7,0x70); DEFINE_GUID(DMOVideoFormat_RGB8,D3DFMT_P8,0x524f,0x11ce,0x9f,0x53,0x00,0x20,0xaf,0x0b,0xa7,0x70); -DEFINE_MEDIATYPE_GUID(MFVideoFormat_ABGR32,D3DFMT_A8B8G8R8); DEFINE_MEDIATYPE_GUID(MFAudioFormat_RAW_AAC,WAVE_FORMAT_RAW_AAC1); DEFINE_MEDIATYPE_GUID(MFAudioFormat_XMAudio2, 0x0166); DEFINE_MEDIATYPE_GUID(MFVideoFormat_VC1S,MAKEFOURCC('V','C','1','S')); DEFINE_MEDIATYPE_GUID(MFVideoFormat_IV50,MAKEFOURCC('I','V','5','0')); +DEFINE_MEDIATYPE_GUID(MFVideoFormat_ABGR32,D3DFMT_A8B8G8R8); DEFINE_MEDIATYPE_GUID(MFAudioFormat_GStreamer,MAKEFOURCC('G','S','T','a')); DEFINE_MEDIATYPE_GUID(MFVideoFormat_GStreamer,MAKEFOURCC('G','S','T','v')); DEFINE_GUID(MEDIASUBTYPE_WMV_Unknown, 0x7ce12ca9,0xbfbf,0x43d9,0x9d,0x00,0x82,0xb8,0xed,0x54,0x31,0x6b); +extern GUID MEDIASUBTYPE_VC1S; + struct class_factory { IClassFactory IClassFactory_iface; @@ -359,15 +362,19 @@ HRESULT mfplat_DllRegisterServer(void) }; MFT_REGISTER_TYPE_INFO video_decoder_output_types[] = { + {MFMediaType_Video, MFVideoFormat_NV12}, {MFMediaType_Video, MFVideoFormat_YV12}, + {MFMediaType_Video, MFVideoFormat_IYUV}, + {MFMediaType_Video, MFVideoFormat_I420}, {MFMediaType_Video, MFVideoFormat_YUY2}, {MFMediaType_Video, MFVideoFormat_NV11}, - {MFMediaType_Video, MFVideoFormat_NV12}, - {MFMediaType_Video, MFVideoFormat_RGB32}, - {MFMediaType_Video, MFVideoFormat_RGB24}, - {MFMediaType_Video, MFVideoFormat_RGB565}, - {MFMediaType_Video, 
MFVideoFormat_RGB555}, - {MFMediaType_Video, MFVideoFormat_RGB8}, + {MFMediaType_Video, MFVideoFormat_UYVY}, + {MFMediaType_Video, MFVideoFormat_YVYU}, + {MFMediaType_Video, DMOVideoFormat_RGB32}, + {MFMediaType_Video, DMOVideoFormat_RGB24}, + {MFMediaType_Video, DMOVideoFormat_RGB565}, + {MFMediaType_Video, DMOVideoFormat_RGB555}, + {MFMediaType_Video, DMOVideoFormat_RGB8}, }; struct mft @@ -486,6 +493,19 @@ HRESULT mfplat_DllRegisterServer(void) ARRAY_SIZE(color_convert_output_types), color_convert_output_types, }, + { + /* HACK: Register the video processor as a decoder too as + * the media source currently always decodes. + */ + CLSID_VideoProcessorMFT, + MFT_CATEGORY_VIDEO_DECODER, + L"Null Decoder", + MFT_ENUM_FLAG_SYNCMFT, + ARRAY_SIZE(video_processor_input_types), + video_processor_input_types, + ARRAY_SIZE(video_processor_output_types), + video_processor_output_types, + }, { CLSID_GStreamerAudioDecoder, MFT_CATEGORY_AUDIO_DECODER, @@ -506,19 +526,6 @@ HRESULT mfplat_DllRegisterServer(void) ARRAY_SIZE(video_decoder_output_types), video_decoder_output_types, }, - { - /* HACK: Register the video processor as a decoder too as - * the media source currently always decodes. 
- */ - CLSID_VideoProcessorMFT, - MFT_CATEGORY_VIDEO_DECODER, - L"Null Decoder", - MFT_ENUM_FLAG_SYNCMFT, - ARRAY_SIZE(video_processor_input_types), - video_processor_input_types, - ARRAY_SIZE(video_processor_output_types), - video_processor_output_types, - }, }; unsigned int i; @@ -549,9 +556,9 @@ video_formats[] = {&MFVideoFormat_ARGB32, WG_VIDEO_FORMAT_BGRA}, {&MFVideoFormat_RGB32, WG_VIDEO_FORMAT_BGRx}, {&MFVideoFormat_RGB24, WG_VIDEO_FORMAT_BGR}, - {&MFVideoFormat_ABGR32, WG_VIDEO_FORMAT_RGBA}, {&MFVideoFormat_RGB555, WG_VIDEO_FORMAT_RGB15}, {&MFVideoFormat_RGB565, WG_VIDEO_FORMAT_RGB16}, + {&MFVideoFormat_ABGR32, WG_VIDEO_FORMAT_RGBA}, {&MFVideoFormat_AYUV, WG_VIDEO_FORMAT_AYUV}, {&MFVideoFormat_I420, WG_VIDEO_FORMAT_I420}, {&MFVideoFormat_IYUV, WG_VIDEO_FORMAT_I420}, @@ -672,74 +679,6 @@ static IMFMediaType *mf_media_type_from_wg_format_video(const struct wg_format * return NULL; } -static IMFMediaType *mf_media_type_from_wg_format_audio_encoded(const struct wg_format *format) -{ - IMFMediaType *type; - UINT32 value; - HRESULT hr; - - if (FAILED(MFCreateMediaType(&type))) - return NULL; - if (FAILED(hr = IMFMediaType_SetGUID(type, &MF_MT_MAJOR_TYPE, &MFMediaType_Audio))) - goto done; - if (FAILED(hr = IMFMediaType_SetGUID(type, &MF_MT_SUBTYPE, &MFAudioFormat_GStreamer))) - goto done; - - value = format->u.audio.rate; - if (value && FAILED(hr = IMFMediaType_SetUINT32(type, &MF_MT_AUDIO_SAMPLES_PER_SECOND, value))) - goto done; - value = format->u.audio.channels; - if (value && FAILED(hr = IMFMediaType_SetUINT32(type, &MF_MT_AUDIO_NUM_CHANNELS, value))) - goto done; - if (FAILED(hr = IMFMediaType_SetBlob(type, &MF_MT_USER_DATA, (BYTE *)format->u.audio.caps, - strlen(format->u.audio.caps) + 1))) - goto done; - -done: - if (FAILED(hr)) - { - IMFMediaType_Release(type); - return NULL; - } - return type; -} - -static IMFMediaType *mf_media_type_from_wg_format_video_encoded(const struct wg_format *format) -{ - UINT64 frame_rate, frame_size; - IMFMediaType *type; - 
HRESULT hr; - - if (FAILED(MFCreateMediaType(&type))) - return NULL; - if (FAILED(hr = IMFMediaType_SetGUID(type, &MF_MT_MAJOR_TYPE, &MFMediaType_Video))) - goto done; - if (FAILED(hr = IMFMediaType_SetGUID(type, &MF_MT_SUBTYPE, &MFVideoFormat_GStreamer))) - goto done; - if (FAILED(hr = IMFMediaType_SetUINT32(type, &MF_MT_VIDEO_ROTATION, MFVideoRotationFormat_0))) - goto done; - if (FAILED(hr = IMFMediaType_SetUINT32(type, &MF_MT_COMPRESSED, TRUE))) - goto done; - - frame_size = (UINT64)format->u.video.width << 32 | format->u.video.height; - if (FAILED(hr = IMFMediaType_SetUINT64(type, &MF_MT_FRAME_SIZE, frame_size))) - goto done; - frame_rate = (UINT64)format->u.video.fps_n << 32 | format->u.video.fps_d; - if (FAILED(hr = IMFMediaType_SetUINT64(type, &MF_MT_FRAME_RATE, frame_rate))) - goto done; - if (FAILED(hr = IMFMediaType_SetBlob(type, &MF_MT_USER_DATA, (BYTE *)format->u.video.caps, - strlen(format->u.video.caps) + 1))) - goto done; - -done: - if (FAILED(hr)) - { - IMFMediaType_Release(type); - return NULL; - } - return type; -} - IMFMediaType *mf_media_type_from_wg_format(const struct wg_format *format) { switch (format->major_type) @@ -759,13 +698,9 @@ IMFMediaType *mf_media_type_from_wg_format(const struct wg_format *format) case WG_MAJOR_TYPE_AUDIO: return mf_media_type_from_wg_format_audio(format); - case WG_MAJOR_TYPE_AUDIO_ENCODED: - return mf_media_type_from_wg_format_audio_encoded(format); case WG_MAJOR_TYPE_VIDEO: return mf_media_type_from_wg_format_video(format); - case WG_MAJOR_TYPE_VIDEO_ENCODED: - return mf_media_type_from_wg_format_video_encoded(format); } assert(0); @@ -825,6 +760,61 @@ static void mf_media_type_to_wg_format_audio(IMFMediaType *type, const GUID *sub FIXME("Unrecognized audio subtype %s, depth %u.\n", debugstr_guid(subtype), depth); } +static void mf_media_type_to_wg_format_audio_mpeg(IMFMediaType *type, const GUID *subtype, struct wg_format *format) +{ + MPEG1WAVEFORMAT wfx = {0}; + UINT32 codec_data_size; + UINT32 rate, 
channels; + + if (FAILED(IMFMediaType_GetUINT32(type, &MF_MT_AUDIO_SAMPLES_PER_SECOND, &rate))) + { + FIXME("Sample rate is not set.\n"); + return; + } + if (FAILED(IMFMediaType_GetUINT32(type, &MF_MT_AUDIO_NUM_CHANNELS, &channels))) + { + FIXME("Channel count is not set.\n"); + return; + } + if (FAILED(IMFMediaType_GetBlob(type, &MF_MT_USER_DATA, (UINT8 *)(&wfx.wfx + 1), + sizeof(wfx) - sizeof(WAVEFORMATEX), &codec_data_size))) + { + FIXME("Codec data is not set.\n"); + return; + } + if (codec_data_size < sizeof(wfx) - sizeof(WAVEFORMATEX)) + { + FIXME("Codec data is incomplete.\n"); + return; + } + + format->major_type = WG_MAJOR_TYPE_AUDIO_MPEG1; + format->u.audio.channels = channels; + format->u.audio.rate = rate; + format->u.audio.layer = wfx.fwHeadLayer; +} + +static void mf_media_type_to_wg_format_audio_mpeg_layer3(IMFMediaType *type, const GUID *subtype, struct wg_format *format) +{ + UINT32 rate, channels; + + if (FAILED(IMFMediaType_GetUINT32(type, &MF_MT_AUDIO_SAMPLES_PER_SECOND, &rate))) + { + FIXME("Sample rate is not set.\n"); + return; + } + if (FAILED(IMFMediaType_GetUINT32(type, &MF_MT_AUDIO_NUM_CHANNELS, &channels))) + { + FIXME("Channel count is not set.\n"); + return; + } + + format->major_type = WG_MAJOR_TYPE_AUDIO_MPEG1; + format->u.audio.channels = channels; + format->u.audio.rate = rate; + format->u.audio.layer = 3; +} + static void mf_media_type_to_wg_format_audio_mpeg4(IMFMediaType *type, const GUID *subtype, struct wg_format *format) { BYTE buffer[sizeof(HEAACWAVEFORMAT) + 64]; @@ -861,29 +851,6 @@ static void mf_media_type_to_wg_format_audio_mpeg4(IMFMediaType *type, const GUI format->u.audio.codec_data_len = codec_data_size; } -static void mf_media_type_to_wg_format_audio_encoded(IMFMediaType *type, struct wg_format *format) -{ - UINT32 caps_len; - BYTE *caps; - HRESULT hr; - - memset(format, 0, sizeof(*format)); - format->major_type = WG_MAJOR_TYPE_AUDIO_ENCODED; - - if (FAILED(hr = IMFMediaType_GetUINT32(type, 
&MF_MT_AUDIO_SAMPLES_PER_SECOND, &format->u.audio.rate))) - WARN("Failed to get MF_MT_AUDIO_SAMPLES_PER_SECOND for type %p, hr %#lx.\n", type, hr); - if (FAILED(hr = IMFMediaType_GetUINT32(type, &MF_MT_AUDIO_NUM_CHANNELS, &format->u.audio.channels))) - WARN("Failed to get MF_MT_AUDIO_NUM_CHANNELS for type %p, hr %#lx.\n", type, hr); - - if (FAILED(hr = IMFMediaType_GetAllocatedBlob(type, &MF_MT_USER_DATA, &caps, &caps_len))) - WARN("Failed to get MF_MT_USER_DATA for type %p, hr %#lx.\n", type, hr); - else - { - strcpy(format->u.audio.caps, (char *)caps); - CoTaskMemFree(caps); - } -} - static enum wg_video_format mf_video_format_to_wg(const GUID *subtype) { unsigned int i; @@ -1140,42 +1107,6 @@ static void mf_media_type_to_wg_format_video_wmv(IMFMediaType *type, const GUID } } - -static void mf_media_type_to_wg_format_video_encoded(IMFMediaType *type, struct wg_format *format) -{ - UINT64 frame_rate, frame_size; - UINT32 caps_len; - HRESULT hr; - BYTE *caps; - - memset(format, 0, sizeof(*format)); - format->major_type = WG_MAJOR_TYPE_VIDEO_ENCODED; - - if (FAILED(hr = IMFMediaType_GetUINT64(type, &MF_MT_FRAME_SIZE, &frame_size))) - WARN("Failed to get MF_MT_FRAME_SIZE for type %p, hr %#lx.\n", type, hr); - else - { - format->u.video.width = frame_size >> 32; - format->u.video.height = (UINT32)frame_size; - } - - if (FAILED(IMFMediaType_GetUINT64(type, &MF_MT_FRAME_RATE, &frame_rate)) && (UINT32)frame_rate) - WARN("Failed to get MF_MT_FRAME_RATE for type %p, hr %#lx.\n", type, hr); - else - { - format->u.video.fps_n = frame_rate >> 32; - format->u.video.fps_d = (UINT32)frame_rate; - } - - if (FAILED(hr = IMFMediaType_GetAllocatedBlob(type, &MF_MT_USER_DATA, &caps, &caps_len))) - WARN("Failed to get MF_MT_USER_DATA for type %p, hr %#lx.\n", type, hr); - else - { - strcpy(format->u.video.caps, (char *)caps); - CoTaskMemFree(caps); - } -} - void mf_media_type_to_wg_format(IMFMediaType *type, struct wg_format *format) { GUID major_type, subtype; @@ -1195,7 +1126,11 @@ 
void mf_media_type_to_wg_format(IMFMediaType *type, struct wg_format *format) if (IsEqualGUID(&major_type, &MFMediaType_Audio)) { - if (IsEqualGUID(&subtype, &MEDIASUBTYPE_MSAUDIO1) || + if (IsEqualGUID(&subtype, &MFAudioFormat_MPEG)) + mf_media_type_to_wg_format_audio_mpeg(type, &subtype, format); + else if (IsEqualGUID(&subtype, &MFAudioFormat_MP3)) + mf_media_type_to_wg_format_audio_mpeg_layer3(type, &subtype, format); + else if (IsEqualGUID(&subtype, &MEDIASUBTYPE_MSAUDIO1) || IsEqualGUID(&subtype, &MFAudioFormat_WMAudioV8) || IsEqualGUID(&subtype, &MFAudioFormat_WMAudioV9) || IsEqualGUID(&subtype, &MFAudioFormat_WMAudio_Lossless) || @@ -1203,8 +1138,6 @@ void mf_media_type_to_wg_format(IMFMediaType *type, struct wg_format *format) mf_media_type_to_wg_format_audio_wma(type, &subtype, format); else if (IsEqualGUID(&subtype, &MFAudioFormat_AAC) || IsEqualGUID(&subtype, &MFAudioFormat_RAW_AAC)) mf_media_type_to_wg_format_audio_mpeg4(type, &subtype, format); - else if (IsEqualGUID(&subtype, &MFAudioFormat_GStreamer)) - mf_media_type_to_wg_format_audio_encoded(type, format); else mf_media_type_to_wg_format_audio(type, &subtype, format); } @@ -1224,8 +1157,6 @@ void mf_media_type_to_wg_format(IMFMediaType *type, struct wg_format *format) || IsEqualGUID(&subtype, &MEDIASUBTYPE_WMV3) || IsEqualGUID(&subtype, &MFVideoFormat_VC1S)) mf_media_type_to_wg_format_video_wmv(type, &subtype, format); - else if (IsEqualGUID(&subtype, &MFVideoFormat_GStreamer)) - mf_media_type_to_wg_format_video_encoded(type, format); else mf_media_type_to_wg_format_video(type, &subtype, format); } diff --git a/dlls/winegstreamer/new_media_source.c b/dlls/winegstreamer/new_media_source.c index efaf6a3e087..a97d659e58d 100644 --- a/dlls/winegstreamer/new_media_source.c +++ b/dlls/winegstreamer/new_media_source.c @@ -40,6 +40,7 @@ struct object_context WCHAR *url; BYTE *buffer; + UINT64 read_offset; wg_source_t wg_source; WCHAR mime_type[256]; UINT32 stream_count; @@ -152,93 +153,6 @@ static HRESULT 
object_context_create(DWORD flags, IMFByteStream *stream, const W return S_OK; } -struct media_source_fallback_callback -{ - IMFAsyncCallback IMFAsyncCallback_iface; - IMFAsyncResult *result; - HANDLE event; -}; - -static HRESULT WINAPI media_source_fallback_callback_QueryInterface(IMFAsyncCallback *iface, REFIID riid, void **obj) -{ - if (IsEqualIID(riid, &IID_IMFAsyncCallback) || - IsEqualIID(riid, &IID_IUnknown)) - { - *obj = iface; - IMFAsyncCallback_AddRef(iface); - return S_OK; - } - - WARN("Unsupported %s.\n", debugstr_guid(riid)); - *obj = NULL; - return E_NOINTERFACE; -} - -static ULONG WINAPI media_source_fallback_callback_AddRef(IMFAsyncCallback *iface) -{ - return 2; -} - -static ULONG WINAPI media_source_fallback_callback_Release(IMFAsyncCallback *iface) -{ - return 1; -} - -static HRESULT WINAPI media_source_fallback_callback_GetParameters(IMFAsyncCallback *iface, DWORD *flags, DWORD *queue) -{ - return E_NOTIMPL; -} - -static HRESULT WINAPI media_source_fallback_callback_Invoke(IMFAsyncCallback *iface, IMFAsyncResult *result) -{ - struct media_source_fallback_callback *impl = CONTAINING_RECORD(iface, struct media_source_fallback_callback, IMFAsyncCallback_iface); - - IMFAsyncResult_AddRef((impl->result = result)); - SetEvent(impl->event); - - return S_OK; -} - -static const IMFAsyncCallbackVtbl media_source_fallback_callback_vtbl = -{ - media_source_fallback_callback_QueryInterface, - media_source_fallback_callback_AddRef, - media_source_fallback_callback_Release, - media_source_fallback_callback_GetParameters, - media_source_fallback_callback_Invoke, -}; - -static HRESULT create_media_source_fallback(struct object_context *context, IUnknown **object) -{ - static const GUID CLSID_GStreamerByteStreamHandler = {0x317df618, 0x5e5a, 0x468a, {0x9f, 0x15, 0xd8, 0x27, 0xa9, 0xa0, 0x81, 0x62}}; - struct media_source_fallback_callback callback = {{&media_source_fallback_callback_vtbl}}; - IMFByteStreamHandler *handler; - MF_OBJECT_TYPE type; - HRESULT hr; - - 
if (!(callback.event = CreateEventW(NULL, FALSE, FALSE, NULL))) - return HRESULT_FROM_WIN32(GetLastError()); - - if (FAILED(hr = CoCreateInstance(&CLSID_GStreamerByteStreamHandler, NULL, CLSCTX_INPROC_SERVER, - &IID_IMFByteStreamHandler, (void **)&handler))) - { - CloseHandle(callback.event); - return hr; - } - - if (SUCCEEDED(hr = IMFByteStreamHandler_BeginCreateObject(handler, context->stream, NULL, MF_RESOLUTION_MEDIASOURCE, - NULL, NULL, &callback.IMFAsyncCallback_iface, NULL))) - { - WaitForSingleObject(callback.event, INFINITE); - hr = IMFByteStreamHandler_EndCreateObject(handler, callback.result, &type, object); - IMFAsyncResult_Release(callback.result); - } - - IMFByteStreamHandler_Release(handler); - CloseHandle(callback.event); - return hr; -} - struct media_stream { IMFMediaStream IMFMediaStream_iface; @@ -456,19 +370,6 @@ static ULONG WINAPI source_async_commands_callback_Release(IMFAsyncCallback *ifa return IMFMediaSource_Release(&source->IMFMediaSource_iface); } -static HRESULT stream_descriptor_get_media_type(IMFStreamDescriptor *descriptor, IMFMediaType **media_type) -{ - IMFMediaTypeHandler *handler; - HRESULT hr; - - if (FAILED(hr = IMFStreamDescriptor_GetMediaTypeHandler(descriptor, &handler))) - return hr; - hr = IMFMediaTypeHandler_GetCurrentMediaType(handler, media_type); - IMFMediaTypeHandler_Release(handler); - - return hr; -} - static HRESULT stream_descriptor_get_major_type(IMFStreamDescriptor *descriptor, GUID *major) { IMFMediaTypeHandler *handler; @@ -482,19 +383,6 @@ static HRESULT stream_descriptor_get_major_type(IMFStreamDescriptor *descriptor, return hr; } -static HRESULT wg_format_from_stream_descriptor(IMFStreamDescriptor *descriptor, struct wg_format *format) -{ - IMFMediaType *media_type; - HRESULT hr; - - if (FAILED(hr = stream_descriptor_get_media_type(descriptor, &media_type))) - return hr; - mf_media_type_to_wg_format(media_type, format); - IMFMediaType_Release(media_type); - - return hr; -} - static HRESULT 
stream_descriptor_set_tag(IMFStreamDescriptor *descriptor, wg_source_t source, UINT index, const GUID *attr, enum wg_parser_tag tag) { @@ -521,29 +409,25 @@ static HRESULT stream_descriptor_set_tag(IMFStreamDescriptor *descriptor, return hr; } -static HRESULT stream_descriptor_create(UINT32 id, struct wg_format *format, IMFStreamDescriptor **out) +static HRESULT stream_descriptor_create(UINT32 id, IMFMediaType *media_type, IMFStreamDescriptor **out) { IMFStreamDescriptor *descriptor; IMFMediaTypeHandler *handler; - IMFMediaType *type; HRESULT hr; - if (!(type = mf_media_type_from_wg_format(format))) - return MF_E_INVALIDMEDIATYPE; - if (FAILED(hr = MFCreateStreamDescriptor(id, 1, &type, &descriptor))) - goto done; + *out = NULL; + if (FAILED(hr = MFCreateStreamDescriptor(id, 1, &media_type, &descriptor))) + return hr; if (FAILED(hr = IMFStreamDescriptor_GetMediaTypeHandler(descriptor, &handler))) IMFStreamDescriptor_Release(descriptor); else { - hr = IMFMediaTypeHandler_SetCurrentMediaType(handler, type); + if (SUCCEEDED(hr = IMFMediaTypeHandler_SetCurrentMediaType(handler, media_type))) + *out = descriptor; IMFMediaTypeHandler_Release(handler); } -done: - IMFMediaType_Release(type); - *out = SUCCEEDED(hr) ? descriptor : NULL; return hr; } @@ -602,14 +486,10 @@ static void flush_token_queue(struct media_stream *stream, BOOL send) static HRESULT media_stream_start(struct media_stream *stream, BOOL active, BOOL seeking, const PROPVARIANT *position) { struct media_source *source = impl_from_IMFMediaSource(stream->media_source); - struct wg_format format; HRESULT hr; TRACE("source %p, stream %p\n", source, stream); - if (FAILED(hr = wg_format_from_stream_descriptor(stream->descriptor, &format))) - WARN("Failed to get wg_format from stream descriptor, hr %#lx\n", hr); - if (FAILED(hr = IMFMediaEventQueue_QueueEventParamUnk(source->event_queue, active ? 
MEUpdatedStream : MENewStream, &GUID_NULL, S_OK, (IUnknown *)&stream->IMFMediaStream_iface))) WARN("Failed to send source stream event, hr %#lx\n", hr); @@ -774,8 +654,8 @@ static HRESULT media_stream_send_eos(struct media_source *source, struct media_s static HRESULT wait_on_sample(struct media_stream *stream, IUnknown *token) { struct media_source *source = impl_from_IMFMediaSource(stream->media_source); + UINT64 read_offset, position; DWORD id, read_size; - UINT64 read_offset; IMFSample *sample; HRESULT hr; @@ -789,11 +669,21 @@ static HRESULT wait_on_sample(struct media_stream *stream, IUnknown *token) { if (FAILED(hr = wg_source_get_position(source->wg_source, &read_offset))) break; - if (FAILED(hr = IMFByteStream_SetCurrentPosition(source->byte_stream, read_offset))) - WARN("Failed to seek stream to %#I64x, hr %#lx\n", read_offset, hr); + if (read_offset >= source->file_size) + { + if (FAILED(hr = wg_source_push_data(source->wg_source, read_offset, NULL, 0))) + WARN("Failed to push %#lx bytes to source, hr %#lx\n", read_size, hr); + continue; + } + + if (FAILED(hr = IMFByteStream_GetCurrentPosition(source->byte_stream, &position))) + WARN("Failed to get current byte stream position, hr %#lx\n", hr); + else if (position != (read_offset = min(read_offset, source->file_size)) + && FAILED(hr = IMFByteStream_SetCurrentPosition(source->byte_stream, read_offset))) + WARN("Failed to set current byte stream position, hr %#lx\n", hr); else if (FAILED(hr = IMFByteStream_Read(source->byte_stream, source->read_buffer, SOURCE_BUFFER_SIZE, &read_size))) WARN("Failed to read %#lx bytes from stream, hr %#lx\n", read_size, hr); - else if (FAILED(hr = wg_source_push_data(source->wg_source, source->read_buffer, read_size))) + else if (FAILED(hr = wg_source_push_data(source->wg_source, read_offset, source->read_buffer, read_size))) WARN("Failed to push %#lx bytes to source, hr %#lx\n", read_size, hr); } @@ -1549,8 +1439,9 @@ static const IMFMediaSourceVtbl IMFMediaSource_vtbl = 
static void media_source_init_stream_map(struct media_source *source, UINT stream_count) { - struct wg_format format; + IMFMediaType *media_type; int i, n = 0; + GUID major; if (wcscmp(source->mime_type, L"video/mp4")) { @@ -1564,26 +1455,40 @@ static void media_source_init_stream_map(struct media_source *source, UINT strea for (i = stream_count - 1; i >= 0; i--) { - wg_source_get_stream_format(source->wg_source, i, &format); - if (format.major_type == WG_MAJOR_TYPE_UNKNOWN) continue; - if (format.major_type >= WG_MAJOR_TYPE_VIDEO) continue; - TRACE("mapping stream %u to wg_source stream %u\n", n, i); - source->stream_map[n++] = i; + if (SUCCEEDED(wg_source_get_stream_type(source->wg_source, i, &media_type))) + { + if (SUCCEEDED(IMFMediaType_GetMajorType(media_type, &major)) + && IsEqualGUID(&major, &MFMediaType_Video)) + { + TRACE("mapping stream %u to wg_source stream %u\n", n, i); + source->stream_map[n++] = i; + } + } } for (i = stream_count - 1; i >= 0; i--) { - wg_source_get_stream_format(source->wg_source, i, &format); - if (format.major_type == WG_MAJOR_TYPE_UNKNOWN) continue; - if (format.major_type < WG_MAJOR_TYPE_VIDEO) continue; - TRACE("mapping stream %u to wg_source stream %u\n", n, i); - source->stream_map[n++] = i; + if (SUCCEEDED(wg_source_get_stream_type(source->wg_source, i, &media_type))) + { + if (SUCCEEDED(IMFMediaType_GetMajorType(media_type, &major)) + && IsEqualGUID(&major, &MFMediaType_Audio)) + { + TRACE("mapping stream %u to wg_source stream %u\n", n, i); + source->stream_map[n++] = i; + } + } } for (i = stream_count - 1; i >= 0; i--) { - wg_source_get_stream_format(source->wg_source, i, &format); - if (format.major_type != WG_MAJOR_TYPE_UNKNOWN) continue; - TRACE("mapping stream %u to wg_source stream %u\n", n, i); - source->stream_map[n++] = i; + if (SUCCEEDED(wg_source_get_stream_type(source->wg_source, i, &media_type))) + { + if (FAILED(IMFMediaType_GetMajorType(media_type, &major)) + || (!IsEqualGUID(&major, &MFMediaType_Audio) + && 
!IsEqualGUID(&major, &MFMediaType_Video))) + { + TRACE("mapping stream %u to wg_source stream %u\n", n, i); + source->stream_map[n++] = i; + } + } } } @@ -1699,12 +1604,16 @@ static HRESULT media_source_create(struct object_context *context, IMFMediaSourc { IMFStreamDescriptor *descriptor; struct media_stream *stream; - struct wg_format format; + IMFMediaType *media_type; - if (FAILED(hr = wg_source_get_stream_format(object->wg_source, object->stream_map[i], &format))) + if (FAILED(hr = wg_source_get_stream_type(object->wg_source, object->stream_map[i], &media_type))) goto fail; - if (FAILED(hr = stream_descriptor_create(i + 1, &format, &descriptor))) + if (FAILED(hr = stream_descriptor_create(i + 1, media_type, &descriptor))) + { + IMFMediaType_Release(media_type); goto fail; + } + IMFMediaType_Release(media_type); if (FAILED(hr = media_stream_create(&object->IMFMediaSource_iface, descriptor, &stream))) { IMFStreamDescriptor_Release(descriptor); @@ -2021,7 +1930,6 @@ static HRESULT WINAPI stream_handler_callback_Invoke(IMFAsyncCallback *iface, IM IUnknown *object, *state = IMFAsyncResult_GetStateNoAddRef(result); struct object_context *context; struct result_entry *entry; - UINT64 read_offset; DWORD size = 0; HRESULT hr; @@ -2033,36 +1941,33 @@ static HRESULT WINAPI stream_handler_callback_Invoke(IMFAsyncCallback *iface, IM else if (!context->wg_source && FAILED(hr = wg_source_create(context->url, context->file_size, context->buffer, size, context->mime_type, &context->wg_source))) WARN("Failed to create wg_source, hr %#lx\n", hr); - else if (FAILED(hr = wg_source_push_data(context->wg_source, context->buffer, size))) + else if (FAILED(hr = wg_source_push_data(context->wg_source, context->read_offset, context->buffer, size))) WARN("Failed to push wg_source data, hr %#lx\n", hr); - else if (FAILED(hr = wg_source_get_stream_count(context->wg_source, &context->stream_count))) - WARN("Failed to get wg_source status, hr %#lx\n", hr); - else if (!context->stream_count)
+ else while (SUCCEEDED(hr)) { - QWORD position, offset; - if (FAILED(hr = wg_source_get_position(context->wg_source, &read_offset))) + UINT32 read_size; + QWORD position; + + if (FAILED(hr = wg_source_get_stream_count(context->wg_source, &context->stream_count))) + WARN("Failed to get source stream count, hr %#lx\n", hr); + else if (context->stream_count) + break; + else if (FAILED(hr = wg_source_get_position(context->wg_source, &context->read_offset))) WARN("Failed to get wg_source position, hr %#lx\n", hr); else if (FAILED(hr = IMFByteStream_GetCurrentPosition(context->stream, &position))) WARN("Failed to get current byte stream position, hr %#lx\n", hr); - else if (position != (offset = min(read_offset, context->file_size)) - && FAILED(hr = IMFByteStream_SetCurrentPosition(context->stream, offset))) + else if (position != (context->read_offset = min(context->read_offset, context->file_size)) + && FAILED(hr = IMFByteStream_SetCurrentPosition(context->stream, context->read_offset))) WARN("Failed to set current byte stream position, hr %#lx\n", hr); - else - { - UINT32 read_size = min(SOURCE_BUFFER_SIZE, context->file_size - offset); + else if ((read_size = min(SOURCE_BUFFER_SIZE, context->file_size - context->read_offset))) return IMFByteStream_BeginRead(context->stream, context->buffer, read_size, &handler->IMFAsyncCallback_iface, state); - } - } - else if (FAILED(hr = media_source_create(context, (IMFMediaSource **)&object))) - WARN("Failed to create media source, hr %#lx\n", hr); - - if (FAILED(hr)) - { - FIXME("Falling back to old media source, hr %#lx\n", hr); - hr = create_media_source_fallback(context, (IUnknown **)&object); + else if (FAILED(hr = wg_source_push_data(context->wg_source, context->read_offset, NULL, 0))) + WARN("Failed to push wg_source data, hr %#lx\n", hr); } + if (SUCCEEDED(hr) && FAILED(hr = media_source_create(context, (IMFMediaSource **)&object))) + WARN("Failed to create media source, hr %#lx\n", hr); if (SUCCEEDED(hr)) { if 
(FAILED(hr = result_entry_create(context->result, MF_OBJECT_MEDIASOURCE, object, &entry))) diff --git a/dlls/winegstreamer/quartz_parser.c b/dlls/winegstreamer/quartz_parser.c index e7f7e767136..5d34ea2b74a 100644 --- a/dlls/winegstreamer/quartz_parser.c +++ b/dlls/winegstreamer/quartz_parser.c @@ -27,10 +27,9 @@ #include #include "dvdmedia.h" +#include "d3d9types.h" #include "mmreg.h" #include "ks.h" -#include "mfapi.h" -#include "d3d9types.h" #include "wmcodecdsp.h" #include "initguid.h" #include "ksmedia.h" @@ -41,8 +40,8 @@ static const GUID MEDIASUBTYPE_CVID = {mmioFOURCC('c','v','i','d'), 0x0000, 0x00 static const GUID MEDIASUBTYPE_VC1S = {mmioFOURCC('V','C','1','S'), 0x0000, 0x0010, {0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71}}; static const GUID MEDIASUBTYPE_MP3 = {WAVE_FORMAT_MPEGLAYER3, 0x0000, 0x0010, {0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71}}; static const GUID MEDIASUBTYPE_WMV_Unknown = {0x7ce12ca9, 0xbfbf, 0x43d9, {0x9d, 0x00, 0x82, 0xb8, 0xed, 0x54, 0x31, 0x6b}}; +DEFINE_GUID(MEDIASUBTYPE_ABGR32,D3DFMT_A8B8G8R8,0x524f,0x11ce,0x9f,0x53,0x00,0x20,0xaf,0x0b,0xa7,0x70); static const GUID MEDIASUBTYPE_XMAUDIO2 = {0x0166, 0x0000, 0x0010, {0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71}}; -extern const GUID MFVideoFormat_ABGR32; struct parser { @@ -357,8 +356,8 @@ static unsigned int wg_format_get_max_size_video_raw(enum wg_video_format format { case WG_VIDEO_FORMAT_BGRA: case WG_VIDEO_FORMAT_BGRx: - case WG_VIDEO_FORMAT_RGBA: case WG_VIDEO_FORMAT_AYUV: + case WG_VIDEO_FORMAT_RGBA: return width * height * 4; case WG_VIDEO_FORMAT_BGR: @@ -464,10 +463,8 @@ unsigned int wg_format_get_max_size(const struct wg_format *format) return format->u.audio.rate * format->u.audio.channels * format->u.audio.depth / 8; case WG_MAJOR_TYPE_AUDIO_MPEG4: - case WG_MAJOR_TYPE_AUDIO_ENCODED: case WG_MAJOR_TYPE_VIDEO_H264: case WG_MAJOR_TYPE_VIDEO_INDEO: - case WG_MAJOR_TYPE_VIDEO_ENCODED: FIXME("Format %u not implemented!\n", format->major_type); return 0; @@ -488,9 
+485,9 @@ static const GUID *wg_video_format_get_mediasubtype(enum wg_video_format format) case WG_VIDEO_FORMAT_BGRA: return &MEDIASUBTYPE_ARGB32; case WG_VIDEO_FORMAT_BGRx: return &MEDIASUBTYPE_RGB32; case WG_VIDEO_FORMAT_BGR: return &MEDIASUBTYPE_RGB24; - case WG_VIDEO_FORMAT_RGBA: return &MFVideoFormat_ABGR32; case WG_VIDEO_FORMAT_RGB15: return &MEDIASUBTYPE_RGB555; case WG_VIDEO_FORMAT_RGB16: return &MEDIASUBTYPE_RGB565; + case WG_VIDEO_FORMAT_RGBA: return &MEDIASUBTYPE_ABGR32; case WG_VIDEO_FORMAT_AYUV: return &MEDIASUBTYPE_AYUV; case WG_VIDEO_FORMAT_I420: return &MEDIASUBTYPE_I420; case WG_VIDEO_FORMAT_NV12: return &MEDIASUBTYPE_NV12; @@ -517,9 +514,9 @@ static DWORD wg_video_format_get_compression(enum wg_video_format format) case WG_VIDEO_FORMAT_BGRA: return BI_RGB; case WG_VIDEO_FORMAT_BGRx: return BI_RGB; case WG_VIDEO_FORMAT_BGR: return BI_RGB; - case WG_VIDEO_FORMAT_RGBA: return BI_RGB; case WG_VIDEO_FORMAT_RGB15: return BI_RGB; case WG_VIDEO_FORMAT_RGB16: return BI_BITFIELDS; + case WG_VIDEO_FORMAT_RGBA: return BI_RGB; case WG_VIDEO_FORMAT_AYUV: return mmioFOURCC('A','Y','U','V'); case WG_VIDEO_FORMAT_I420: return mmioFOURCC('I','4','2','0'); case WG_VIDEO_FORMAT_NV12: return mmioFOURCC('N','V','1','2'); @@ -541,9 +538,9 @@ static WORD wg_video_format_get_depth(enum wg_video_format format) case WG_VIDEO_FORMAT_BGRA: return 32; case WG_VIDEO_FORMAT_BGRx: return 32; case WG_VIDEO_FORMAT_BGR: return 24; - case WG_VIDEO_FORMAT_RGBA: return 32; case WG_VIDEO_FORMAT_RGB15: return 16; case WG_VIDEO_FORMAT_RGB16: return 16; + case WG_VIDEO_FORMAT_RGBA: return 32; case WG_VIDEO_FORMAT_AYUV: return 32; case WG_VIDEO_FORMAT_I420: return 12; case WG_VIDEO_FORMAT_NV12: return 12; @@ -733,10 +730,8 @@ bool amt_from_wg_format(AM_MEDIA_TYPE *mt, const struct wg_format *format, bool switch (format->major_type) { case WG_MAJOR_TYPE_AUDIO_MPEG4: - case WG_MAJOR_TYPE_AUDIO_ENCODED: case WG_MAJOR_TYPE_VIDEO_H264: case WG_MAJOR_TYPE_VIDEO_INDEO: - case 
WG_MAJOR_TYPE_VIDEO_ENCODED: FIXME("Format %u not implemented!\n", format->major_type); /* fallthrough */ case WG_MAJOR_TYPE_UNKNOWN: @@ -944,7 +939,6 @@ static bool amt_to_wg_format_video(const AM_MEDIA_TYPE *mt, struct wg_format *fo {&MEDIASUBTYPE_ARGB32, WG_VIDEO_FORMAT_BGRA}, {&MEDIASUBTYPE_RGB32, WG_VIDEO_FORMAT_BGRx}, {&MEDIASUBTYPE_RGB24, WG_VIDEO_FORMAT_BGR}, - {&MFVideoFormat_ABGR32, WG_VIDEO_FORMAT_RGBA}, {&MEDIASUBTYPE_RGB555, WG_VIDEO_FORMAT_RGB15}, {&MEDIASUBTYPE_RGB565, WG_VIDEO_FORMAT_RGB16}, {&MEDIASUBTYPE_AYUV, WG_VIDEO_FORMAT_AYUV}, @@ -1454,7 +1448,7 @@ static HRESULT parser_init_stream(struct strmbase_filter *iface) { ret = amt_to_wg_format(&source->pin.pin.mt, &format); assert(ret); - wg_parser_stream_enable(source->wg_stream, &format, STREAM_ENABLE_FLAG_FLIP_RGB); + wg_parser_stream_enable(source->wg_stream, &format); } else { @@ -1648,7 +1642,7 @@ static HRESULT decodebin_parser_source_get_media_type(struct parser_source *pin, WG_VIDEO_FORMAT_RGB15, }; - wg_parser_stream_get_preferred_format(pin->wg_stream, &format); + wg_parser_stream_get_current_format(pin->wg_stream, &format); memset(mt, 0, sizeof(AM_MEDIA_TYPE)); @@ -2255,7 +2249,7 @@ static HRESULT wave_parser_source_query_accept(struct parser_source *pin, const AM_MEDIA_TYPE pad_mt; HRESULT hr; - wg_parser_stream_get_preferred_format(pin->wg_stream, &format); + wg_parser_stream_get_current_format(pin->wg_stream, &format); if (!amt_from_wg_format(&pad_mt, &format, false)) return E_OUTOFMEMORY; hr = compare_media_types(mt, &pad_mt) ? 
S_OK : S_FALSE; @@ -2270,7 +2264,7 @@ static HRESULT wave_parser_source_get_media_type(struct parser_source *pin, if (index > 0) return VFW_S_NO_MORE_ITEMS; - wg_parser_stream_get_preferred_format(pin->wg_stream, &format); + wg_parser_stream_get_current_format(pin->wg_stream, &format); if (!amt_from_wg_format(mt, &format, false)) return E_OUTOFMEMORY; return S_OK; @@ -2333,7 +2327,7 @@ static HRESULT avi_splitter_source_query_accept(struct parser_source *pin, const AM_MEDIA_TYPE pad_mt; HRESULT hr; - wg_parser_stream_get_preferred_format(pin->wg_stream, &format); + wg_parser_stream_get_current_format(pin->wg_stream, &format); if (!amt_from_wg_format(&pad_mt, &format, false)) return E_OUTOFMEMORY; hr = compare_media_types(mt, &pad_mt) ? S_OK : S_FALSE; @@ -2348,7 +2342,7 @@ static HRESULT avi_splitter_source_get_media_type(struct parser_source *pin, if (index > 0) return VFW_S_NO_MORE_ITEMS; - wg_parser_stream_get_preferred_format(pin->wg_stream, &format); + wg_parser_stream_get_current_format(pin->wg_stream, &format); if (!amt_from_wg_format(mt, &format, false)) return E_OUTOFMEMORY; return S_OK; @@ -2404,7 +2398,7 @@ static BOOL mpeg_splitter_filter_init_gst(struct parser *filter) for (i = 0; i < stream_count; ++i) { stream = wg_parser_get_stream(parser, i); - wg_parser_stream_get_preferred_format(stream, &fmt); + wg_parser_stream_get_current_format(stream, &fmt); if (fmt.major_type == WG_MAJOR_TYPE_VIDEO_MPEG1) { if (!create_pin(filter, wg_parser_get_stream(parser, i), L"Video")) @@ -2427,7 +2421,7 @@ static HRESULT mpeg_splitter_source_query_accept(struct parser_source *pin, cons AM_MEDIA_TYPE pad_mt; HRESULT hr; - wg_parser_stream_get_preferred_format(pin->wg_stream, &format); + wg_parser_stream_get_current_format(pin->wg_stream, &format); if (!amt_from_wg_format(&pad_mt, &format, false)) return E_OUTOFMEMORY; hr = compare_media_types(mt, &pad_mt) ? 
S_OK : S_FALSE; @@ -2442,7 +2436,7 @@ static HRESULT mpeg_splitter_source_get_media_type(struct parser_source *pin, if (index > 0) return VFW_S_NO_MORE_ITEMS; - wg_parser_stream_get_preferred_format(pin->wg_stream, &format); + wg_parser_stream_get_current_format(pin->wg_stream, &format); if (!amt_from_wg_format(mt, &format, false)) return E_OUTOFMEMORY; return S_OK; diff --git a/dlls/winegstreamer/quartz_transform.c b/dlls/winegstreamer/quartz_transform.c index b85b24f4278..ff57ea02ed0 100644 --- a/dlls/winegstreamer/quartz_transform.c +++ b/dlls/winegstreamer/quartz_transform.c @@ -737,39 +737,24 @@ static const struct transform_ops mpeg_audio_codec_transform_ops = HRESULT mpeg_audio_codec_create(IUnknown *outer, IUnknown **out) { - static const struct wg_format output_format = + static const WAVEFORMATEX output_format = { - .major_type = WG_MAJOR_TYPE_AUDIO, - .u.audio = - { - .format = WG_AUDIO_FORMAT_S16LE, - .channel_mask = 1, - .channels = 1, - .rate = 44100, - }, + .wFormatTag = WAVE_FORMAT_PCM, .wBitsPerSample = 16, .nSamplesPerSec = 44100, .nChannels = 1, }; - static const struct wg_format input_format = + static const MPEG1WAVEFORMAT input_format = { - .major_type = WG_MAJOR_TYPE_AUDIO_MPEG1, - .u.audio = - { - .layer = 2, - .channels = 1, - .rate = 44100, - }, + .wfx = {.wFormatTag = WAVE_FORMAT_MPEG, .nSamplesPerSec = 44100, .nChannels = 1, + .cbSize = sizeof(input_format) - sizeof(WAVEFORMATEX)}, + .fwHeadLayer = 2, }; - struct wg_transform_attrs attrs = {0}; - wg_transform_t transform; struct transform *object; HRESULT hr; - transform = wg_transform_create(&input_format, &output_format, &attrs); - if (!transform) + if (FAILED(hr = check_audio_transform_support(&input_format.wfx, &output_format))) { ERR_(winediag)("GStreamer doesn't support MPEG-1 audio decoding, please install appropriate plugins.\n"); - return E_FAIL; + return hr; } - wg_transform_destroy(transform); hr = transform_create(outer, &CLSID_CMpegAudioCodec, 
&mpeg_audio_codec_transform_ops, &object); if (FAILED(hr)) @@ -889,31 +874,26 @@ static const struct transform_ops mpeg_video_codec_transform_ops = HRESULT mpeg_video_codec_create(IUnknown *outer, IUnknown **out) { - static const struct wg_format output_format = + const MFVIDEOFORMAT output_format = { - .major_type = WG_MAJOR_TYPE_VIDEO, - .u.video = { - .format = WG_VIDEO_FORMAT_I420, - /* size doesn't matter, this one is only used to check if the GStreamer plugin exists */ - }, + .dwSize = sizeof(MFVIDEOFORMAT), + .videoInfo = {.dwWidth = 1920, .dwHeight = 1080}, + .guidFormat = MEDIASUBTYPE_NV12, }; - static const struct wg_format input_format = + const MFVIDEOFORMAT input_format = { - .major_type = WG_MAJOR_TYPE_VIDEO_MPEG1, - .u.video = {}, + .dwSize = sizeof(MFVIDEOFORMAT), + .videoInfo = {.dwWidth = 1920, .dwHeight = 1080}, + .guidFormat = MEDIASUBTYPE_MPEG1Payload, }; - struct wg_transform_attrs attrs = {0}; - wg_transform_t transform; struct transform *object; HRESULT hr; - transform = wg_transform_create(&input_format, &output_format, &attrs); - if (!transform) + if (FAILED(hr = check_video_transform_support(&input_format, &output_format))) { ERR_(winediag)("GStreamer doesn't support MPEG-1 video decoding, please install appropriate plugins.\n"); - return E_FAIL; + return hr; } - wg_transform_destroy(transform); hr = transform_create(outer, &CLSID_CMpegVideoCodec, &mpeg_video_codec_transform_ops, &object); if (FAILED(hr)) @@ -1015,39 +995,23 @@ static const struct transform_ops mpeg_layer3_decoder_transform_ops = HRESULT mpeg_layer3_decoder_create(IUnknown *outer, IUnknown **out) { - static const struct wg_format output_format = + static const WAVEFORMATEX output_format = { - .major_type = WG_MAJOR_TYPE_AUDIO, - .u.audio = - { - .format = WG_AUDIO_FORMAT_S16LE, - .channel_mask = 1, - .channels = 1, - .rate = 44100, - }, + .wFormatTag = WAVE_FORMAT_PCM, .wBitsPerSample = 16, .nSamplesPerSec = 44100, .nChannels = 1, }; - static const struct wg_format 
input_format = + static const MPEGLAYER3WAVEFORMAT input_format = { - .major_type = WG_MAJOR_TYPE_AUDIO_MPEG1, - .u.audio = - { - .layer = 3, - .channels = 1, - .rate = 44100, - }, + .wfx = {.wFormatTag = WAVE_FORMAT_MPEGLAYER3, .nSamplesPerSec = 44100, .nChannels = 1, + .cbSize = sizeof(input_format) - sizeof(WAVEFORMATEX)}, }; - struct wg_transform_attrs attrs = {0}; - wg_transform_t transform; struct transform *object; HRESULT hr; - transform = wg_transform_create(&input_format, &output_format, &attrs); - if (!transform) + if (FAILED(hr = check_audio_transform_support(&input_format.wfx, &output_format))) { - ERR_(winediag)("GStreamer doesn't support MPEG-1 audio decoding, please install appropriate plugins.\n"); - return E_FAIL; + ERR_(winediag)("GStreamer doesn't support MP3 audio decoding, please install appropriate plugins.\n"); + return hr; } - wg_transform_destroy(transform); hr = transform_create(outer, &CLSID_mpeg_layer3_decoder, &mpeg_layer3_decoder_transform_ops, &object); if (FAILED(hr)) diff --git a/dlls/winegstreamer/resampler.c b/dlls/winegstreamer/resampler.c index b5b62d58800..c9c81cde555 100644 --- a/dlls/winegstreamer/resampler.c +++ b/dlls/winegstreamer/resampler.c @@ -45,36 +45,39 @@ struct resampler IUnknown *outer; LONG refcount; - IMFMediaType *input_type; + WAVEFORMATEX *input_format; MFT_INPUT_STREAM_INFO input_info; - IMFMediaType *output_type; + WAVEFORMATEX *output_format; MFT_OUTPUT_STREAM_INFO output_info; wg_transform_t wg_transform; struct wg_sample_queue *wg_sample_queue; }; -static HRESULT try_create_wg_transform(struct resampler *impl) +static HRESULT try_create_wg_transform(struct resampler *impl, WAVEFORMATEX *input_format, WAVEFORMATEX *output_format) { - struct wg_format input_format, output_format; + IMFMediaType *input_type, *output_type; struct wg_transform_attrs attrs = {0}; + HRESULT hr; if (impl->wg_transform) + { wg_transform_destroy(impl->wg_transform); - impl->wg_transform = 0; - - 
mf_media_type_to_wg_format(impl->input_type, &input_format); - if (input_format.major_type == WG_MAJOR_TYPE_UNKNOWN) - return MF_E_INVALIDMEDIATYPE; - - mf_media_type_to_wg_format(impl->output_type, &output_format); - if (output_format.major_type == WG_MAJOR_TYPE_UNKNOWN) - return MF_E_INVALIDMEDIATYPE; + impl->wg_transform = 0; + } - if (!(impl->wg_transform = wg_transform_create(&input_format, &output_format, &attrs))) - return E_FAIL; + if (FAILED(hr = MFCreateAudioMediaType(input_format, (IMFAudioMediaType **)&input_type))) + return hr; + if (FAILED(hr = MFCreateAudioMediaType(output_format, (IMFAudioMediaType **)&output_type))) + { + IMFMediaType_Release(input_type); + return hr; + } - return S_OK; + hr = wg_transform_create_mf(input_type, output_type, &attrs, &impl->wg_transform); + IMFMediaType_Release(input_type); + IMFMediaType_Release(output_type); + return hr; } static inline struct resampler *impl_from_IUnknown(IUnknown *iface) @@ -132,10 +135,10 @@ static ULONG WINAPI unknown_Release(IUnknown *iface) { if (impl->wg_transform) wg_transform_destroy(impl->wg_transform); - if (impl->input_type) - IMFMediaType_Release(impl->input_type); - if (impl->output_type) - IMFMediaType_Release(impl->output_type); + if (impl->input_format) + CoTaskMemFree(impl->input_format); + if (impl->output_format) + CoTaskMemFree(impl->output_format); wg_sample_queue_destroy(impl->wg_sample_queue); free(impl); @@ -201,7 +204,7 @@ static HRESULT WINAPI transform_GetInputStreamInfo(IMFTransform *iface, DWORD id TRACE("iface %p, id %#lx, info %p.\n", iface, id, info); - if (!impl->input_type || !impl->output_type) + if (!impl->input_format || !impl->output_format) { memset(info, 0, sizeof(*info)); return MF_E_TRANSFORM_TYPE_NOT_SET; @@ -217,7 +220,7 @@ static HRESULT WINAPI transform_GetOutputStreamInfo(IMFTransform *iface, DWORD i TRACE("iface %p, id %#lx, info %p.\n", iface, id, info); - if (!impl->input_type || !impl->output_type) + if (!impl->input_format || 
!impl->output_format) { memset(info, 0, sizeof(*info)); return MF_E_TRANSFORM_TYPE_NOT_SET; @@ -369,34 +372,36 @@ static HRESULT check_media_type(IMFMediaType *type) static HRESULT WINAPI transform_SetInputType(IMFTransform *iface, DWORD id, IMFMediaType *type, DWORD flags) { struct resampler *impl = impl_from_IMFTransform(iface); - UINT32 block_alignment; + WAVEFORMATEX *format; + UINT32 value; HRESULT hr; TRACE("iface %p, id %#lx, type %p, flags %#lx.\n", iface, id, type, flags); if (FAILED(hr = check_media_type(type))) return hr; - if (FAILED(hr = IMFMediaType_GetUINT32(type, &MF_MT_AUDIO_BLOCK_ALIGNMENT, &block_alignment))) + if (FAILED(hr = IMFMediaType_GetUINT32(type, &MF_MT_AUDIO_BLOCK_ALIGNMENT, &value))) return MF_E_INVALIDMEDIATYPE; if (flags & MFT_SET_TYPE_TEST_ONLY) return S_OK; - if (!impl->input_type && FAILED(hr = MFCreateMediaType(&impl->input_type))) - return hr; - - if (impl->output_type) + if (impl->input_format) + { + CoTaskMemFree(impl->input_format); + impl->input_format = NULL; + } + if (impl->output_format) { - IMFMediaType_Release(impl->output_type); - impl->output_type = NULL; + CoTaskMemFree(impl->output_format); + impl->output_format = NULL; } - if (SUCCEEDED(hr = IMFMediaType_CopyAllItems(type, (IMFAttributes *)impl->input_type))) - impl->input_info.cbSize = block_alignment; - else + if (SUCCEEDED(hr = MFCreateWaveFormatExFromMFMediaType(type, &format, &value, 0))) { - IMFMediaType_Release(impl->input_type); - impl->input_info.cbSize = 0; - impl->input_type = NULL; + format->nBlockAlign = format->wBitsPerSample * format->nChannels / 8; + format->nAvgBytesPerSec = format->nSamplesPerSec * format->nBlockAlign; + impl->input_info.cbSize = format->nBlockAlign; + impl->input_format = format; } return hr; @@ -405,82 +410,69 @@ static HRESULT WINAPI transform_SetInputType(IMFTransform *iface, DWORD id, IMFM static HRESULT WINAPI transform_SetOutputType(IMFTransform *iface, DWORD id, IMFMediaType *type, DWORD flags) { struct resampler *impl = 
impl_from_IMFTransform(iface); - UINT32 block_alignment; + WAVEFORMATEX *format; + UINT32 value; HRESULT hr; TRACE("iface %p, id %#lx, type %p, flags %#lx.\n", iface, id, type, flags); - if (!impl->input_type) + if (!impl->input_format) return MF_E_TRANSFORM_TYPE_NOT_SET; if (FAILED(hr = check_media_type(type))) return hr; - if (FAILED(hr = IMFMediaType_GetUINT32(type, &MF_MT_AUDIO_BLOCK_ALIGNMENT, &block_alignment))) + if (FAILED(hr = IMFMediaType_GetUINT32(type, &MF_MT_AUDIO_BLOCK_ALIGNMENT, &value))) return MF_E_INVALIDMEDIATYPE; if (flags & MFT_SET_TYPE_TEST_ONLY) return S_OK; - if (!impl->output_type && FAILED(hr = MFCreateMediaType(&impl->output_type))) - return hr; - - if (FAILED(hr = IMFMediaType_CopyAllItems(type, (IMFAttributes *)impl->output_type))) - goto failed; + if (impl->output_format) + { + CoTaskMemFree(impl->output_format); + impl->output_format = NULL; + } - if (FAILED(hr = try_create_wg_transform(impl))) - goto failed; + if (SUCCEEDED(hr = MFCreateWaveFormatExFromMFMediaType(type, &format, &value, 0))) + { + format->nBlockAlign = format->wBitsPerSample * format->nChannels / 8; + format->nAvgBytesPerSec = format->nSamplesPerSec * format->nBlockAlign; - impl->output_info.cbSize = block_alignment; - return hr; + if (FAILED(hr = try_create_wg_transform(impl, impl->input_format, format))) + CoTaskMemFree(format); + else + { + impl->output_info.cbSize = format->nBlockAlign; + impl->output_format = format; + } + } -failed: - IMFMediaType_Release(impl->output_type); - impl->output_info.cbSize = 0; - impl->output_type = NULL; return hr; } static HRESULT WINAPI transform_GetInputCurrentType(IMFTransform *iface, DWORD id, IMFMediaType **type) { struct resampler *impl = impl_from_IMFTransform(iface); - HRESULT hr; TRACE("iface %p, id %#lx, type %p.\n", iface, id, type); if (id != 0) return MF_E_INVALIDSTREAMNUMBER; - - if (!impl->input_type) + if (!impl->input_format) return MF_E_TRANSFORM_TYPE_NOT_SET; - - if (FAILED(hr = MFCreateMediaType(type))) - 
return hr; - - if (FAILED(hr = IMFMediaType_CopyAllItems(impl->input_type, (IMFAttributes *)*type))) - IMFMediaType_Release(*type); - - return hr; + return MFCreateAudioMediaType(impl->input_format, (IMFAudioMediaType **)type); } static HRESULT WINAPI transform_GetOutputCurrentType(IMFTransform *iface, DWORD id, IMFMediaType **type) { struct resampler *impl = impl_from_IMFTransform(iface); - HRESULT hr; TRACE("iface %p, id %#lx, type %p.\n", iface, id, type); if (id != 0) return MF_E_INVALIDSTREAMNUMBER; - - if (!impl->output_type) + if (!impl->output_format) return MF_E_TRANSFORM_TYPE_NOT_SET; - - if (FAILED(hr = MFCreateMediaType(type))) - return hr; - - if (FAILED(hr = IMFMediaType_CopyAllItems(impl->output_type, (IMFAttributes *)*type))) - IMFMediaType_Release(*type); - - return hr; + return MFCreateAudioMediaType(impl->output_format, (IMFAudioMediaType **)type); } static HRESULT WINAPI transform_GetInputStatus(IMFTransform *iface, DWORD id, DWORD *flags) @@ -551,7 +543,7 @@ static HRESULT WINAPI transform_ProcessOutput(IMFTransform *iface, DWORD flags, return hr; if (SUCCEEDED(hr = wg_transform_read_mf(impl->wg_transform, samples->pSample, - info.cbSize, NULL, &samples->dwStatus))) + info.cbSize, &samples->dwStatus))) wg_sample_queue_flush(impl->wg_sample_queue, false); return hr; @@ -918,41 +910,24 @@ static const IWMResamplerPropsVtbl resampler_props_vtbl = HRESULT resampler_create(IUnknown *outer, IUnknown **out) { - static const struct wg_format input_format = + static const WAVEFORMATEX output_format = { - .major_type = WG_MAJOR_TYPE_AUDIO, - .u.audio = - { - .format = WG_AUDIO_FORMAT_S16LE, - .channel_mask = 1, - .channels = 1, - .rate = 44100, - }, + .wFormatTag = WAVE_FORMAT_IEEE_FLOAT, .wBitsPerSample = 32, .nSamplesPerSec = 44100, .nChannels = 1, }; - static const struct wg_format output_format = + static const WAVEFORMATEX input_format = { - .major_type = WG_MAJOR_TYPE_AUDIO, - .u.audio = - { - .format = WG_AUDIO_FORMAT_F32LE, - .channel_mask = 1, - 
.channels = 1, - .rate = 44100, - }, + .wFormatTag = WAVE_FORMAT_PCM, .wBitsPerSample = 16, .nSamplesPerSec = 44100, .nChannels = 1, }; - struct wg_transform_attrs attrs = {0}; - wg_transform_t transform; struct resampler *impl; HRESULT hr; TRACE("outer %p, out %p.\n", outer, out); - if (!(transform = wg_transform_create(&input_format, &output_format, &attrs))) + if (FAILED(hr = check_audio_transform_support(&input_format, &output_format))) { ERR_(winediag)("GStreamer doesn't support audio resampling, please install appropriate plugins.\n"); - return E_FAIL; + return hr; } - wg_transform_destroy(transform); if (!(impl = calloc(1, sizeof(*impl)))) return E_OUTOFMEMORY; diff --git a/dlls/winegstreamer/unix_private.h b/dlls/winegstreamer/unix_private.h index 48def4245ec..cc842c53ecf 100644 --- a/dlls/winegstreamer/unix_private.h +++ b/dlls/winegstreamer/unix_private.h @@ -25,6 +25,7 @@ #include #include +#include #include #include @@ -57,6 +58,7 @@ extern void set_max_threads(GstElement *element); extern void wg_format_from_caps(struct wg_format *format, const GstCaps *caps); extern bool wg_format_compare(const struct wg_format *a, const struct wg_format *b); extern GstCaps *wg_format_to_caps(const struct wg_format *format); +extern uint32_t wg_channel_mask_from_gst(const GstAudioInfo *info); /* wg_source.c */ @@ -68,7 +70,7 @@ extern NTSTATUS wg_source_get_position(void *args); extern NTSTATUS wg_source_set_position(void *args); extern NTSTATUS wg_source_push_data(void *args); extern NTSTATUS wg_source_read_data(void *args); -extern NTSTATUS wg_source_get_stream_format(void *args); +extern NTSTATUS wg_source_get_stream_type(void *args); extern NTSTATUS wg_source_get_stream_tag(void *args); extern NTSTATUS wg_source_set_stream_flags(void *args); @@ -76,7 +78,8 @@ extern NTSTATUS wg_source_set_stream_flags(void *args); extern NTSTATUS wg_transform_create(void *args); extern NTSTATUS wg_transform_destroy(void *args); -extern NTSTATUS wg_transform_set_output_format(void 
*args); +extern NTSTATUS wg_transform_get_output_type(void *args); +extern NTSTATUS wg_transform_set_output_type(void *args); extern NTSTATUS wg_transform_push_data(void *args); extern NTSTATUS wg_transform_read_data(void *args); extern NTSTATUS wg_transform_get_status(void *args); @@ -84,6 +87,12 @@ extern NTSTATUS wg_transform_drain(void *args); extern NTSTATUS wg_transform_flush(void *args); extern NTSTATUS wg_transform_notify_qos(void *args); +/* wg_media_type.c */ + +extern GstCaps *caps_from_media_type(const struct wg_media_type *media_type); +extern NTSTATUS caps_to_media_type(GstCaps *caps, struct wg_media_type *media_type, + UINT32 video_plane_align); + /* wg_muxer.c */ extern NTSTATUS wg_muxer_create(void *args); @@ -115,8 +124,7 @@ extern void wg_allocator_release_sample(GstAllocator *allocator, struct wg_sampl /* media-converter */ extern bool media_converter_init(void); -extern bool get_untranscoded_stream_format(GstElement *container, uint32_t stream_index, - struct wg_format *codec_format); +extern bool get_untranscoded_stream_format(GstElement *container, uint32_t stream_index, GstCaps *caps); static inline void touch_h264_used_tag(void) { diff --git a/dlls/winegstreamer/unixlib.c b/dlls/winegstreamer/unixlib.c index a285db2c866..5c2a14ee2f9 100644 --- a/dlls/winegstreamer/unixlib.c +++ b/dlls/winegstreamer/unixlib.c @@ -150,6 +150,10 @@ GstElement *find_element(GstElementFactoryListType type, GstCaps *element_sink_c continue; } + /* ignore protonvideoconverter when manually creating element, use protondemuxer instead */ + if (!strcmp(name, "protonvideoconverter")) + continue; + element = factory_create_element(GST_ELEMENT_FACTORY(tmp->data)); } diff --git a/dlls/winegstreamer/unixlib.h b/dlls/winegstreamer/unixlib.h index 7fe6bc284d4..f9b87e51b93 100644 --- a/dlls/winegstreamer/unixlib.h +++ b/dlls/winegstreamer/unixlib.h @@ -26,9 +26,40 @@ #include "winternl.h" #include "wtypes.h" #include "mmreg.h" +#include "vfw.h" +#include "dshow.h" +#include 
"dvdmedia.h" +#include "mfobjects.h" #include "wine/unixlib.h" +/* same as MPEG1VIDEOINFO / MPEG2VIDEOINFO but with MFVIDEOFORMAT */ +struct mpeg_video_format +{ + MFVIDEOFORMAT hdr; + UINT32 start_time_code; + UINT32 profile; + UINT32 level; + UINT32 flags; + UINT32 sequence_header_count; + UINT32 __pad; + BYTE sequence_header[]; +}; + +C_ASSERT(sizeof(struct mpeg_video_format) == offsetof(struct mpeg_video_format, sequence_header[0])); + +struct wg_media_type +{ + GUID major; + UINT32 format_size; + union + { + void *format; + WAVEFORMATEX *audio; + MFVIDEOFORMAT *video; + } u; +}; + typedef UINT32 wg_major_type; enum wg_major_type { @@ -37,14 +68,12 @@ enum wg_major_type WG_MAJOR_TYPE_AUDIO_MPEG1, WG_MAJOR_TYPE_AUDIO_MPEG4, WG_MAJOR_TYPE_AUDIO_WMA, - WG_MAJOR_TYPE_AUDIO_ENCODED, WG_MAJOR_TYPE_VIDEO, WG_MAJOR_TYPE_VIDEO_CINEPAK, WG_MAJOR_TYPE_VIDEO_H264, WG_MAJOR_TYPE_VIDEO_WMV, WG_MAJOR_TYPE_VIDEO_INDEO, WG_MAJOR_TYPE_VIDEO_MPEG1, - WG_MAJOR_TYPE_VIDEO_ENCODED, }; typedef UINT32 wg_audio_format; @@ -68,9 +97,9 @@ enum wg_video_format WG_VIDEO_FORMAT_BGRA, WG_VIDEO_FORMAT_BGRx, WG_VIDEO_FORMAT_BGR, - WG_VIDEO_FORMAT_RGBA, WG_VIDEO_FORMAT_RGB15, WG_VIDEO_FORMAT_RGB16, + WG_VIDEO_FORMAT_RGBA, WG_VIDEO_FORMAT_AYUV, WG_VIDEO_FORMAT_I420, @@ -99,8 +128,7 @@ struct wg_format * MPEG1: channels, rate, layer. * MPEG4: payload_type, codec_data_len, codec_data. * WMA: channels, rate, bitrate, depth, block_align, version, layer, - * payload_type, codec_data_len, codec_data, is_xma. - * ENCODED: channels, rate, caps. 
*/ + * payload_type, codec_data_len, codec_data */ struct { wg_audio_format format; @@ -117,7 +145,6 @@ struct wg_format uint32_t codec_data_len; unsigned char codec_data[64]; UINT8 is_xma; - char caps[512]; } audio; /* Valid members for different video formats: @@ -142,7 +169,6 @@ struct wg_format uint32_t version; uint32_t codec_data_len; unsigned char codec_data[64]; - char caps[512]; } video; } u; }; @@ -244,7 +270,13 @@ struct wg_parser_get_stream_params wg_parser_stream_t stream; }; -struct wg_parser_stream_get_preferred_format_params +struct wg_parser_stream_get_current_type_params +{ + wg_parser_stream_t stream; + struct wg_media_type media_type; +}; + +struct wg_parser_stream_get_current_format_params { wg_parser_stream_t stream; struct wg_format *format; @@ -256,13 +288,16 @@ struct wg_parser_stream_get_codec_format_params struct wg_format *format; }; -#define STREAM_ENABLE_FLAG_FLIP_RGB 0x1 - struct wg_parser_stream_enable_params { wg_parser_stream_t stream; const struct wg_format *format; - uint32_t flags; +}; + +struct wg_parser_stream_enable_type_params +{ + wg_parser_stream_t stream; + struct wg_media_type media_type; }; struct wg_parser_stream_get_buffer_params @@ -356,6 +391,7 @@ struct wg_source_set_position_params struct wg_source_push_data_params { wg_source_t source; + UINT64 offset; const void *data; UINT32 size; }; @@ -367,11 +403,11 @@ struct wg_source_read_data_params struct wg_sample *sample; }; -struct wg_source_get_stream_format_params +struct wg_source_get_stream_type_params { wg_source_t source; UINT32 index; - struct wg_format format; + struct wg_media_type media_type; }; struct wg_source_get_stream_tag_params @@ -394,16 +430,16 @@ struct wg_transform_attrs { UINT32 output_plane_align; UINT32 input_queue_length; - BOOL allow_size_change; + BOOL allow_format_change; BOOL low_latency; }; struct wg_transform_create_params { wg_transform_t transform; - const struct wg_format *input_format; - const struct wg_format *output_format; - const 
struct wg_transform_attrs *attrs; + struct wg_media_type input_type; + struct wg_media_type output_type; + struct wg_transform_attrs attrs; }; struct wg_transform_push_data_params @@ -417,14 +453,19 @@ struct wg_transform_read_data_params { wg_transform_t transform; struct wg_sample *sample; - struct wg_format *format; HRESULT result; }; -struct wg_transform_set_output_format_params +struct wg_transform_get_output_type_params { wg_transform_t transform; - const struct wg_format *format; + struct wg_media_type media_type; +}; + +struct wg_transform_set_output_type_params +{ + wg_transform_t transform; + struct wg_media_type media_type; }; struct wg_transform_get_status_params @@ -486,9 +527,11 @@ enum unix_funcs unix_wg_parser_get_stream_count, unix_wg_parser_get_stream, - unix_wg_parser_stream_get_preferred_format, + unix_wg_parser_stream_get_current_type, + unix_wg_parser_stream_get_current_format, unix_wg_parser_stream_get_codec_format, unix_wg_parser_stream_enable, + unix_wg_parser_stream_enable_type, unix_wg_parser_stream_disable, unix_wg_parser_stream_get_buffer, @@ -508,13 +551,14 @@ enum unix_funcs unix_wg_source_set_position, unix_wg_source_push_data, unix_wg_source_read_data, - unix_wg_source_get_stream_format, + unix_wg_source_get_stream_type, unix_wg_source_get_stream_tag, unix_wg_source_set_stream_flags, unix_wg_transform_create, unix_wg_transform_destroy, - unix_wg_transform_set_output_format, + unix_wg_transform_get_output_type, + unix_wg_transform_set_output_type, unix_wg_transform_push_data, unix_wg_transform_read_data, diff --git a/dlls/winegstreamer/video_decoder.c b/dlls/winegstreamer/video_decoder.c index 32f68809d3a..ab937c83b77 100644 --- a/dlls/winegstreamer/video_decoder.c +++ b/dlls/winegstreamer/video_decoder.c @@ -36,8 +36,6 @@ WINE_DEFAULT_DEBUG_CHANNEL(mfplat); WINE_DECLARE_DEBUG_CHANNEL(winediag); -extern GUID MFVideoFormat_GStreamer; - struct subtype_info { const GUID *subtype; @@ -67,6 +65,7 @@ static const struct subtype_info 
subtype_info_list[] = { &MEDIASUBTYPE_RGB32, 32, BI_RGB }, }; +extern GUID MFVideoFormat_GStreamer; static const GUID *const video_decoder_input_types[] = { &MFVideoFormat_GStreamer, @@ -78,6 +77,14 @@ static const GUID *const video_decoder_output_types[] = &MFVideoFormat_IYUV, &MFVideoFormat_I420, &MFVideoFormat_YUY2, + &MFVideoFormat_UYVY, + &MFVideoFormat_YVYU, + &MFVideoFormat_NV11, + &MFVideoFormat_RGB32, + &MFVideoFormat_RGB24, + &MFVideoFormat_RGB565, + &MFVideoFormat_RGB555, + &MFVideoFormat_RGB8, }; struct video_decoder @@ -244,28 +251,36 @@ static struct video_decoder *impl_from_IMFTransform(IMFTransform *iface) return CONTAINING_RECORD(iface, struct video_decoder, IMFTransform_iface); } -static HRESULT try_create_wg_transform(struct video_decoder *decoder) +static HRESULT normalize_stride(IMFMediaType *media_type, IMFMediaType **ret) +{ + DMO_MEDIA_TYPE amt; + HRESULT hr; + + if (SUCCEEDED(hr = MFInitAMMediaTypeFromMFMediaType(media_type, FORMAT_VideoInfo, &amt))) + { + VIDEOINFOHEADER *vih = (VIDEOINFOHEADER *)amt.pbFormat; + vih->bmiHeader.biHeight = abs(vih->bmiHeader.biHeight); + hr = MFCreateMediaTypeFromRepresentation(AM_MEDIA_TYPE_REPRESENTATION, &amt, ret); + FreeMediaType(&amt); + } + + return hr; +} + +static HRESULT try_create_wg_transform(struct video_decoder *decoder, IMFMediaType *output_type) { /* Call of Duty: Black Ops 3 doesn't care about the ProcessInput/ProcessOutput * return values, it calls them in a specific order and expects the decoder * transform to be able to queue its input buffers. We need to use a buffer list * to match its expectations. 
*/ - struct wg_format input_format; - struct wg_format output_format; UINT32 low_latency; if (decoder->wg_transform) + { wg_transform_destroy(decoder->wg_transform); - decoder->wg_transform = 0; - - mf_media_type_to_wg_format(decoder->input_type, &input_format); - if (input_format.major_type == WG_MAJOR_TYPE_UNKNOWN) - return MF_E_INVALIDMEDIATYPE; - - mf_media_type_to_wg_format(decoder->output_type, &output_format); - if (output_format.major_type == WG_MAJOR_TYPE_UNKNOWN) - return MF_E_INVALIDMEDIATYPE; + decoder->wg_transform = 0; + } if (SUCCEEDED(IMFAttributes_GetUINT32(decoder->attributes, &MF_LOW_LATENCY, &low_latency))) decoder->wg_transform_attrs.low_latency = !!low_latency; @@ -276,13 +291,7 @@ static HRESULT try_create_wg_transform(struct video_decoder *decoder) decoder->wg_transform_attrs.low_latency = FALSE; } - if (!(decoder->wg_transform = wg_transform_create(&input_format, &output_format, &decoder->wg_transform_attrs))) - { - ERR("Failed to create transform with input major_type %u.\n", input_format.major_type); - return E_FAIL; - } - - return S_OK; + return wg_transform_create_mf(decoder->input_type, output_type, &decoder->wg_transform_attrs, &decoder->wg_transform); } static HRESULT create_output_media_type(struct video_decoder *decoder, const GUID *subtype, @@ -375,9 +384,15 @@ static HRESULT create_output_media_type(struct video_decoder *decoder, const GUI if (FAILED(hr = IMFVideoMediaType_SetBlob(video_type, &MF_MT_PAN_SCAN_APERTURE, (BYTE *)&aperture, sizeof(aperture)))) goto done; - IMFMediaType_AddRef((*media_type = (IMFMediaType *)video_type)); done: - IMFVideoMediaType_Release(video_type); + if (SUCCEEDED(hr)) + *media_type = (IMFMediaType *)video_type; + else + { + IMFVideoMediaType_Release(video_type); + *media_type = NULL; + } + return hr; } @@ -566,6 +581,27 @@ static HRESULT WINAPI transform_SetInputType(IMFTransform *iface, DWORD id, IMFM TRACE("iface %p, id %#lx, type %p, flags %#lx.\n", iface, id, type, flags); + if (!type) + { + if 
(decoder->input_type) + { + IMFMediaType_Release(decoder->input_type); + decoder->input_type = NULL; + } + if (decoder->output_type) + { + IMFMediaType_Release(decoder->output_type); + decoder->output_type = NULL; + } + if (decoder->wg_transform) + { + wg_transform_destroy(decoder->wg_transform); + decoder->wg_transform = 0; + } + + return S_OK; + } + if (FAILED(hr = IMFMediaType_GetGUID(type, &MF_MT_MAJOR_TYPE, &major)) || FAILED(hr = IMFMediaType_GetGUID(type, &MF_MT_SUBTYPE, &subtype))) return E_INVALIDARG; @@ -611,12 +647,29 @@ static HRESULT WINAPI transform_SetOutputType(IMFTransform *iface, DWORD id, IMF { struct video_decoder *decoder = impl_from_IMFTransform(iface); UINT64 frame_size, stream_frame_size; + IMFMediaType *output_type; GUID major, subtype; HRESULT hr; ULONG i; TRACE("iface %p, id %#lx, type %p, flags %#lx.\n", iface, id, type, flags); + if (!type) + { + if (decoder->output_type) + { + IMFMediaType_Release(decoder->output_type); + decoder->output_type = NULL; + } + if (decoder->wg_transform) + { + wg_transform_destroy(decoder->wg_transform); + decoder->wg_transform = 0; + } + + return S_OK; + } + if (!decoder->input_type) return MF_E_TRANSFORM_TYPE_NOT_SET; @@ -645,32 +698,38 @@ static HRESULT WINAPI transform_SetOutputType(IMFTransform *iface, DWORD id, IMF IMFMediaType_Release(decoder->output_type); IMFMediaType_AddRef((decoder->output_type = type)); - if (decoder->wg_transform) + /* WMV decoder outputs RGB formats with default stride forced to negative, likely a + * result of internal conversion to DMO media type */ + if (!decoder->IMediaObject_iface.lpVtbl) { - struct wg_format output_format; - mf_media_type_to_wg_format(decoder->output_type, &output_format); - - if (output_format.major_type == WG_MAJOR_TYPE_UNKNOWN - || !wg_transform_set_output_format(decoder->wg_transform, &output_format)) - { - IMFMediaType_Release(decoder->output_type); - decoder->output_type = NULL; - return MF_E_INVALIDMEDIATYPE; - } + output_type = 
decoder->output_type; + IMFMediaType_AddRef(output_type); } - else if (FAILED(hr = try_create_wg_transform(decoder))) + else if (FAILED(hr = normalize_stride(decoder->output_type, &output_type))) { IMFMediaType_Release(decoder->output_type); decoder->output_type = NULL; + return hr; } + if (decoder->wg_transform) + hr = wg_transform_set_output_type(decoder->wg_transform, output_type); + else + hr = try_create_wg_transform(decoder, output_type); + + IMFMediaType_Release(output_type); + + if (FAILED(hr)) + { + IMFMediaType_Release(decoder->output_type); + decoder->output_type = NULL; + } return hr; } static HRESULT WINAPI transform_GetInputCurrentType(IMFTransform *iface, DWORD id, IMFMediaType **type) { struct video_decoder *decoder = impl_from_IMFTransform(iface); - GUID subtype; HRESULT hr; TRACE("iface %p, id %#lx, type %p\n", iface, id, type); @@ -678,26 +737,25 @@ static HRESULT WINAPI transform_GetInputCurrentType(IMFTransform *iface, DWORD i if (!decoder->input_type) return MF_E_TRANSFORM_TYPE_NOT_SET; - if (FAILED(hr = IMFMediaType_GetGUID(decoder->output_type, &MF_MT_SUBTYPE, &subtype))) + if (FAILED(hr = MFCreateMediaType(type))) return hr; - return create_output_media_type(decoder, &subtype, decoder->output_type, type); + return IMFMediaType_CopyAllItems(decoder->input_type, (IMFAttributes *)*type); } static HRESULT WINAPI transform_GetOutputCurrentType(IMFTransform *iface, DWORD id, IMFMediaType **type) { struct video_decoder *decoder = impl_from_IMFTransform(iface); + GUID subtype; HRESULT hr; TRACE("iface %p, id %#lx, type %p\n", iface, id, type); if (!decoder->output_type) return MF_E_TRANSFORM_TYPE_NOT_SET; - - if (FAILED(hr = MFCreateMediaType(type))) + if (FAILED(hr = IMFMediaType_GetGUID(decoder->output_type, &MF_MT_SUBTYPE, &subtype))) return hr; - - return IMFMediaType_CopyAllItems(decoder->output_type, (IMFAttributes *)*type); + return create_output_media_type(decoder, &subtype, decoder->output_type, type); } static HRESULT WINAPI 
transform_GetInputStatus(IMFTransform *iface, DWORD id, DWORD *flags) @@ -758,7 +816,7 @@ static HRESULT WINAPI transform_ProcessMessage(IMFTransform *iface, MFT_MESSAGE_ return wg_transform_flush(decoder->wg_transform); case MFT_MESSAGE_NOTIFY_START_OF_STREAM: - decoder->sample_time = 0; + decoder->sample_time = -1; return S_OK; default: @@ -776,6 +834,9 @@ static HRESULT WINAPI transform_ProcessInput(IMFTransform *iface, DWORD id, IMFS if (!decoder->wg_transform) return MF_E_TRANSFORM_TYPE_NOT_SET; + if (decoder->sample_time == -1 && FAILED(IMFSample_GetSampleTime(sample, (LONGLONG *)&decoder->sample_time))) + decoder->sample_time = 0; + return wg_transform_push_mf(decoder->wg_transform, sample, decoder->wg_sample_queue); } @@ -809,16 +870,18 @@ static HRESULT output_sample(struct video_decoder *decoder, IMFSample **out, IMF return S_OK; } -static HRESULT handle_stream_type_change(struct video_decoder *decoder, const struct wg_format *format) +static HRESULT handle_stream_type_change(struct video_decoder *decoder) { UINT64 frame_size, frame_rate; - GUID subtype; HRESULT hr; if (decoder->stream_type) IMFMediaType_Release(decoder->stream_type); - if (!(decoder->stream_type = mf_media_type_from_wg_format(format))) - return E_OUTOFMEMORY; + if (FAILED(hr = wg_transform_get_output_type(decoder->wg_transform, &decoder->stream_type))) + { + WARN("Failed to get transform output type, hr %#lx\n", hr); + return hr; + } if (SUCCEEDED(IMFMediaType_GetUINT64(decoder->output_type, &MF_MT_FRAME_RATE, &frame_rate)) && FAILED(hr = IMFMediaType_SetUINT64(decoder->stream_type, &MF_MT_FRAME_RATE, frame_rate))) @@ -826,8 +889,6 @@ static HRESULT handle_stream_type_change(struct video_decoder *decoder, const st if (FAILED(hr = IMFMediaType_GetUINT64(decoder->stream_type, &MF_MT_FRAME_SIZE, &frame_size))) return hr; - if (FAILED(hr = IMFMediaType_GetGUID(decoder->stream_type, &MF_MT_SUBTYPE, &subtype))) - return hr; if (FAILED(hr = update_output_info_size(decoder, frame_size >> 32, 
frame_size))) return hr; uninit_allocator(decoder); @@ -839,7 +900,6 @@ static HRESULT WINAPI transform_ProcessOutput(IMFTransform *iface, DWORD flags, MFT_OUTPUT_DATA_BUFFER *samples, DWORD *status) { struct video_decoder *decoder = impl_from_IMFTransform(iface); - struct wg_format wg_format; UINT32 sample_size; LONGLONG duration; IMFSample *sample; @@ -889,7 +949,7 @@ static HRESULT WINAPI transform_ProcessOutput(IMFTransform *iface, DWORD flags, } if (SUCCEEDED(hr = wg_transform_read_mf(decoder->wg_transform, sample, - sample_size, &wg_format, &samples->dwStatus))) + sample_size, &samples->dwStatus))) { wg_sample_queue_flush(decoder->wg_sample_queue, false); @@ -908,7 +968,7 @@ static HRESULT WINAPI transform_ProcessOutput(IMFTransform *iface, DWORD flags, { samples[0].dwStatus |= MFT_OUTPUT_DATA_BUFFER_FORMAT_CHANGE; *status |= MFT_OUTPUT_DATA_BUFFER_FORMAT_CHANGE; - hr = handle_stream_type_change(decoder, &wg_format); + hr = handle_stream_type_change(decoder); } if (decoder->output_info.dwFlags & MFT_OUTPUT_STREAM_PROVIDES_SAMPLES) @@ -1096,7 +1156,6 @@ static HRESULT WINAPI media_object_SetInputType(IMediaObject *iface, DWORD index { struct video_decoder *decoder = impl_from_IMediaObject(iface); IMFMediaType *media_type; - unsigned int i; TRACE("iface %p, index %lu, type %p, flags %#lx.\n", iface, index, type, flags); @@ -1122,12 +1181,6 @@ static HRESULT WINAPI media_object_SetInputType(IMediaObject *iface, DWORD index if (!IsEqualGUID(&type->majortype, &MEDIATYPE_Video)) return DMO_E_TYPE_NOT_ACCEPTED; - for (i = 0; i < decoder->input_type_count; ++i) - if (IsEqualGUID(&type->subtype, get_dmo_subtype(decoder->input_types[i]))) - break; - if (i == decoder->input_type_count) - FIXME("HACK: Using a type which is not an available input type of this decoder.\n"); - if (FAILED(MFCreateMediaTypeFromRepresentation(AM_MEDIA_TYPE_REPRESENTATION, (void *)type, &media_type))) return DMO_E_TYPE_NOT_ACCEPTED; @@ -1587,7 +1640,16 @@ HRESULT video_decoder_create(REFIID 
riid, void **out) video_decoder_output_types, ARRAY_SIZE(video_decoder_output_types), NULL, &decoder))) return hr; - TRACE("Create generic video decoder transform %p.\n", &decoder->IMFTransform_iface); + decoder->input_info.dwFlags = MFT_INPUT_STREAM_WHOLE_SAMPLES | MFT_INPUT_STREAM_SINGLE_SAMPLE_PER_BUFFER + | MFT_INPUT_STREAM_FIXED_SAMPLE_SIZE; + decoder->input_info.cbSize = 0x1000; + decoder->output_info.dwFlags = MFT_OUTPUT_STREAM_WHOLE_SAMPLES | MFT_OUTPUT_STREAM_SINGLE_SAMPLE_PER_BUFFER + | MFT_OUTPUT_STREAM_FIXED_SAMPLE_SIZE; + decoder->output_info.cbSize = 1920 * 1088 * 2; + + decoder->wg_transform_attrs.allow_format_change = TRUE; + + TRACE("Created video decoder transform %p.\n", &decoder->IMFTransform_iface); hr = IMFTransform_QueryInterface(&decoder->IMFTransform_iface, riid, out); IMFTransform_Release(&decoder->IMFTransform_iface); @@ -1602,30 +1664,27 @@ static const GUID *const h264_decoder_input_types[] = HRESULT h264_decoder_create(REFIID riid, void **out) { - static const struct wg_format output_format = + const MFVIDEOFORMAT output_format = { - .major_type = WG_MAJOR_TYPE_VIDEO, - .u.video = - { - .format = WG_VIDEO_FORMAT_I420, - .width = 1920, - .height = 1080, - }, + .dwSize = sizeof(MFVIDEOFORMAT), + .videoInfo = {.dwWidth = 1920, .dwHeight = 1080}, + .guidFormat = MFVideoFormat_I420, + }; + const MFVIDEOFORMAT input_format = + { + .dwSize = sizeof(MFVIDEOFORMAT), + .guidFormat = MFVideoFormat_H264, }; - static const struct wg_format input_format = {.major_type = WG_MAJOR_TYPE_VIDEO_H264}; - struct wg_transform_attrs attrs = {0}; struct video_decoder *decoder; - wg_transform_t transform; HRESULT hr; TRACE("riid %s, out %p.\n", debugstr_guid(riid), out); - if (!(transform = wg_transform_create(&input_format, &output_format, &attrs))) + if (FAILED(hr = check_video_transform_support(&input_format, &output_format))) { ERR_(winediag)("GStreamer doesn't support H.264 decoding, please install appropriate plugins\n"); - return E_FAIL; + return hr; } 
- wg_transform_destroy(transform); if (FAILED(hr = video_decoder_create_with_types(h264_decoder_input_types, ARRAY_SIZE(h264_decoder_input_types), video_decoder_output_types, ARRAY_SIZE(video_decoder_output_types), NULL, &decoder))) @@ -1645,7 +1704,7 @@ HRESULT h264_decoder_create(REFIID riid, void **out) decoder->output_info.cbSize = 1920 * 1088 * 2; decoder->wg_transform_attrs.output_plane_align = 15; - decoder->wg_transform_attrs.allow_size_change = TRUE; + decoder->wg_transform_attrs.allow_format_change = TRUE; TRACE("Created h264 transform %p.\n", &decoder->IMFTransform_iface); @@ -1725,33 +1784,11 @@ static const GUID *const wmv_decoder_output_types[] = HRESULT wmv_decoder_create(IUnknown *outer, IUnknown **out) { - static const struct wg_format input_format = - { - .major_type = WG_MAJOR_TYPE_VIDEO_WMV, - .u.video.format = WG_VIDEO_FORMAT_WMV3, - }; - static const struct wg_format output_format = - { - .major_type = WG_MAJOR_TYPE_VIDEO, - .u.video = - { - .format = WG_VIDEO_FORMAT_NV12, - .width = 1920, - .height = 1080, - }, - }; - struct wg_transform_attrs attrs = {0}; struct video_decoder *decoder; - wg_transform_t transform; HRESULT hr; TRACE("outer %p, out %p.\n", outer, out); - if (!(transform = wg_transform_create(&input_format, &output_format, &attrs))) - FIXME_(winediag)("HACK: Create wmv decoder even if plugins are not installed.\n"); - else - wg_transform_destroy(transform); - if (FAILED(hr = video_decoder_create_with_types(wmv_decoder_input_types, ARRAY_SIZE(wmv_decoder_input_types), wmv_decoder_output_types, ARRAY_SIZE(wmv_decoder_output_types), outer, &decoder))) return hr; diff --git a/dlls/winegstreamer/video_processor.c b/dlls/winegstreamer/video_processor.c index 1cbb37dafc7..097f0fb5da6 100644 --- a/dlls/winegstreamer/video_processor.c +++ b/dlls/winegstreamer/video_processor.c @@ -23,11 +23,16 @@ #include "mftransform.h" #include "wmcodecdsp.h" +#include "initguid.h" +#include "d3d11.h" + #include "wine/debug.h" 
WINE_DEFAULT_DEBUG_CHANNEL(mfplat); WINE_DECLARE_DEBUG_CHANNEL(winediag); +extern GUID MFVideoFormat_ABGR32; + static const GUID *const input_types[] = { &MFVideoFormat_IYUV, @@ -66,6 +71,7 @@ static const GUID *const output_types[] = &MFVideoFormat_AYUV, &MFVideoFormat_RGB555, &MFVideoFormat_RGB565, + &MFVideoFormat_ABGR32, }; struct video_processor @@ -81,38 +87,334 @@ struct video_processor IMFMediaType *output_type; MFT_OUTPUT_STREAM_INFO output_info; + IMFSample *input_sample; wg_transform_t wg_transform; struct wg_sample_queue *wg_sample_queue; + + IUnknown *device_manager; + IMFVideoSampleAllocatorEx *allocator; }; +static HRESULT normalize_stride(IMFMediaType *media_type, BOOL bottom_up, IMFMediaType **ret) +{ + MFVIDEOFORMAT *format; + LONG stride; + UINT32 size; + HRESULT hr; + + if (SUCCEEDED(hr = IMFMediaType_GetUINT32(media_type, &MF_MT_DEFAULT_STRIDE, (UINT32 *)&stride))) + { + *ret = media_type; + IMFMediaType_AddRef(media_type); + return hr; + } + + if (SUCCEEDED(hr = MFCreateMFVideoFormatFromMFMediaType(media_type, &format, &size))) + { + if (bottom_up) format->videoInfo.VideoFlags |= MFVideoFlag_BottomUpLinearRep; + hr = MFCreateVideoMediaType(format, (IMFVideoMediaType **)ret); + CoTaskMemFree(format); + } + + return hr; +} + static HRESULT try_create_wg_transform(struct video_processor *impl) { - struct wg_format input_format, output_format; + BOOL bottom_up = !impl->device_manager; /* when not D3D-enabled, the transform outputs bottom up RGB buffers */ + IMFMediaType *input_type, *output_type; struct wg_transform_attrs attrs = {0}; + HRESULT hr; if (impl->wg_transform) + { wg_transform_destroy(impl->wg_transform); - impl->wg_transform = 0; + impl->wg_transform = 0; + } - mf_media_type_to_wg_format(impl->input_type, &input_format); - if (input_format.major_type == WG_MAJOR_TYPE_UNKNOWN) - return MF_E_INVALIDMEDIATYPE; + if (FAILED(hr = normalize_stride(impl->input_type, bottom_up, &input_type))) + return hr; + if (FAILED(hr = 
normalize_stride(impl->output_type, bottom_up, &output_type))) + { + IMFMediaType_Release(input_type); + return hr; + } + hr = wg_transform_create_mf(input_type, output_type, &attrs, &impl->wg_transform); + IMFMediaType_Release(output_type); + IMFMediaType_Release(input_type); - mf_media_type_to_wg_format(impl->output_type, &output_format); - if (output_format.major_type == WG_MAJOR_TYPE_UNKNOWN) - return MF_E_INVALIDMEDIATYPE; + return hr; +} + +static HRESULT video_processor_init_allocator(struct video_processor *processor) +{ + IMFVideoSampleAllocatorEx *allocator; + UINT32 count; + HRESULT hr; + + if (processor->allocator) + return S_OK; - /* prevent fps differences from failing to connect the elements */ - if (output_format.u.video.fps_d || output_format.u.video.fps_n) + if (FAILED(hr = MFCreateVideoSampleAllocatorEx(&IID_IMFVideoSampleAllocatorEx, (void **)&allocator))) + return hr; + if (FAILED(IMFAttributes_GetUINT32(processor->attributes, &MF_SA_MINIMUM_OUTPUT_SAMPLE_COUNT, &count))) + count = 2; + if (FAILED(hr = IMFVideoSampleAllocatorEx_SetDirectXManager(allocator, processor->device_manager)) + || FAILED(hr = IMFVideoSampleAllocatorEx_InitializeSampleAllocatorEx(allocator, count, max(count + 2, 10), + processor->output_attributes, processor->output_type))) { - input_format.u.video.fps_d = output_format.u.video.fps_d; - input_format.u.video.fps_n = output_format.u.video.fps_n; + IMFVideoSampleAllocatorEx_Release(allocator); + return hr; } - if (!(impl->wg_transform = wg_transform_create(&input_format, &output_format, &attrs))) + processor->allocator = allocator; + return S_OK; +} + +static HRESULT video_processor_uninit_allocator(struct video_processor *processor) +{ + HRESULT hr; + + if (!processor->allocator) + return S_OK; + + if (SUCCEEDED(hr = IMFVideoSampleAllocatorEx_UninitializeSampleAllocator(processor->allocator))) + hr = IMFVideoSampleAllocatorEx_SetDirectXManager(processor->allocator, NULL); + 
IMFVideoSampleAllocatorEx_Release(processor->allocator); + processor->allocator = NULL; + + return hr; +} + +static HRESULT video_processor_get_d3d11_resource(IMFSample *sample, ID3D11Resource **resource) +{ + IMFMediaBuffer *buffer; + DWORD count; + HRESULT hr; + + if (FAILED(IMFSample_GetBufferCount(sample, &count)) || count > 1) return E_FAIL; - return S_OK; + if (SUCCEEDED(hr = IMFSample_GetBufferByIndex(sample, 0, &buffer))) + { + IMFDXGIBuffer *dxgi_buffer; + + if (SUCCEEDED(hr = IMFMediaBuffer_QueryInterface(buffer, &IID_IMFDXGIBuffer, (void **)&dxgi_buffer))) + { + hr = IMFDXGIBuffer_GetResource(dxgi_buffer, &IID_ID3D11Resource, (void **)resource); + IMFDXGIBuffer_Release(dxgi_buffer); + } + + IMFMediaBuffer_Release(buffer); + } + + return hr; +} + +static HRESULT get_d3d11_video_device(ID3D11Resource *resource, ID3D11VideoDevice **video_device) +{ + ID3D11Device *device; + HRESULT hr; + + ID3D11Resource_GetDevice(resource, &device); + hr = ID3D11Device_QueryInterface(device, &IID_ID3D11VideoDevice, (void **)video_device); + ID3D11Device_Release(device); + return hr; +} + +static HRESULT get_d3d11_video_context(ID3D11Resource *resource, ID3D11VideoContext **video_context) +{ + ID3D11DeviceContext *context; + ID3D11Device *device; + HRESULT hr; + + ID3D11Resource_GetDevice(resource, &device); + ID3D11Device_GetImmediateContext(device, &context); + hr = ID3D11DeviceContext_QueryInterface(context, &IID_ID3D11VideoContext, (void **)video_context); + ID3D11DeviceContext_Release(context); + ID3D11Device_Release(device); + return hr; +} + +static HRESULT create_video_processor_enumerator(ID3D11Resource *resource, UINT64 input_size, UINT64 output_size, + ID3D11VideoDevice **video_device, ID3D11VideoProcessorEnumerator **enumerator) +{ + D3D11_VIDEO_PROCESSOR_CONTENT_DESC enum_desc = {0}; + HRESULT hr; + + enum_desc.InputFrameFormat = D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE; + enum_desc.InputFrameRate.Denominator = 1; + enum_desc.InputFrameRate.Numerator = 1; + 
enum_desc.InputWidth = input_size >> 32; + enum_desc.InputHeight = (UINT32)input_size; + enum_desc.OutputFrameRate.Denominator = 1; + enum_desc.OutputFrameRate.Numerator = 1; + enum_desc.OutputWidth = output_size >> 32; + enum_desc.OutputHeight = (UINT32)output_size; + enum_desc.Usage = D3D11_VIDEO_USAGE_PLAYBACK_NORMAL; + + if (FAILED(hr = get_d3d11_video_device(resource, video_device))) + return hr; + if (FAILED(hr = ID3D11VideoDevice_CreateVideoProcessorEnumerator(*video_device, &enum_desc, enumerator))) + { + ID3D11VideoDevice_Release(*video_device); + *video_device = NULL; + } + + return hr; +} + +struct resource_desc +{ + GUID subtype; + UINT64 frame_size; +}; + +static HRESULT init_d3d11_video_processor(const struct resource_desc *input_desc, ID3D11Resource *input, + const struct resource_desc *output_desc, ID3D11Resource *output, ID3D11VideoProcessor **processor, + ID3D11VideoProcessorInputView **input_view, ID3D11VideoProcessorOutputView **output_view) +{ + D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC output_view_desc = {0}; + D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC input_view_desc = {0}; + ID3D11VideoProcessorEnumerator *enumerator; + UINT input_flags = 0, output_flags = 0; + ID3D11VideoDevice *device; + HRESULT hr; + + *processor = NULL; + *input_view = NULL; + *output_view = NULL; + + input_view_desc.FourCC = input_desc->subtype.Data1; + input_view_desc.ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D; + input_view_desc.Texture2D.MipSlice = 0; + input_view_desc.Texture2D.ArraySlice = 0; + + output_view_desc.ViewDimension = D3D11_VPOV_DIMENSION_TEXTURE2D; + output_view_desc.Texture2D.MipSlice = 0; + + /* assume input and output have the same device */ + if (FAILED(hr = create_video_processor_enumerator(input, input_desc->frame_size, + output_desc->frame_size, &device, &enumerator))) + return hr; + + if (FAILED(hr = ID3D11VideoProcessorEnumerator_CheckVideoProcessorFormat(enumerator, + input_desc->subtype.Data1, &input_flags)) + || FAILED(hr = 
ID3D11VideoProcessorEnumerator_CheckVideoProcessorFormat(enumerator, + output_desc->subtype.Data1, &output_flags))) + goto failed; + if (!(input_flags & D3D11_VIDEO_PROCESSOR_FORMAT_SUPPORT_INPUT) + || !(output_flags & D3D11_VIDEO_PROCESSOR_FORMAT_SUPPORT_OUTPUT)) + { + hr = MF_E_INVALIDMEDIATYPE; + goto failed; + } + + if (FAILED(hr = ID3D11VideoDevice_CreateVideoProcessorInputView(device, input, enumerator, + &input_view_desc, input_view))) + goto failed; + if (FAILED(hr = ID3D11VideoDevice_CreateVideoProcessorOutputView(device, output, enumerator, + &output_view_desc, output_view))) + { + ID3D11VideoProcessorInputView_Release(*input_view); + *input_view = NULL; + goto failed; + } + + if (FAILED(hr = ID3D11VideoDevice_CreateVideoProcessor(device, enumerator, 0, processor))) + { + ID3D11VideoProcessorOutputView_Release(*output_view); + *output_view = NULL; + ID3D11VideoProcessorInputView_Release(*input_view); + *input_view = NULL; + goto failed; + } + +failed: + ID3D11VideoProcessorEnumerator_Release(enumerator); + ID3D11VideoDevice_Release(device); + return hr; +} + +static HRESULT video_processor_process_output_d3d11(struct video_processor *processor, + IMFSample *input_sample, IMFSample *output_sample) +{ + D3D11_VIDEO_PROCESSOR_STREAM streams = {0}; + struct resource_desc input_desc, output_desc; + ID3D11VideoProcessorOutputView *output_view; + ID3D11VideoProcessorInputView *input_view; + ID3D11VideoProcessor *video_processor; + ID3D11VideoContext *video_context; + ID3D11Resource *input, *output; + MFVideoArea aperture; + RECT rect = {0}; + LONGLONG time; + DWORD flags; + HRESULT hr; + + if (FAILED(hr = IMFMediaType_GetUINT64(processor->input_type, &MF_MT_FRAME_SIZE, &input_desc.frame_size)) + || FAILED(hr = IMFMediaType_GetGUID(processor->input_type, &MF_MT_SUBTYPE, &input_desc.subtype)) + || FAILED(hr = IMFMediaType_GetUINT64(processor->output_type, &MF_MT_FRAME_SIZE, &output_desc.frame_size)) + || FAILED(hr = IMFMediaType_GetGUID(processor->output_type, 
&MF_MT_SUBTYPE, &output_desc.subtype))) + return hr; + + if (FAILED(hr = video_processor_get_d3d11_resource(input_sample, &input))) + return hr; + if (FAILED(hr = video_processor_get_d3d11_resource(output_sample, &output))) + { + ID3D11Resource_Release(input); + return hr; + } + + if (FAILED(hr = get_d3d11_video_context(input, &video_context))) + goto failed; + if (FAILED(hr = init_d3d11_video_processor(&input_desc, input, &output_desc, output, + &video_processor, &input_view, &output_view))) + { + ID3D11VideoContext_Release(video_context); + goto failed; + } + + streams.Enable = TRUE; + streams.OutputIndex = 0; + streams.InputFrameOrField = 0; + streams.PastFrames = 0; + streams.FutureFrames = 0; + streams.pInputSurface = input_view; + + if (SUCCEEDED(IMFMediaType_GetBlob(processor->input_type, &MF_MT_MINIMUM_DISPLAY_APERTURE, (BYTE *)&aperture, sizeof(aperture), NULL))) + SetRect(&rect, aperture.OffsetX.value, aperture.OffsetY.value, aperture.OffsetX.value + aperture.Area.cx, + aperture.OffsetY.value + aperture.Area.cy); + else + SetRect(&rect, 0, 0, input_desc.frame_size >> 32, (UINT32)input_desc.frame_size); + ID3D11VideoContext_VideoProcessorSetStreamSourceRect(video_context, video_processor, 0, TRUE, &rect); + + if (SUCCEEDED(IMFMediaType_GetBlob(processor->output_type, &MF_MT_MINIMUM_DISPLAY_APERTURE, (BYTE *)&aperture, sizeof(aperture), NULL))) + SetRect(&rect, aperture.OffsetX.value, aperture.OffsetY.value, aperture.OffsetX.value + aperture.Area.cx, + aperture.OffsetY.value + aperture.Area.cy); + else + SetRect(&rect, 0, 0, output_desc.frame_size >> 32, (UINT32)output_desc.frame_size); + ID3D11VideoContext_VideoProcessorSetStreamDestRect(video_context, video_processor, 0, TRUE, &rect); + + ID3D11VideoContext_VideoProcessorBlt(video_context, video_processor, output_view, 0, 1, &streams); + + IMFSample_CopyAllItems(input_sample, (IMFAttributes *)output_sample); + if (SUCCEEDED(IMFSample_GetSampleDuration(input_sample, &time))) + 
IMFSample_SetSampleDuration(output_sample, time); + if (SUCCEEDED(IMFSample_GetSampleTime(input_sample, &time))) + IMFSample_SetSampleTime(output_sample, time); + if (SUCCEEDED(IMFSample_GetSampleFlags(input_sample, &flags))) + IMFSample_SetSampleFlags(output_sample, flags); + + ID3D11VideoProcessorOutputView_Release(output_view); + ID3D11VideoProcessorInputView_Release(input_view); + ID3D11VideoProcessor_Release(video_processor); + ID3D11VideoContext_Release(video_context); + +failed: + ID3D11Resource_Release(output); + ID3D11Resource_Release(input); + return hr; } static struct video_processor *impl_from_IMFTransform(IMFTransform *iface) @@ -158,6 +460,9 @@ static ULONG WINAPI video_processor_Release(IMFTransform *iface) if (!refcount) { + video_processor_uninit_allocator(impl); + if (impl->device_manager) + IUnknown_Release(impl->device_manager); if (impl->wg_transform) wg_transform_destroy(impl->wg_transform); if (impl->input_type) @@ -230,7 +535,7 @@ static HRESULT WINAPI video_processor_GetAttributes(IMFTransform *iface, IMFAttr { struct video_processor *impl = impl_from_IMFTransform(iface); - FIXME("iface %p, attributes %p semi-stub!\n", iface, attributes); + TRACE("iface %p, attributes %p\n", iface, attributes); if (!attributes) return E_POINTER; @@ -249,7 +554,7 @@ static HRESULT WINAPI video_processor_GetOutputStreamAttributes(IMFTransform *if { struct video_processor *impl = impl_from_IMFTransform(iface); - FIXME("iface %p, id %#lx, attributes %p semi-stub!\n", iface, id, attributes); + TRACE("iface %p, id %#lx, attributes %p\n", iface, id, attributes); if (!attributes) return E_POINTER; @@ -354,6 +659,22 @@ static HRESULT WINAPI video_processor_SetInputType(IMFTransform *iface, DWORD id TRACE("iface %p, id %#lx, type %p, flags %#lx.\n", iface, id, type, flags); + if (!type) + { + if (impl->input_type) + { + IMFMediaType_Release(impl->input_type); + impl->input_type = NULL; + } + if (impl->wg_transform) + { + wg_transform_destroy(impl->wg_transform); + 
impl->wg_transform = 0; + } + + return S_OK; + } + if (FAILED(IMFMediaType_GetGUID(type, &MF_MT_MAJOR_TYPE, &major)) || !IsEqualGUID(&major, &MFMediaType_Video)) return E_INVALIDARG; @@ -397,6 +718,22 @@ static HRESULT WINAPI video_processor_SetOutputType(IMFTransform *iface, DWORD i TRACE("iface %p, id %#lx, type %p, flags %#lx.\n", iface, id, type, flags); + if (!type) + { + if (impl->output_type) + { + IMFMediaType_Release(impl->output_type); + impl->output_type = NULL; + } + if (impl->wg_transform) + { + wg_transform_destroy(impl->wg_transform); + impl->wg_transform = 0; + } + + return S_OK; + } + if (FAILED(IMFMediaType_GetGUID(type, &MF_MT_MAJOR_TYPE, &major)) || !IsEqualGUID(&major, &MFMediaType_Video)) return E_INVALIDARG; @@ -413,6 +750,9 @@ static HRESULT WINAPI video_processor_SetOutputType(IMFTransform *iface, DWORD i if (flags & MFT_SET_TYPE_TEST_ONLY) return S_OK; + if (FAILED(hr = video_processor_uninit_allocator(impl))) + return hr; + if (impl->output_type) IMFMediaType_Release(impl->output_type); IMFMediaType_AddRef((impl->output_type = type)); @@ -513,8 +853,33 @@ static HRESULT WINAPI video_processor_ProcessEvent(IMFTransform *iface, DWORD id static HRESULT WINAPI video_processor_ProcessMessage(IMFTransform *iface, MFT_MESSAGE_TYPE message, ULONG_PTR param) { - FIXME("iface %p, message %#x, param %#Ix stub!\n", iface, message, param); - return S_OK; + struct video_processor *processor = impl_from_IMFTransform(iface); + HRESULT hr; + + TRACE("iface %p, message %#x, param %Ix.\n", iface, message, param); + + switch (message) + { + case MFT_MESSAGE_SET_D3D_MANAGER: + if (FAILED(hr = video_processor_uninit_allocator(processor))) + return hr; + + if (processor->device_manager) + { + processor->output_info.dwFlags &= ~MFT_OUTPUT_STREAM_PROVIDES_SAMPLES; + IUnknown_Release(processor->device_manager); + } + if ((processor->device_manager = (IUnknown *)param)) + { + IUnknown_AddRef(processor->device_manager); + processor->output_info.dwFlags |= 
MFT_OUTPUT_STREAM_PROVIDES_SAMPLES; + } + return S_OK; + + default: + FIXME("Ignoring message %#x.\n", message); + return S_OK; + } } static HRESULT WINAPI video_processor_ProcessInput(IMFTransform *iface, DWORD id, IMFSample *sample, DWORD flags) @@ -525,8 +890,12 @@ static HRESULT WINAPI video_processor_ProcessInput(IMFTransform *iface, DWORD id if (!impl->wg_transform) return MF_E_TRANSFORM_TYPE_NOT_SET; + if (impl->input_sample) + return MF_E_NOTACCEPTING; - return wg_transform_push_mf(impl->wg_transform, sample, impl->wg_sample_queue); + impl->input_sample = sample; + IMFSample_AddRef(impl->input_sample); + return S_OK; } static HRESULT WINAPI video_processor_ProcessOutput(IMFTransform *iface, DWORD flags, DWORD count, @@ -534,6 +903,7 @@ static HRESULT WINAPI video_processor_ProcessOutput(IMFTransform *iface, DWORD f { struct video_processor *impl = impl_from_IMFTransform(iface); MFT_OUTPUT_STREAM_INFO info; + IMFSample *input_sample, *output_sample; HRESULT hr; TRACE("iface %p, flags %#lx, count %lu, samples %p, status %p.\n", iface, flags, count, samples, status); @@ -545,16 +915,50 @@ static HRESULT WINAPI video_processor_ProcessOutput(IMFTransform *iface, DWORD f return MF_E_TRANSFORM_TYPE_NOT_SET; samples->dwStatus = 0; - if (!samples->pSample) - return E_INVALIDARG; - if (FAILED(hr = IMFTransform_GetOutputStreamInfo(iface, 0, &info))) return hr; - if (SUCCEEDED(hr = wg_transform_read_mf(impl->wg_transform, samples->pSample, - info.cbSize, NULL, &samples->dwStatus))) + if (!(input_sample = impl->input_sample)) + return MF_E_TRANSFORM_NEED_MORE_INPUT; + impl->input_sample = NULL; + + if (impl->output_info.dwFlags & MFT_OUTPUT_STREAM_PROVIDES_SAMPLES) + { + if (FAILED(hr = video_processor_init_allocator(impl)) + || FAILED(hr = IMFVideoSampleAllocatorEx_AllocateSample(impl->allocator, &output_sample))) + { + IMFSample_Release(input_sample); + return hr; + } + } + else + { + if (!(output_sample = samples->pSample)) + { + IMFSample_Release(input_sample); + 
return E_INVALIDARG; + } + IMFSample_AddRef(output_sample); + } + + if (FAILED(hr = video_processor_process_output_d3d11(impl, input_sample, output_sample))) + { + if (FAILED(hr = wg_transform_push_mf(impl->wg_transform, input_sample, impl->wg_sample_queue))) + goto done; + if (FAILED(hr = wg_transform_read_mf(impl->wg_transform, output_sample, info.cbSize, &samples->dwStatus))) + goto done; wg_sample_queue_flush(impl->wg_sample_queue, false); + } + + if (impl->output_info.dwFlags & MFT_OUTPUT_STREAM_PROVIDES_SAMPLES) + { + samples->pSample = output_sample; + IMFSample_AddRef(output_sample); + } +done: + IMFSample_Release(output_sample); + IMFSample_Release(input_sample); return hr; } @@ -590,45 +994,39 @@ static const IMFTransformVtbl video_processor_vtbl = HRESULT video_processor_create(REFIID riid, void **ret) { - static const struct wg_format input_format = + const MFVIDEOFORMAT input_format = { - .major_type = WG_MAJOR_TYPE_VIDEO, - .u.video = - { - .format = WG_VIDEO_FORMAT_I420, - .width = 1920, - .height = 1080, - }, + .dwSize = sizeof(MFVIDEOFORMAT), + .videoInfo = {.dwWidth = 1920, .dwHeight = 1080}, + .guidFormat = MFVideoFormat_I420, }; - static const struct wg_format output_format = + const MFVIDEOFORMAT output_format = { - .major_type = WG_MAJOR_TYPE_VIDEO, - .u.video = - { - .format = WG_VIDEO_FORMAT_NV12, - .width = 1920, - .height = 1080, - }, + .dwSize = sizeof(MFVIDEOFORMAT), + .videoInfo = {.dwWidth = 1920, .dwHeight = 1080}, + .guidFormat = MFVideoFormat_NV12, }; - struct wg_transform_attrs attrs = {0}; - wg_transform_t transform; struct video_processor *impl; HRESULT hr; TRACE("riid %s, ret %p.\n", debugstr_guid(riid), ret); - if (!(transform = wg_transform_create(&input_format, &output_format, &attrs))) + if (FAILED(hr = check_video_transform_support(&input_format, &output_format))) { ERR_(winediag)("GStreamer doesn't support video conversion, please install appropriate plugins.\n"); - return E_FAIL; + return hr; } - 
wg_transform_destroy(transform); if (!(impl = calloc(1, sizeof(*impl)))) return E_OUTOFMEMORY; if (FAILED(hr = MFCreateAttributes(&impl->attributes, 0))) goto failed; + if (FAILED(hr = IMFAttributes_SetUINT32(impl->attributes, &MF_SA_D3D11_AWARE, TRUE))) + goto failed; + /* native only has MF_SA_D3D_AWARE on Win7, but it is useful to have in mfreadwrite */ + if (FAILED(hr = IMFAttributes_SetUINT32(impl->attributes, &MF_SA_D3D_AWARE, TRUE))) + goto failed; if (FAILED(hr = MFCreateAttributes(&impl->output_attributes, 0))) goto failed; if (FAILED(hr = wg_sample_queue_create(&impl->wg_sample_queue))) diff --git a/dlls/winegstreamer/wg_format.c b/dlls/winegstreamer/wg_format.c index 5e560c50170..f08c9e35a2f 100644 --- a/dlls/winegstreamer/wg_format.c +++ b/dlls/winegstreamer/wg_format.c @@ -96,7 +96,7 @@ static uint32_t wg_channel_position_from_gst(GstAudioChannelPosition position) return 0; } -static uint32_t wg_channel_mask_from_gst(const GstAudioInfo *info) +uint32_t wg_channel_mask_from_gst(const GstAudioInfo *info) { uint32_t mask = 0, position; unsigned int i; @@ -141,12 +141,12 @@ static enum wg_video_format wg_video_format_from_gst(GstVideoFormat format) return WG_VIDEO_FORMAT_BGRx; case GST_VIDEO_FORMAT_BGR: return WG_VIDEO_FORMAT_BGR; - case GST_VIDEO_FORMAT_RGBA: - return WG_VIDEO_FORMAT_RGBA; case GST_VIDEO_FORMAT_RGB15: return WG_VIDEO_FORMAT_RGB15; case GST_VIDEO_FORMAT_RGB16: return WG_VIDEO_FORMAT_RGB16; + case GST_VIDEO_FORMAT_RGBA: + return WG_VIDEO_FORMAT_RGBA; case GST_VIDEO_FORMAT_AYUV: return WG_VIDEO_FORMAT_AYUV; case GST_VIDEO_FORMAT_I420: @@ -267,25 +267,6 @@ static void wg_format_from_caps_audio_wma(struct wg_format *format, const GstCap gst_buffer_unmap(codec_data, &map); } -static void wg_format_from_caps_audio_encoded(struct wg_format *format, const GstCaps *caps, - const GstAudioInfo *info) -{ - gchar *str; - gint len; - - format->major_type = WG_MAJOR_TYPE_AUDIO_ENCODED; - format->u.audio.rate = info->rate; - format->u.audio.channels = 
info->channels; - - str = gst_caps_to_string(caps); - len = strlen(str) + 1; - if (len >= ARRAY_SIZE(format->u.audio.caps)) - GST_FIXME("wg_format.audio.caps buffer is too small, need %u bytes", len); - else - memcpy(format->u.audio.caps, str, len); - g_free(str); -} - static void wg_format_from_caps_video_cinepak(struct wg_format *format, const GstCaps *caps) { const GstStructure *structure = gst_caps_get_structure(caps, 0); @@ -411,66 +392,47 @@ static void wg_format_from_caps_video_mpeg1(struct wg_format *format, const GstC format->u.video.fps_d = fps_d; } -static void wg_format_from_caps_video_encoded(struct wg_format *format, const GstCaps *caps, - const GstVideoInfo *info) -{ - gchar *str; - gint len; - - format->major_type = WG_MAJOR_TYPE_VIDEO_ENCODED; - format->u.video.width = info->width; - format->u.video.height = info->height; - format->u.video.fps_n = info->fps_n; - format->u.video.fps_d = info->fps_d; - - str = gst_caps_to_string(caps); - len = strlen(str) + 1; - if (len >= ARRAY_SIZE(format->u.video.caps)) - GST_FIXME("wg_format.video.caps buffer is too small, need %u bytes", len); - else - memcpy(format->u.video.caps, str, len); - g_free(str); -} - void wg_format_from_caps(struct wg_format *format, const GstCaps *caps) { const GstStructure *structure = gst_caps_get_structure(caps, 0); const char *name = gst_structure_get_name(structure); - GstAudioInfo audio_info; - GstVideoInfo video_info; gboolean parsed; memset(format, 0, sizeof(*format)); - if (g_str_has_prefix(name, "audio/") && gst_audio_info_from_caps(&audio_info, caps)) + if (!strcmp(name, "audio/x-raw")) { - if (GST_AUDIO_INFO_FORMAT(&audio_info) != GST_AUDIO_FORMAT_ENCODED) - wg_format_from_audio_info(format, &audio_info); - else if (!strcmp(name, "audio/mpeg") && gst_structure_get_boolean(structure, "parsed", &parsed) && parsed) - wg_format_from_caps_audio_mpeg1(format, caps); - else if (!strcmp(name, "audio/x-wma")) - wg_format_from_caps_audio_wma(format, caps); - else - { - 
GST_FIXME("Using fallback for encoded audio caps %" GST_PTR_FORMAT ".", caps); - wg_format_from_caps_audio_encoded(format, caps, &audio_info); - } + GstAudioInfo info; + + if (gst_audio_info_from_caps(&info, caps)) + wg_format_from_audio_info(format, &info); } - else if (g_str_has_prefix(name, "video/") && gst_video_info_from_caps(&video_info, caps)) + else if (!strcmp(name, "video/x-raw")) { - if (GST_VIDEO_INFO_FORMAT(&video_info) != GST_VIDEO_FORMAT_ENCODED) - wg_format_from_video_info(format, &video_info); - else if (!strcmp(name, "video/x-cinepak")) - wg_format_from_caps_video_cinepak(format, caps); - else if (!strcmp(name, "video/x-wmv")) - wg_format_from_caps_video_wmv(format, caps); - else if (!strcmp(name, "video/mpeg") && gst_structure_get_boolean(structure, "parsed", &parsed) && parsed) - wg_format_from_caps_video_mpeg1(format, caps); - else - { - GST_FIXME("Using fallback for encoded video caps %" GST_PTR_FORMAT ".", caps); - wg_format_from_caps_video_encoded(format, caps, &video_info); - } + GstVideoInfo info; + + if (gst_video_info_from_caps(&info, caps)) + wg_format_from_video_info(format, &info); + } + else if (!strcmp(name, "audio/mpeg") && gst_structure_get_boolean(structure, "parsed", &parsed) && parsed) + { + wg_format_from_caps_audio_mpeg1(format, caps); + } + else if (!strcmp(name, "audio/x-wma")) + { + wg_format_from_caps_audio_wma(format, caps); + } + else if (!strcmp(name, "video/x-cinepak")) + { + wg_format_from_caps_video_cinepak(format, caps); + } + else if (!strcmp(name, "video/x-wmv")) + { + wg_format_from_caps_video_wmv(format, caps); + } + else if (!strcmp(name, "video/mpeg") && gst_structure_get_boolean(structure, "parsed", &parsed) && parsed) + { + wg_format_from_caps_video_mpeg1(format, caps); } else { @@ -608,9 +570,9 @@ static GstVideoFormat wg_video_format_to_gst(enum wg_video_format format) case WG_VIDEO_FORMAT_BGRA: return GST_VIDEO_FORMAT_BGRA; case WG_VIDEO_FORMAT_BGRx: return GST_VIDEO_FORMAT_BGRx; case 
WG_VIDEO_FORMAT_BGR: return GST_VIDEO_FORMAT_BGR; - case WG_VIDEO_FORMAT_RGBA: return GST_VIDEO_FORMAT_RGBA; case WG_VIDEO_FORMAT_RGB15: return GST_VIDEO_FORMAT_RGB15; case WG_VIDEO_FORMAT_RGB16: return GST_VIDEO_FORMAT_RGB16; + case WG_VIDEO_FORMAT_RGBA: return GST_VIDEO_FORMAT_RGBA; case WG_VIDEO_FORMAT_AYUV: return GST_VIDEO_FORMAT_AYUV; case WG_VIDEO_FORMAT_I420: return GST_VIDEO_FORMAT_I420; case WG_VIDEO_FORMAT_NV12: return GST_VIDEO_FORMAT_NV12; @@ -920,8 +882,6 @@ GstCaps *wg_format_to_caps(const struct wg_format *format) return wg_format_to_caps_audio_mpeg4(format); case WG_MAJOR_TYPE_AUDIO_WMA: return wg_format_to_caps_audio_wma(format); - case WG_MAJOR_TYPE_AUDIO_ENCODED: - return gst_caps_from_string(format->u.audio.caps); case WG_MAJOR_TYPE_VIDEO: return wg_format_to_caps_video(format); case WG_MAJOR_TYPE_VIDEO_CINEPAK: @@ -934,8 +894,6 @@ GstCaps *wg_format_to_caps(const struct wg_format *format) return wg_format_to_caps_video_indeo(format); case WG_MAJOR_TYPE_VIDEO_MPEG1: return wg_format_to_caps_video_mpeg1(format); - case WG_MAJOR_TYPE_VIDEO_ENCODED: - return gst_caps_from_string(format->u.video.caps); } assert(0); return NULL; @@ -951,11 +909,9 @@ bool wg_format_compare(const struct wg_format *a, const struct wg_format *b) case WG_MAJOR_TYPE_AUDIO_MPEG1: case WG_MAJOR_TYPE_AUDIO_MPEG4: case WG_MAJOR_TYPE_AUDIO_WMA: - case WG_MAJOR_TYPE_AUDIO_ENCODED: case WG_MAJOR_TYPE_VIDEO_H264: case WG_MAJOR_TYPE_VIDEO_INDEO: case WG_MAJOR_TYPE_VIDEO_MPEG1: - case WG_MAJOR_TYPE_VIDEO_ENCODED: GST_FIXME("Format %u not implemented!", a->major_type); /* fallthrough */ case WG_MAJOR_TYPE_UNKNOWN: diff --git a/dlls/winegstreamer/wg_media_type.c b/dlls/winegstreamer/wg_media_type.c new file mode 100644 index 00000000000..fe4216e5a2e --- /dev/null +++ b/dlls/winegstreamer/wg_media_type.c @@ -0,0 +1,892 @@ +/* + * Copyright 2024 Rémi Bernon for CodeWeavers + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU 
Lesser General Public + * License as published by the Free Software Foundation; either + * version 2.1 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this library; if not, write to the Free Software + * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA + */ + +#if 0 +#pragma makedep unix +#endif + +#include "config.h" + +#include +#include + +#include +#include +#include + +#include "ntstatus.h" +#define WIN32_NO_STATUS +#include "winternl.h" +#include "windef.h" +#include "winbase.h" + +#include "initguid.h" +#include "d3d9types.h" +#include "mfapi.h" +#include "wmcodecdsp.h" + +#include "unix_private.h" + +#define WG_GUID_FORMAT "{%08x-%04x-%04x-%02x%02x-%02x%02x%02x%02x%02x%02x}" +#define WG_GUID_ARGS(guid) (int)(guid).Data1, (guid).Data2, (guid).Data3, (guid).Data4[0], \ + (guid).Data4[1], (guid).Data4[2], (guid).Data4[3], (guid).Data4[4], \ + (guid).Data4[5], (guid).Data4[6], (guid).Data4[7] + +#define WG_RATIO_FORMAT "%d:%d" +#define WG_RATIO_ARGS(ratio) (int)(ratio).Numerator, (int)(ratio).Denominator + +#define WG_APERTURE_FORMAT "(%d,%d)-(%d,%d)" +#define WG_APERTURE_ARGS(aperture) (int)(aperture).OffsetX.value, (int)(aperture).OffsetY.value, \ + (int)(aperture).Area.cx, (int)(aperture).Area.cy + +static const GUID GUID_NULL; + +DEFINE_MEDIATYPE_GUID(MFAudioFormat_GStreamer,MAKEFOURCC('G','S','T','a')); +DEFINE_MEDIATYPE_GUID(MFAudioFormat_RAW_AAC,WAVE_FORMAT_RAW_AAC1); +DEFINE_MEDIATYPE_GUID(MFAudioFormat_MSAudio1,WAVE_FORMAT_MSAUDIO1); + +DEFINE_MEDIATYPE_GUID(MFVideoFormat_GStreamer,MAKEFOURCC('G','S','T','v')); 
+DEFINE_MEDIATYPE_GUID(MFVideoFormat_CVID,MAKEFOURCC('c','v','i','d')); +DEFINE_MEDIATYPE_GUID(MFVideoFormat_IV50,MAKEFOURCC('I','V','5','0')); +DEFINE_MEDIATYPE_GUID(MFVideoFormat_VC1S,MAKEFOURCC('V','C','1','S')); +DEFINE_MEDIATYPE_GUID(MFVideoFormat_ABGR32,D3DFMT_A8B8G8R8); + +static void init_caps_codec_data(GstCaps *caps, const void *codec_data, int codec_data_size) +{ + GstBuffer *buffer; + + if (codec_data_size > 0 && (buffer = gst_buffer_new_and_alloc(codec_data_size))) + { + gst_buffer_fill(buffer, 0, codec_data, codec_data_size); + gst_caps_set_simple(caps, "codec_data", GST_TYPE_BUFFER, buffer, NULL); + gst_buffer_unref(buffer); + } +} + +static void init_caps_from_wave_format_mpeg1(GstCaps *caps, const MPEG1WAVEFORMAT *format, UINT32 format_size) +{ + init_caps_codec_data(caps, &format->wfx + 1, format->wfx.cbSize); + + gst_structure_remove_field(gst_caps_get_structure(caps, 0), "format"); + gst_structure_set_name(gst_caps_get_structure(caps, 0), "audio/mpeg"); + gst_caps_set_simple(caps, "mpegversion", G_TYPE_INT, 1, NULL); + gst_caps_set_simple(caps, "layer", G_TYPE_INT, format->fwHeadLayer, NULL); + gst_caps_set_simple(caps, "parsed", G_TYPE_BOOLEAN, TRUE, NULL); +} + +static void init_caps_from_wave_format_mp3(GstCaps *caps, const MPEGLAYER3WAVEFORMAT *format, UINT32 format_size) +{ + init_caps_codec_data(caps, &format->wfx + 1, format->wfx.cbSize); + + gst_structure_remove_field(gst_caps_get_structure(caps, 0), "format"); + gst_structure_set_name(gst_caps_get_structure(caps, 0), "audio/mpeg"); + gst_caps_set_simple(caps, "mpegversion", G_TYPE_INT, 1, NULL); + gst_caps_set_simple(caps, "layer", G_TYPE_INT, 3, NULL); + gst_caps_set_simple(caps, "parsed", G_TYPE_BOOLEAN, TRUE, NULL); +} + +static void init_caps_from_wave_format_aac(GstCaps *caps, const HEAACWAVEFORMAT *format, UINT32 format_size) +{ + init_caps_codec_data(caps, format->pbAudioSpecificConfig, format_size - sizeof(format->wfInfo)); + + 
gst_structure_remove_field(gst_caps_get_structure(caps, 0), "format"); + gst_structure_set_name(gst_caps_get_structure(caps, 0), "audio/mpeg"); + gst_caps_set_simple(caps, "mpegversion", G_TYPE_INT, 4, NULL); + + switch (format->wfInfo.wPayloadType) + { + case 0: gst_caps_set_simple(caps, "stream-format", G_TYPE_STRING, "raw", NULL); break; + case 1: gst_caps_set_simple(caps, "stream-format", G_TYPE_STRING, "adts", NULL); break; + case 2: gst_caps_set_simple(caps, "stream-format", G_TYPE_STRING, "adif", NULL); break; + case 3: gst_caps_set_simple(caps, "stream-format", G_TYPE_STRING, "loas", NULL); break; + } + + /* FIXME: Use gst_codec_utils_aac_caps_set_level_and_profile from GStreamer pbutils library */ +} + +static void init_caps_from_wave_format_aac_raw(GstCaps *caps, const WAVEFORMATEX *format, UINT32 format_size) +{ + init_caps_codec_data(caps, format + 1, format->cbSize); + + gst_structure_remove_field(gst_caps_get_structure(caps, 0), "format"); + gst_structure_set_name(gst_caps_get_structure(caps, 0), "audio/mpeg"); + gst_caps_set_simple(caps, "stream-format", G_TYPE_STRING, "raw", NULL); + gst_caps_set_simple(caps, "mpegversion", G_TYPE_INT, 4, NULL); +} + +static void init_caps_from_wave_format_wma1(GstCaps *caps, const MSAUDIO1WAVEFORMAT *format, UINT32 format_size) +{ + init_caps_codec_data(caps, &format->wfx + 1, format->wfx.cbSize); + + gst_structure_remove_field(gst_caps_get_structure(caps, 0), "format"); + gst_structure_set_name(gst_caps_get_structure(caps, 0), "audio/x-wma"); + gst_caps_set_simple(caps, "wmaversion", G_TYPE_INT, 1, NULL); + gst_caps_set_simple(caps, "block_align", G_TYPE_INT, format->wfx.nBlockAlign, NULL); + gst_caps_set_simple(caps, "depth", G_TYPE_INT, format->wfx.wBitsPerSample, NULL); + gst_caps_set_simple(caps, "bitrate", G_TYPE_INT, format->wfx.nAvgBytesPerSec * 8, NULL); +} + +static void init_caps_from_wave_format_wma2(GstCaps *caps, const WMAUDIO2WAVEFORMAT *format, UINT32 format_size) +{ + init_caps_codec_data(caps, 
&format->wfx + 1, format->wfx.cbSize); + + gst_structure_remove_field(gst_caps_get_structure(caps, 0), "format"); + gst_structure_set_name(gst_caps_get_structure(caps, 0), "audio/x-wma"); + gst_caps_set_simple(caps, "wmaversion", G_TYPE_INT, 2, NULL); + gst_caps_set_simple(caps, "block_align", G_TYPE_INT, format->wfx.nBlockAlign, NULL); + gst_caps_set_simple(caps, "depth", G_TYPE_INT, format->wfx.wBitsPerSample, NULL); + gst_caps_set_simple(caps, "bitrate", G_TYPE_INT, format->wfx.nAvgBytesPerSec * 8, NULL); +} + +static void init_caps_from_wave_format_wma3(GstCaps *caps, const WMAUDIO3WAVEFORMAT *format, UINT32 format_size) +{ + init_caps_codec_data(caps, &format->wfx + 1, format->wfx.cbSize); + + gst_structure_remove_field(gst_caps_get_structure(caps, 0), "format"); + gst_structure_set_name(gst_caps_get_structure(caps, 0), "audio/x-wma"); + gst_caps_set_simple(caps, "wmaversion", G_TYPE_INT, 3, NULL); + gst_caps_set_simple(caps, "block_align", G_TYPE_INT, format->wfx.nBlockAlign, NULL); + gst_caps_set_simple(caps, "depth", G_TYPE_INT, format->wfx.wBitsPerSample, NULL); + gst_caps_set_simple(caps, "bitrate", G_TYPE_INT, format->wfx.nAvgBytesPerSec * 8, NULL); +} + +static void init_caps_from_wave_format(GstCaps *caps, const GUID *subtype, + const void *format, UINT32 format_size) +{ + if (IsEqualGUID(subtype, &MFAudioFormat_MPEG)) + return init_caps_from_wave_format_mpeg1(caps, format, format_size); + if (IsEqualGUID(subtype, &MFAudioFormat_MP3)) + return init_caps_from_wave_format_mp3(caps, format, format_size); + if (IsEqualGUID(subtype, &MFAudioFormat_AAC)) + return init_caps_from_wave_format_aac(caps, format, format_size); + if (IsEqualGUID(subtype, &MFAudioFormat_RAW_AAC)) + return init_caps_from_wave_format_aac_raw(caps, format, format_size); + if (IsEqualGUID(subtype, &MFAudioFormat_MSAudio1)) + return init_caps_from_wave_format_wma1(caps, format, format_size); + if (IsEqualGUID(subtype, &MFAudioFormat_WMAudioV8)) + return 
init_caps_from_wave_format_wma2(caps, format, format_size); + if (IsEqualGUID(subtype, &MFAudioFormat_WMAudioV9) + || IsEqualGUID(subtype, &MFAudioFormat_WMAudio_Lossless)) + return init_caps_from_wave_format_wma3(caps, format, format_size); + + GST_FIXME("Unsupported subtype " WG_GUID_FORMAT, WG_GUID_ARGS(*subtype)); +} + +static GstAudioFormat wave_format_tag_to_gst_audio_format(UINT tag, UINT depth) +{ + switch (tag) + { + case WAVE_FORMAT_PCM: + if (depth == 32) return GST_AUDIO_FORMAT_S32LE; + if (depth == 24) return GST_AUDIO_FORMAT_S24LE; + if (depth == 16) return GST_AUDIO_FORMAT_S16LE; + if (depth == 8) return GST_AUDIO_FORMAT_U8; + break; + + case WAVE_FORMAT_IEEE_FLOAT: + if (depth == 64) return GST_AUDIO_FORMAT_F64LE; + if (depth == 32) return GST_AUDIO_FORMAT_F32LE; + break; + } + + return GST_AUDIO_FORMAT_ENCODED; +} + +static GstCaps *caps_from_wave_format_ex(const WAVEFORMATEX *format, UINT32 format_size, const GUID *subtype, UINT64 channel_mask) +{ + GstAudioFormat audio_format = wave_format_tag_to_gst_audio_format(subtype->Data1, format->wBitsPerSample); + const void *codec_data = format + 1; + GstCaps *caps; + + if (IsEqualGUID(subtype, &MFAudioFormat_GStreamer)) + return gst_caps_from_string(codec_data); + if (format_size > sizeof(*format) + 8 && !strncmp(codec_data, "audio/x-", 8)) + return gst_caps_from_string(codec_data); + + if (!(caps = gst_caps_new_simple("audio/x-raw", "format", G_TYPE_STRING, gst_audio_format_to_string(audio_format), + "layout", G_TYPE_STRING, "interleaved", "rate", G_TYPE_INT, format->nSamplesPerSec, + "channels", G_TYPE_INT, format->nChannels, "channel-mask", GST_TYPE_BITMASK, channel_mask, NULL))) + return NULL; + + if (audio_format == GST_AUDIO_FORMAT_ENCODED) + init_caps_from_wave_format(caps, subtype, format, format_size); + + return caps; +} + +static WAVEFORMATEX *strip_wave_format_extensible(const WAVEFORMATEXTENSIBLE *format_ext) +{ + UINT32 extra_size = format_ext->Format.cbSize + sizeof(WAVEFORMATEX) - 
sizeof(WAVEFORMATEXTENSIBLE); + WAVEFORMATEX *format; + + if (!(format = malloc(sizeof(*format) + extra_size))) + return NULL; + + *format = format_ext->Format; + format->cbSize = extra_size; + format->wFormatTag = format_ext->SubFormat.Data1; + memcpy(format + 1, format_ext + 1, extra_size); + return format; +} + +static GstCaps *caps_from_wave_format_extensible(const WAVEFORMATEXTENSIBLE *format, UINT32 format_size) +{ + WAVEFORMATEX *wfx; + GstCaps *caps; + + GST_TRACE("tag %#x, %u channels, sample rate %u, %u bytes/sec, alignment %u, %u bits/sample, " + "%u valid bps, channel mask %#x, subtype " WG_GUID_FORMAT ".", + format->Format.wFormatTag, format->Format.nChannels, (int)format->Format.nSamplesPerSec, + (int)format->Format.nAvgBytesPerSec, format->Format.nBlockAlign, format->Format.wBitsPerSample, + format->Samples.wValidBitsPerSample, (int)format->dwChannelMask, WG_GUID_ARGS(format->SubFormat)); + if (format->Format.cbSize) + { + guint extra_size = sizeof(WAVEFORMATEX) + format->Format.cbSize - sizeof(WAVEFORMATEXTENSIBLE); + GST_MEMDUMP("extra bytes:", (guint8 *)(format + 1), extra_size); + } + + if (!(wfx = strip_wave_format_extensible(format))) + return NULL; + + caps = caps_from_wave_format_ex(wfx, format_size + sizeof(*wfx) - sizeof(*format), + &format->SubFormat, format->dwChannelMask); + free(wfx); + return caps; +} + +static GstCaps *caps_from_wave_format(const void *format, UINT32 format_size) +{ + const WAVEFORMATEX *wfx = format; + GUID subtype = MFAudioFormat_Base; + UINT channel_mask; + + if (wfx->wFormatTag == WAVE_FORMAT_EXTENSIBLE) + return caps_from_wave_format_extensible(format, format_size); + + GST_TRACE("tag %#x, %u channels, sample rate %u, %u bytes/sec, alignment %u, %u bits/sample.", + wfx->wFormatTag, wfx->nChannels, (int)wfx->nSamplesPerSec, + (int)wfx->nAvgBytesPerSec, wfx->nBlockAlign, wfx->wBitsPerSample); + if (wfx->cbSize) GST_MEMDUMP("extra bytes:", (guint8 *)(wfx + 1), wfx->cbSize); + + subtype.Data1 = wfx->wFormatTag; + 
channel_mask = gst_audio_channel_get_fallback_mask(wfx->nChannels); + return caps_from_wave_format_ex(format, format_size, &subtype, channel_mask); +} + +static void init_caps_from_video_cinepak(GstCaps *caps, const MFVIDEOFORMAT *format, UINT format_size) +{ + init_caps_codec_data(caps, format + 1, format_size - sizeof(*format)); + + gst_structure_remove_field(gst_caps_get_structure(caps, 0), "format"); + gst_structure_set_name(gst_caps_get_structure(caps, 0), "video/x-cinepak"); +} + +static void init_caps_from_video_h264(GstCaps *caps, const MFVIDEOFORMAT *format, UINT format_size) +{ + init_caps_codec_data(caps, format + 1, format_size - sizeof(*format)); + + gst_structure_remove_field(gst_caps_get_structure(caps, 0), "format"); + gst_structure_set_name(gst_caps_get_structure(caps, 0), "video/x-h264"); + gst_caps_set_simple(caps, "stream-format", G_TYPE_STRING, format_size - sizeof(*format) ? "avc" : "byte-stream", NULL); +} + +static void init_caps_from_video_wmv(GstCaps *caps, const MFVIDEOFORMAT *format, UINT format_size, + int wmv_version, const char *wmv_format) +{ + init_caps_codec_data(caps, format + 1, format_size - sizeof(*format)); + + gst_structure_remove_field(gst_caps_get_structure(caps, 0), "format"); + gst_structure_set_name(gst_caps_get_structure(caps, 0), "video/x-wmv"); + gst_caps_set_simple(caps, "wmvversion", G_TYPE_INT, wmv_version, NULL); + gst_caps_set_simple(caps, "format", G_TYPE_STRING, wmv_format, NULL); +} + +static void init_caps_from_video_indeo(GstCaps *caps, const MFVIDEOFORMAT *format, UINT format_size) +{ + init_caps_codec_data(caps, format + 1, format_size - sizeof(*format)); + + gst_structure_remove_field(gst_caps_get_structure(caps, 0), "format"); + gst_structure_set_name(gst_caps_get_structure(caps, 0), "video/x-indeo"); + gst_caps_set_simple(caps, "indeoversion", G_TYPE_INT, 5, NULL); +} + +static void init_caps_from_video_mpeg(GstCaps *caps, const struct mpeg_video_format *format, UINT format_size) +{ + 
init_caps_codec_data(caps, format->sequence_header, format->sequence_header_count); + + gst_structure_remove_field(gst_caps_get_structure(caps, 0), "format"); + gst_structure_set_name(gst_caps_get_structure(caps, 0), "video/mpeg"); + gst_caps_set_simple(caps, "mpegversion", G_TYPE_INT, 1, NULL); + gst_caps_set_simple(caps, "systemstream", G_TYPE_BOOLEAN, FALSE, NULL); + gst_caps_set_simple(caps, "parsed", G_TYPE_BOOLEAN, TRUE, NULL); +} + +static void init_caps_from_video_subtype(GstCaps *caps, const GUID *subtype, const void *format, UINT format_size) +{ + if (IsEqualGUID(subtype, &MFVideoFormat_CVID)) + return init_caps_from_video_cinepak(caps, format, format_size); + if (IsEqualGUID(subtype, &MFVideoFormat_H264)) + return init_caps_from_video_h264(caps, format, format_size); + if (IsEqualGUID(subtype, &MFVideoFormat_WMV1)) + return init_caps_from_video_wmv(caps, format, format_size, 1, "WMV1"); + if (IsEqualGUID(subtype, &MFVideoFormat_WMV2)) + return init_caps_from_video_wmv(caps, format, format_size, 2, "WMV2"); + if (IsEqualGUID(subtype, &MFVideoFormat_WMV3)) + return init_caps_from_video_wmv(caps, format, format_size, 3, "WMV3"); + if (IsEqualGUID(subtype, &MEDIASUBTYPE_WMVA)) + return init_caps_from_video_wmv(caps, format, format_size, 3, "WMVA"); + if (IsEqualGUID(subtype, &MFVideoFormat_WVC1)) + return init_caps_from_video_wmv(caps, format, format_size, 3, "WVC1"); + if (IsEqualGUID(subtype, &MFVideoFormat_IV50)) + return init_caps_from_video_indeo(caps, format, format_size); + if (IsEqualGUID(subtype, &MEDIASUBTYPE_MPEG1Payload)) + return init_caps_from_video_mpeg(caps, format, format_size); + + GST_FIXME("Unsupported subtype " WG_GUID_FORMAT, WG_GUID_ARGS(*subtype)); +} + +static GstVideoFormat subtype_to_gst_video_format(const GUID *subtype) +{ + GUID base = *subtype; + base.Data1 = 0; + + if (IsEqualGUID(&base, &MFVideoFormat_Base)) + { + switch (subtype->Data1) + { + case D3DFMT_A8R8G8B8: return GST_VIDEO_FORMAT_BGRA; + case D3DFMT_X8R8G8B8: return 
GST_VIDEO_FORMAT_BGRx; + case D3DFMT_R8G8B8: return GST_VIDEO_FORMAT_BGR; + case D3DFMT_A8B8G8R8: return GST_VIDEO_FORMAT_RGBA; + case D3DFMT_X1R5G5B5: return GST_VIDEO_FORMAT_RGB15; + case D3DFMT_R5G6B5: return GST_VIDEO_FORMAT_RGB16; + case MAKEFOURCC('A','Y','U','V'): return GST_VIDEO_FORMAT_AYUV; + case MAKEFOURCC('I','4','2','0'): return GST_VIDEO_FORMAT_I420; + case MAKEFOURCC('I','Y','U','V'): return GST_VIDEO_FORMAT_I420; + case MAKEFOURCC('N','V','1','2'): return GST_VIDEO_FORMAT_NV12; + case MAKEFOURCC('U','Y','V','Y'): return GST_VIDEO_FORMAT_UYVY; + case MAKEFOURCC('Y','U','Y','2'): return GST_VIDEO_FORMAT_YUY2; + case MAKEFOURCC('Y','V','1','2'): return GST_VIDEO_FORMAT_YV12; + case MAKEFOURCC('Y','V','Y','U'): return GST_VIDEO_FORMAT_YVYU; + } + } + + return GST_VIDEO_FORMAT_ENCODED; +} + +static BOOL is_mf_video_area_empty(const MFVideoArea *area) +{ + return !area->OffsetX.value && !area->OffsetY.value && !area->Area.cx && !area->Area.cy; +} + +static GstCaps *caps_from_video_format(const MFVIDEOFORMAT *format, UINT32 format_size) +{ + GstVideoFormat video_format = subtype_to_gst_video_format(&format->guidFormat); + const void *codec_data = format + 1; + GstCaps *caps; + + GST_TRACE("subtype " WG_GUID_FORMAT " %ux%u, FPS " WG_RATIO_FORMAT ", aperture " WG_APERTURE_FORMAT ", " + "PAR " WG_RATIO_FORMAT ", videoFlags %#x.", + WG_GUID_ARGS(format->guidFormat), (int)format->videoInfo.dwWidth, (int)format->videoInfo.dwHeight, + WG_RATIO_ARGS(format->videoInfo.FramesPerSecond), WG_APERTURE_ARGS(format->videoInfo.MinimumDisplayAperture), + WG_RATIO_ARGS(format->videoInfo.PixelAspectRatio), (int)format->videoInfo.VideoFlags ); + if (format->dwSize > sizeof(*format)) GST_MEMDUMP("extra bytes:", (guint8 *)(format + 1), format->dwSize - sizeof(*format)); + + if (IsEqualGUID(&format->guidFormat, &MFVideoFormat_GStreamer)) + return gst_caps_from_string(codec_data); + if (format_size > sizeof(*format) + 8 && !strncmp(codec_data, "video/x-", 8)) + return 
gst_caps_from_string(codec_data); + + if (!(caps = gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING, gst_video_format_to_string(video_format), NULL))) + return NULL; + + if (format->videoInfo.dwWidth) + gst_caps_set_simple(caps, "width", G_TYPE_INT, format->videoInfo.dwWidth, NULL); + if (format->videoInfo.dwHeight) + gst_caps_set_simple(caps, "height", G_TYPE_INT, format->videoInfo.dwHeight, NULL); + + if (format->videoInfo.PixelAspectRatio.Denominator) + gst_caps_set_simple(caps, "pixel-aspect-ratio", GST_TYPE_FRACTION, + format->videoInfo.PixelAspectRatio.Numerator, + format->videoInfo.PixelAspectRatio.Denominator, NULL); + if (format->videoInfo.FramesPerSecond.Denominator) + gst_caps_set_simple(caps, "framerate", GST_TYPE_FRACTION, + format->videoInfo.FramesPerSecond.Numerator, + format->videoInfo.FramesPerSecond.Denominator, NULL); + + if (!is_mf_video_area_empty(&format->videoInfo.MinimumDisplayAperture)) + { + gst_caps_set_simple(caps, "width", G_TYPE_INT, format->videoInfo.MinimumDisplayAperture.Area.cx, NULL); + gst_caps_set_simple(caps, "height", G_TYPE_INT, format->videoInfo.MinimumDisplayAperture.Area.cy, NULL); + } + + if (video_format == GST_VIDEO_FORMAT_ENCODED) + init_caps_from_video_subtype(caps, &format->guidFormat, format, format_size); + + return caps; +} + +GstCaps *caps_from_media_type(const struct wg_media_type *media_type) +{ + GstCaps *caps = NULL; + + if (IsEqualGUID(&media_type->major, &MFMediaType_Video)) + caps = caps_from_video_format(media_type->u.video, media_type->format_size); + else if (IsEqualGUID(&media_type->major, &MFMediaType_Audio)) + caps = caps_from_wave_format(media_type->u.audio, media_type->format_size); + + GST_TRACE("caps %"GST_PTR_FORMAT, caps); + return caps; +} + +static WORD wave_format_tag_from_gst_audio_format(GstAudioFormat audio_format) +{ + switch (audio_format) + { + case GST_AUDIO_FORMAT_U8: return WAVE_FORMAT_PCM; + case GST_AUDIO_FORMAT_S16LE: return WAVE_FORMAT_PCM; + case 
GST_AUDIO_FORMAT_S24LE: return WAVE_FORMAT_PCM; + case GST_AUDIO_FORMAT_S32LE: return WAVE_FORMAT_PCM; + case GST_AUDIO_FORMAT_F32LE: return WAVE_FORMAT_IEEE_FLOAT; + case GST_AUDIO_FORMAT_F64LE: return WAVE_FORMAT_IEEE_FLOAT; + default: return WAVE_FORMAT_EXTENSIBLE; + } +} + +static GUID subtype_from_gst_audio_format(GstAudioFormat audio_format) +{ + switch (audio_format) + { + case GST_AUDIO_FORMAT_U8: return MFAudioFormat_PCM; + case GST_AUDIO_FORMAT_S16LE: return MFAudioFormat_PCM; + case GST_AUDIO_FORMAT_S24LE: return MFAudioFormat_PCM; + case GST_AUDIO_FORMAT_S32LE: return MFAudioFormat_PCM; + case GST_AUDIO_FORMAT_F32LE: return MFAudioFormat_Float; + case GST_AUDIO_FORMAT_F64LE: return MFAudioFormat_Float; + default: return GUID_NULL; + } +} + +static void init_wave_format_ex_from_gst_caps(const GstCaps *caps, WORD format_tag, gint depth, + WAVEFORMATEX *format, UINT32 format_size) +{ + const GstStructure *structure = gst_caps_get_structure(caps, 0); + gint bitrate, channels, rate, block_align; + + memset(format, 0, format_size); + format->cbSize = format_size - sizeof(*format); + format->wFormatTag = format_tag; + format->wBitsPerSample = depth; + + if (gst_structure_get_int(structure, "channels", &channels)) + format->nChannels = channels; + if (gst_structure_get_int(structure, "rate", &rate)) + format->nSamplesPerSec = rate; + if (gst_structure_get_int(structure, "depth", &depth)) + format->wBitsPerSample = depth; + + format->nBlockAlign = format->wBitsPerSample * format->nChannels / 8; + format->nAvgBytesPerSec = format->nSamplesPerSec * format->nBlockAlign; + + if (gst_structure_get_int(structure, "block_align", &block_align)) + format->nBlockAlign = block_align; + if (gst_structure_get_int(structure, "bitrate", &bitrate)) + format->nAvgBytesPerSec = bitrate / 8; +} + +static GstBuffer *caps_get_buffer(const GstCaps *caps, const char *name, UINT32 *buffer_size) +{ + const GstStructure *structure = gst_caps_get_structure(caps, 0); + const GValue 
*buffer_value; + + if ((buffer_value = gst_structure_get_value(structure, name))) + { + GstBuffer *buffer = gst_value_get_buffer(buffer_value); + *buffer_size = gst_buffer_get_size(buffer); + return buffer; + } + + *buffer_size = 0; + return NULL; +} + +static NTSTATUS wave_format_extensible_from_gst_caps(const GstCaps *caps, const GUID *subtype, UINT32 depth, + UINT64 channel_mask, WAVEFORMATEXTENSIBLE *format, UINT32 *format_size) +{ + UINT32 capacity = *format_size, codec_data_size; + GstBuffer *codec_data = caps_get_buffer(caps, "codec_data", &codec_data_size); + + *format_size = sizeof(*format) + codec_data_size; + if (*format_size > capacity) + return STATUS_BUFFER_TOO_SMALL; + + init_wave_format_ex_from_gst_caps(caps, WAVE_FORMAT_EXTENSIBLE, depth, &format->Format, *format_size); + format->Samples.wValidBitsPerSample = 0; + format->dwChannelMask = channel_mask; + format->SubFormat = *subtype; + + if (codec_data) + gst_buffer_extract(codec_data, 0, format + 1, codec_data_size); + + GST_TRACE("tag %#x, %u channels, sample rate %u, %u bytes/sec, alignment %u, %u bits/sample, " + "%u valid bps, channel mask %#x, subtype " WG_GUID_FORMAT ".", + format->Format.wFormatTag, format->Format.nChannels, (int)format->Format.nSamplesPerSec, + (int)format->Format.nAvgBytesPerSec, format->Format.nBlockAlign, format->Format.wBitsPerSample, + format->Samples.wValidBitsPerSample, (int)format->dwChannelMask, WG_GUID_ARGS(format->SubFormat)); + if (format->Format.cbSize) + { + guint extra_size = sizeof(WAVEFORMATEX) + format->Format.cbSize - sizeof(WAVEFORMATEXTENSIBLE); + GST_MEMDUMP("extra bytes:", (guint8 *)(format + 1), extra_size); + } + + return STATUS_SUCCESS; +} + +static NTSTATUS wave_format_ex_from_gst_caps(const GstCaps *caps, WORD format_tag, UINT32 depth, + UINT32 wave_format_size, WAVEFORMATEX *format, UINT32 *format_size) +{ + UINT32 capacity = *format_size, codec_data_size; + GstBuffer *codec_data = caps_get_buffer(caps, "codec_data", &codec_data_size); + + 
*format_size = max(wave_format_size, sizeof(*format) + codec_data_size); + if (*format_size > capacity) + return STATUS_BUFFER_TOO_SMALL; + + init_wave_format_ex_from_gst_caps(caps, format_tag, depth, format, *format_size); + + if (codec_data) + gst_buffer_extract(codec_data, 0, format + 1, codec_data_size); + + GST_TRACE("tag %#x, %u channels, sample rate %u, %u bytes/sec, alignment %u, %u bits/sample.", + format->wFormatTag, format->nChannels, (int)format->nSamplesPerSec, + (int)format->nAvgBytesPerSec, format->nBlockAlign, format->wBitsPerSample); + if (format->cbSize) GST_MEMDUMP("extra bytes:", (guint8 *)(format + 1), format->cbSize); + + return STATUS_SUCCESS; +} + +static NTSTATUS wave_format_from_gst_caps(const GstCaps *caps, void *format, UINT32 *format_size) +{ + const GstStructure *structure = gst_caps_get_structure(caps, 0); + GstAudioFormat audio_format = GST_AUDIO_FORMAT_ENCODED; + WORD format_tag = WAVE_FORMAT_EXTENSIBLE; + gint channels, depth = 0; + const gchar *str_value; + guint64 channel_mask; + + if ((str_value = gst_structure_get_string(structure, "format"))) + { + audio_format = gst_audio_format_from_string(str_value); + format_tag = wave_format_tag_from_gst_audio_format(audio_format); + depth = GST_AUDIO_FORMAT_INFO_DEPTH(gst_audio_format_get_info(audio_format)); + } + + if (!gst_structure_get_int(structure, "channels", &channels)) + channels = 1; + if (!gst_structure_get(structure, "channel-mask", GST_TYPE_BITMASK, &channel_mask, NULL)) + channel_mask = 0; + + if (format_tag == WAVE_FORMAT_EXTENSIBLE || channel_mask != 0) + { + GUID subtype = subtype_from_gst_audio_format(audio_format); + return wave_format_extensible_from_gst_caps(caps, &subtype, depth, channel_mask, format, format_size); + } + + return wave_format_ex_from_gst_caps(caps, format_tag, depth, sizeof(WAVEFORMATEX), format, format_size); +} + +static NTSTATUS mpeg_wave_format_from_gst_caps(const GstCaps *caps, WAVEFORMATEX *format, UINT32 *format_size) +{ + const GstStructure 
*structure = gst_caps_get_structure(caps, 0); + NTSTATUS status; + gint layer = 0; + + if (gst_structure_get_int(structure, "layer", &layer) && layer == 3) + return wave_format_ex_from_gst_caps(caps, WAVE_FORMAT_MPEGLAYER3, 0, sizeof(MPEGLAYER3WAVEFORMAT), format, format_size); + + if (!(status = wave_format_ex_from_gst_caps(caps, WAVE_FORMAT_MPEG, 0, sizeof(MPEG1WAVEFORMAT), format, format_size))) + { + MPEG1WAVEFORMAT *mpeg = CONTAINING_RECORD(format, MPEG1WAVEFORMAT, wfx); + mpeg->fwHeadLayer = layer; + } + + return status; +} + +static NTSTATUS wma_wave_format_from_gst_caps(const GstCaps *caps, WAVEFORMATEX *format, UINT32 *format_size) +{ + const GstStructure *structure = gst_caps_get_structure(caps, 0); + gint wmaversion; + + if (!gst_structure_get_int(structure, "wmaversion", &wmaversion)) + { + GST_WARNING("Missing \"wmaversion\" value in %" GST_PTR_FORMAT ".", caps); + return STATUS_INVALID_PARAMETER; + } + + if (wmaversion == 1) + return wave_format_ex_from_gst_caps(caps, WAVE_FORMAT_MSAUDIO1, 0, sizeof(MSAUDIO1WAVEFORMAT), format, format_size); + if (wmaversion == 2) + return wave_format_ex_from_gst_caps(caps, WAVE_FORMAT_WMAUDIO2, 0, sizeof(WMAUDIO2WAVEFORMAT), format, format_size); + if (wmaversion == 3) + return wave_format_ex_from_gst_caps(caps, WAVE_FORMAT_WMAUDIO3, 0, sizeof(WMAUDIO3WAVEFORMAT), format, format_size); + + GST_FIXME("Unsupported wmaversion %u", wmaversion); + return STATUS_NOT_IMPLEMENTED; +} + +static NTSTATUS other_wave_format_from_gst_caps(const GstCaps *caps, WAVEFORMATEXTENSIBLE *format, + UINT32 *format_size) +{ + gchar *str = gst_caps_to_string(caps); + UINT32 capacity = *format_size, codec_data_size = strlen(str) + 1; + + *format_size = sizeof(*format) + codec_data_size; + if (*format_size > capacity) + { + g_free(str); + return STATUS_BUFFER_TOO_SMALL; + } + + init_wave_format_ex_from_gst_caps(caps, WAVE_FORMAT_EXTENSIBLE, 0, &format->Format, *format_size); + format->SubFormat = MFAudioFormat_GStreamer; + memcpy(format + 1, 
str, codec_data_size); + g_free(str); + + return STATUS_SUCCESS; +} + +static GUID subtype_from_gst_video_format(GstVideoFormat video_format) +{ + switch (video_format) + { + case GST_VIDEO_FORMAT_BGRA: return MFVideoFormat_ARGB32; + case GST_VIDEO_FORMAT_BGRx: return MFVideoFormat_RGB32; + case GST_VIDEO_FORMAT_BGR: return MFVideoFormat_RGB24; + case GST_VIDEO_FORMAT_RGBA: return MFVideoFormat_ABGR32; + case GST_VIDEO_FORMAT_RGB15: return MFVideoFormat_RGB555; + case GST_VIDEO_FORMAT_RGB16: return MFVideoFormat_RGB565; + case GST_VIDEO_FORMAT_AYUV: return MFVideoFormat_AYUV; + case GST_VIDEO_FORMAT_I420: return MFVideoFormat_I420; + case GST_VIDEO_FORMAT_NV12: return MFVideoFormat_NV12; + case GST_VIDEO_FORMAT_UYVY: return MFVideoFormat_UYVY; + case GST_VIDEO_FORMAT_YUY2: return MFVideoFormat_YUY2; + case GST_VIDEO_FORMAT_YV12: return MFVideoFormat_YV12; + case GST_VIDEO_FORMAT_YVYU: return MFVideoFormat_YVYU; + case GST_VIDEO_FORMAT_ENCODED: return MFVideoFormat_GStreamer; + default: return GUID_NULL; + } +} + +static void init_mf_video_format_from_gst_caps(const GstCaps *caps, const GUID *subtype, MFVIDEOFORMAT *format, + UINT32 format_size, UINT32 video_plane_align) +{ + const GstStructure *structure = gst_caps_get_structure(caps, 0); + gint width = 0, height = 0, num_value, den_value; + const gchar *str_value; + + memset(format, 0, format_size); + format->dwSize = format_size; + + if (subtype) + format->guidFormat = *subtype; + else if ((str_value = gst_structure_get_string(structure, "format"))) + { + GstVideoFormat video_format = gst_video_format_from_string(str_value); + format->guidFormat = subtype_from_gst_video_format(video_format); + } + + if (gst_structure_get_int(structure, "width", &width)) + format->videoInfo.dwWidth = (width + video_plane_align) & ~video_plane_align; + if (gst_structure_get_int(structure, "height", &height)) + format->videoInfo.dwHeight = (height + video_plane_align) & ~video_plane_align; + if (format->videoInfo.dwWidth != width || 
format->videoInfo.dwHeight != height) + { + format->videoInfo.MinimumDisplayAperture.Area.cx = width; + format->videoInfo.MinimumDisplayAperture.Area.cy = height; + } + format->videoInfo.GeometricAperture = format->videoInfo.MinimumDisplayAperture; + format->videoInfo.PanScanAperture = format->videoInfo.MinimumDisplayAperture; + + if (gst_structure_get_fraction(structure, "pixel-aspect-ratio", &num_value, &den_value)) + { + format->videoInfo.PixelAspectRatio.Numerator = num_value; + format->videoInfo.PixelAspectRatio.Denominator = den_value; + } + if (gst_structure_get_fraction(structure, "framerate", &num_value, &den_value)) + { + format->videoInfo.FramesPerSecond.Numerator = num_value; + format->videoInfo.FramesPerSecond.Denominator = den_value; + } +} + +static NTSTATUS video_format_from_gst_caps(const GstCaps *caps, const GUID *subtype, MFVIDEOFORMAT *format, + UINT32 *format_size, UINT32 video_plane_align) +{ + UINT32 capacity = *format_size, codec_data_size; + GstBuffer *codec_data = caps_get_buffer(caps, "codec_data", &codec_data_size); + + *format_size = sizeof(*format) + codec_data_size; + if (*format_size > capacity) + return STATUS_BUFFER_TOO_SMALL; + + init_mf_video_format_from_gst_caps(caps, subtype, format, *format_size, video_plane_align); + + if (codec_data) + gst_buffer_extract(codec_data, 0, format + 1, codec_data_size); + + GST_TRACE("subtype " WG_GUID_FORMAT " %ux%u, FPS " WG_RATIO_FORMAT ", aperture " WG_APERTURE_FORMAT ", " + "PAR " WG_RATIO_FORMAT ", videoFlags %#x.", + WG_GUID_ARGS(format->guidFormat), (int)format->videoInfo.dwWidth, (int)format->videoInfo.dwHeight, + WG_RATIO_ARGS(format->videoInfo.FramesPerSecond), WG_APERTURE_ARGS(format->videoInfo.MinimumDisplayAperture), + WG_RATIO_ARGS(format->videoInfo.PixelAspectRatio), (int)format->videoInfo.VideoFlags ); + if (format->dwSize > sizeof(*format)) GST_MEMDUMP("extra bytes:", (guint8 *)(format + 1), format->dwSize - sizeof(*format)); + + return STATUS_SUCCESS; +} + +static NTSTATUS 
wmv_video_format_from_gst_caps(const GstCaps *caps, MFVIDEOFORMAT *format, + UINT32 *format_size, UINT32 video_plane_align) +{ + const GstStructure *structure = gst_caps_get_structure(caps, 0); + gchar format_buffer[5] = {'W','M','V','0',0}; + const gchar *wmv_format_str; + gint wmv_version = 0; + const GUID *subtype; + + if (!(wmv_format_str = gst_structure_get_string(structure, "format"))) + { + if (!gst_structure_get_int(structure, "wmvversion", &wmv_version)) + GST_WARNING("Unable to get WMV format."); + format_buffer[3] += wmv_version; + wmv_format_str = format_buffer; + } + + if (!strcmp(wmv_format_str, "WMV1")) + subtype = &MFVideoFormat_WMV1; + else if (!strcmp(wmv_format_str, "WMV2")) + subtype = &MFVideoFormat_WMV2; + else if (!strcmp(wmv_format_str, "WMV3")) + subtype = &MFVideoFormat_WMV3; + else if (!strcmp(wmv_format_str, "WMVA")) + subtype = &MEDIASUBTYPE_WMVA; + else if (!strcmp(wmv_format_str, "WVC1")) + subtype = &MFVideoFormat_WVC1; + else + { + GST_WARNING("Unknown \"wmvversion\" value."); + return STATUS_INVALID_PARAMETER; + } + + return video_format_from_gst_caps(caps, subtype, format, format_size, video_plane_align); +} + +static NTSTATUS mpeg_video_format_from_gst_caps(const GstCaps *caps, struct mpeg_video_format *format, + UINT32 *format_size, UINT32 video_plane_align) +{ + UINT32 capacity = *format_size, codec_data_size; + GstBuffer *codec_data = caps_get_buffer(caps, "codec_data", &codec_data_size); + + *format_size = sizeof(*format) + codec_data_size; + if (*format_size > capacity) + return STATUS_BUFFER_TOO_SMALL; + + init_mf_video_format_from_gst_caps(caps, &MEDIASUBTYPE_MPEG1Payload, &format->hdr, *format_size, video_plane_align); + + if (codec_data) + { + gst_buffer_extract(codec_data, 0, format->sequence_header, codec_data_size); + format->sequence_header_count = codec_data_size; + } + + GST_TRACE("subtype " WG_GUID_FORMAT " %ux%u, FPS " WG_RATIO_FORMAT ", aperture " WG_APERTURE_FORMAT ", " + "PAR " WG_RATIO_FORMAT ", videoFlags 
%#x, start_time_code %u, profile %u, level %u, flags %#x.", + WG_GUID_ARGS(format->hdr.guidFormat), (int)format->hdr.videoInfo.dwWidth, (int)format->hdr.videoInfo.dwHeight, + WG_RATIO_ARGS(format->hdr.videoInfo.FramesPerSecond), WG_APERTURE_ARGS(format->hdr.videoInfo.MinimumDisplayAperture), + WG_RATIO_ARGS(format->hdr.videoInfo.PixelAspectRatio), (int)format->hdr.videoInfo.VideoFlags, format->start_time_code, + format->profile, format->level, format->flags ); + if (format->sequence_header_count) GST_MEMDUMP("extra bytes:", format->sequence_header, format->sequence_header_count); + + return STATUS_SUCCESS; +} + +static NTSTATUS other_video_format_from_gst_caps(const GstCaps *caps, MFVIDEOFORMAT *format, UINT32 *format_size, + UINT32 video_plane_align) +{ + gchar *str = gst_caps_to_string(caps); + GstCaps *copy = gst_caps_copy(caps); + gsize len = strlen(str) + 1; + GstBuffer *buffer; + + if (!(buffer = gst_buffer_new_and_alloc(len))) + return STATUS_NO_MEMORY; + gst_buffer_fill(buffer, 0, str, len); + gst_caps_set_simple(copy, "codec_data", GST_TYPE_BUFFER, buffer, NULL); + gst_buffer_unref(buffer); + + return video_format_from_gst_caps(copy, &MFVideoFormat_GStreamer, format, format_size, video_plane_align); +} + +NTSTATUS caps_to_media_type(GstCaps *caps, struct wg_media_type *media_type, UINT32 video_plane_align) +{ + const GstStructure *structure = gst_caps_get_structure(caps, 0); + const char *name = gst_structure_get_name(structure); + gboolean parsed; + + GST_TRACE("caps %"GST_PTR_FORMAT, caps); + + if (g_str_has_prefix(name, "audio/")) + { + media_type->major = MFMediaType_Audio; + + if (!strcmp(name, "audio/x-raw")) + return wave_format_from_gst_caps(caps, media_type->u.audio, &media_type->format_size); + if (!strcmp(name, "audio/mpeg") && gst_structure_get_boolean(structure, "parsed", &parsed) && parsed) + return mpeg_wave_format_from_gst_caps(caps, media_type->u.audio, &media_type->format_size); + if (!strcmp(name, "audio/x-wma")) + return 
wma_wave_format_from_gst_caps(caps, media_type->u.audio, &media_type->format_size); + + GST_FIXME("Using fallback for unknown audio caps %" GST_PTR_FORMAT ".", caps); + return other_wave_format_from_gst_caps(caps, media_type->u.format, &media_type->format_size); + } + else if (g_str_has_prefix(name, "video/")) + { + media_type->major = MFMediaType_Video; + + if (!strcmp(name, "video/x-raw")) + return video_format_from_gst_caps(caps, NULL, media_type->u.video, &media_type->format_size, video_plane_align); + if (!strcmp(name, "video/x-cinepak")) + return video_format_from_gst_caps(caps, &MFVideoFormat_CVID, media_type->u.video, &media_type->format_size, video_plane_align); + if (!strcmp(name, "video/x-h264")) + return video_format_from_gst_caps(caps, &MFVideoFormat_H264, media_type->u.video, &media_type->format_size, video_plane_align); + if (!strcmp(name, "video/x-wmv")) + return wmv_video_format_from_gst_caps(caps, media_type->u.video, &media_type->format_size, video_plane_align); + if (!strcmp(name, "video/mpeg") && gst_structure_get_boolean(structure, "parsed", &parsed) && parsed) + return mpeg_video_format_from_gst_caps(caps, media_type->u.format, &media_type->format_size, video_plane_align); + + GST_FIXME("Using fallback for unknown video caps %" GST_PTR_FORMAT ".", caps); + return other_video_format_from_gst_caps(caps, media_type->u.video, &media_type->format_size, video_plane_align); + } + else + { + GST_FIXME("Unhandled caps %" GST_PTR_FORMAT ".", caps); + return STATUS_UNSUCCESSFUL; + } + + return STATUS_SUCCESS; +} diff --git a/dlls/winegstreamer/wg_parser.c b/dlls/winegstreamer/wg_parser.c index 1cd44a90e6e..7a6e0de7ca2 100644 --- a/dlls/winegstreamer/wg_parser.c +++ b/dlls/winegstreamer/wg_parser.c @@ -120,13 +120,15 @@ struct wg_parser_stream GstPad *my_sink; GstElement *flip, *decodebin; GstSegment segment; - struct wg_format preferred_format, current_format, codec_format; + GstCaps *codec_caps; + GstCaps *current_caps; + GstCaps *desired_caps; 
pthread_cond_t event_cond, event_empty_cond; GstBuffer *buffer; GstMapInfo map_info; - bool flushing, eos, enabled, has_caps, has_tags, has_buffer, no_more_pads; + bool flushing, eos, enabled, has_tags, has_buffer, no_more_pads; uint64_t duration; gchar *tags[WG_PARSER_TAG_COUNT]; @@ -144,13 +146,17 @@ static struct wg_parser_stream *get_stream(wg_parser_stream_t stream) return (struct wg_parser_stream *)(ULONG_PTR)stream; } -static bool format_is_compressed(struct wg_format *format) +static bool caps_is_compressed(GstCaps *caps) { - return format->major_type != WG_MAJOR_TYPE_UNKNOWN - && format->major_type != WG_MAJOR_TYPE_VIDEO - && format->major_type != WG_MAJOR_TYPE_AUDIO - && format->major_type != WG_MAJOR_TYPE_VIDEO_ENCODED - && format->major_type != WG_MAJOR_TYPE_AUDIO_ENCODED; + struct wg_format format; + + if (!caps) + return false; + wg_format_from_caps(&format, caps); + + return format.major_type != WG_MAJOR_TYPE_UNKNOWN + && format.major_type != WG_MAJOR_TYPE_VIDEO + && format.major_type != WG_MAJOR_TYPE_AUDIO; } static NTSTATUS wg_parser_get_stream_count(void *args) @@ -235,36 +241,60 @@ static NTSTATUS wg_parser_push_data(void *args) return S_OK; } -static NTSTATUS wg_parser_stream_get_preferred_format(void *args) +static NTSTATUS wg_parser_stream_get_current_format(void *args) { - const struct wg_parser_stream_get_preferred_format_params *params = args; + const struct wg_parser_stream_get_current_format_params *params = args; + struct wg_parser_stream *stream = get_stream(params->stream); + + if (stream->current_caps) + wg_format_from_caps(params->format, stream->current_caps); + else + memset(params->format, 0, sizeof(*params->format)); - *params->format = get_stream(params->stream)->preferred_format; return S_OK; } +static NTSTATUS wg_parser_stream_get_current_type(void *args) +{ + struct wg_parser_stream_get_current_type_params *params = args; + struct wg_parser_stream *stream = get_stream(params->stream); + + if (!stream->current_caps) + return 
STATUS_INVALID_PARAMETER; + return caps_to_media_type(stream->current_caps, ¶ms->media_type, 0); +} + static NTSTATUS wg_parser_stream_get_codec_format(void *args) { struct wg_parser_stream_get_codec_format_params *params = args; struct wg_parser_stream *stream = get_stream(params->stream); + GST_TRACE("caps %" GST_PTR_FORMAT, stream->current_caps); + + if (stream->current_caps) { /* HACK: Return untranscoded codec format for transcoded stream. */ - struct wg_format untranscoded_format; + GstCaps *caps = gst_caps_copy(stream->current_caps); - untranscoded_format = stream->preferred_format; - if (get_untranscoded_stream_format(stream->parser->container, stream->number, &untranscoded_format)) + if (get_untranscoded_stream_format(stream->parser->container, stream->number, caps)) { - *params->format = untranscoded_format; + GST_TRACE("returning caps %" GST_PTR_FORMAT, caps); + wg_format_from_caps(params->format, caps); + gst_caps_unref(caps); return S_OK; } + gst_caps_unref(caps); GST_WARNING("Failed to get untranscoded codec format for stream %u.\n", stream->number); } - *params->format = format_is_compressed(&stream->codec_format) ? - stream->codec_format : - stream->preferred_format; + if (caps_is_compressed(stream->codec_caps)) + wg_format_from_caps(params->format, stream->codec_caps); + else if (stream->current_caps) + wg_format_from_caps(params->format, stream->current_caps); + else + memset(params->format, 0, sizeof(*params->format)); + return S_OK; } @@ -277,14 +307,38 @@ static NTSTATUS wg_parser_stream_enable(void *args) pthread_mutex_lock(&parser->mutex); - stream->current_format = *format; + stream->desired_caps = wg_format_to_caps(format); stream->enabled = true; pthread_mutex_unlock(&parser->mutex); if (format->major_type == WG_MAJOR_TYPE_VIDEO) { - bool flip = (params->flags & STREAM_ENABLE_FLAG_FLIP_RGB) && (format->u.video.height < 0); + bool flip = (format->u.video.height < 0); + + gst_util_set_object_arg(G_OBJECT(stream->flip), "method", flip ? 
"vertical-flip" : "none"); + } + + push_event(stream->my_sink, gst_event_new_reconfigure()); + return S_OK; +} + +static NTSTATUS wg_parser_stream_enable_type(void *args) +{ + const struct wg_parser_stream_enable_type_params *params = args; + struct wg_parser_stream *stream = get_stream(params->stream); + struct wg_parser *parser = stream->parser; + + pthread_mutex_lock(&parser->mutex); + + stream->desired_caps = caps_from_media_type(¶ms->media_type); + stream->enabled = true; + + pthread_mutex_unlock(&parser->mutex); + + if (IsEqualGUID(¶ms->media_type.major, &MEDIATYPE_Video) && stream->flip) + { + bool flip = !!(params->media_type.u.video->videoInfo.VideoFlags & MFVideoFlag_BottomUpLinearRep); gst_util_set_object_arg(G_OBJECT(stream->flip), "method", flip ? "vertical-flip" : "none"); } @@ -300,7 +354,11 @@ static NTSTATUS wg_parser_stream_disable(void *args) pthread_mutex_lock(&parser->mutex); stream->enabled = false; - stream->current_format.major_type = WG_MAJOR_TYPE_UNKNOWN; + if (stream->desired_caps) + { + gst_caps_unref(stream->desired_caps); + stream->desired_caps = NULL; + } pthread_mutex_unlock(&parser->mutex); pthread_cond_signal(&stream->event_cond); pthread_cond_signal(&stream->event_empty_cond); @@ -538,11 +596,48 @@ static bool parser_no_more_pads(struct wg_parser *parser) static gboolean autoplug_continue_cb(GstElement * decodebin, GstPad *pad, GstCaps * caps, gpointer user) { - struct wg_format format; + const GstStructure *structure = gst_caps_get_structure(caps, 0); + const char *name = gst_structure_get_name(structure); + struct wg_parser *parser = user; + GstElementFactory *factory; + GstElement *element; + gboolean parsed; + const char *sgi; - wg_format_from_caps(&format, caps); + if ((sgi = getenv("SteamGameId")) && (!strcmp(sgi, "1083650") || !strcmp(sgi, "1097880"))) + return true; + + if (!parser->output_compressed) + return true; + + /* make sure to autoplug parsers for mpeg audio / video */ + if (!strcmp(name, "audio/mpeg") || 
!strcmp(name, "video/mpeg")) + { + if (!gst_structure_get_boolean(structure, "parsed", &parsed)) + return true; + return !parsed; + } - return !format_is_compressed(&format); + gst_object_ref(pad); + while (GST_IS_GHOST_PAD(pad)) + { + GstGhostPad *ghost = GST_GHOST_PAD(pad); + pad = gst_ghost_pad_get_target(ghost); + gst_object_unref(ghost); + } + + element = gst_pad_get_parent_element(pad); + gst_object_unref(pad); + + factory = gst_element_get_factory(element); + gst_object_unref(element); + + name = gst_plugin_feature_get_name(GST_PLUGIN_FEATURE(factory)); + if (!strcmp(name, "protonvideoconverter") || !strcmp(name, "protonaudioconverter")) + return true; + + GST_TRACE("factory %"GST_PTR_FORMAT" element %"GST_PTR_FORMAT" pad %"GST_PTR_FORMAT" caps %"GST_PTR_FORMAT"", factory, element, pad, caps); + return !factory || !gst_element_factory_list_is_type(factory, GST_ELEMENT_FACTORY_TYPE_DEMUXER); } gboolean caps_detect_h264(GstCapsFeatures *features, GstStructure *structure, gpointer user_data) @@ -770,8 +865,7 @@ static gboolean sink_event_cb(GstPad *pad, GstObject *parent, GstEvent *event) gst_event_parse_caps(event, &caps); pthread_mutex_lock(&parser->mutex); - wg_format_from_caps(&stream->preferred_format, caps); - stream->has_caps = true; + stream->current_caps = gst_caps_ref(caps); pthread_mutex_unlock(&parser->mutex); pthread_cond_signal(&parser->init_cond); break; @@ -866,11 +960,12 @@ static gboolean sink_query_cb(GstPad *pad, GstObject *parent, GstQuery *query) gst_query_parse_caps(query, &filter); pthread_mutex_lock(&parser->mutex); - caps = wg_format_to_caps(&stream->current_format); - pthread_mutex_unlock(&parser->mutex); - - if (!caps) + if (!stream->desired_caps || !(caps = gst_caps_copy(stream->desired_caps))) + { + pthread_mutex_unlock(&parser->mutex); return FALSE; + } + pthread_mutex_unlock(&parser->mutex); /* Clear some fields that shouldn't prevent us from connecting. 
*/ for (i = 0; i < gst_caps_get_size(caps); ++i) @@ -893,13 +988,13 @@ static gboolean sink_query_cb(GstPad *pad, GstObject *parent, GstQuery *query) case GST_QUERY_ACCEPT_CAPS: { - struct wg_format format; + struct wg_format format, current_format; gboolean ret = TRUE; GstCaps *caps; pthread_mutex_lock(&parser->mutex); - if (stream->current_format.major_type == WG_MAJOR_TYPE_UNKNOWN) + if (!stream->desired_caps) { pthread_mutex_unlock(&parser->mutex); gst_query_set_accept_caps_result(query, TRUE); @@ -908,7 +1003,8 @@ static gboolean sink_query_cb(GstPad *pad, GstObject *parent, GstQuery *query) gst_query_parse_accept_caps(query, &caps); wg_format_from_caps(&format, caps); - ret = wg_format_compare(&format, &stream->current_format); + wg_format_from_caps(¤t_format, stream->desired_caps); + ret = wg_format_compare(&format, ¤t_format); pthread_mutex_unlock(&parser->mutex); @@ -942,7 +1038,6 @@ static struct wg_parser_stream *create_stream(struct wg_parser *parser, char *id stream->parser = parser; stream->number = parser->stream_count; stream->no_more_pads = true; - stream->current_format.major_type = WG_MAJOR_TYPE_UNKNOWN; pthread_cond_init(&stream->event_cond, NULL); pthread_cond_init(&stream->event_empty_cond, NULL); @@ -1183,7 +1278,6 @@ static void pad_added_cb(GstElement *element, GstPad *pad, gpointer user) { struct wg_parser_stream *stream; struct wg_parser *parser = user; - GstCaps *caps; GST_LOG("parser %p, element %p, pad %p.", parser, element, pad); @@ -1192,13 +1286,10 @@ static void pad_added_cb(GstElement *element, GstPad *pad, gpointer user) if (!(stream = create_stream(parser, gst_pad_get_stream_id(pad)))) return; - - caps = gst_pad_query_caps(pad, NULL); - wg_format_from_caps(&stream->codec_format, caps); - gst_caps_unref(caps); + stream->codec_caps = gst_pad_query_caps(pad, NULL); /* For compressed stream, create an extra decodebin to decode it. 
*/ - if (!parser->output_compressed && format_is_compressed(&stream->codec_format)) + if (!parser->output_compressed && caps_is_compressed(stream->codec_caps)) { if (!stream_decodebin_create(stream)) { @@ -1895,7 +1986,7 @@ static NTSTATUS wg_parser_connect(void *args) gint64 duration; /* If we received a buffer, waiting for tags or caps does not make sense anymore. */ - while ((!stream->has_caps || !stream->has_tags) && !parser->error && !stream->has_buffer) + while ((!stream->current_caps || !stream->has_tags) && !parser->error && !stream->has_buffer) pthread_cond_wait(&parser->init_cond, &parser->mutex); /* GStreamer doesn't actually provide any guarantees about when duration @@ -2095,6 +2186,7 @@ static BOOL uridecodebin_parser_init_gst(struct wg_parser *parser) if (!(element = create_element("uridecodebin", "base"))) return FALSE; + parser->output_compressed = true; gst_bin_add(GST_BIN(parser->container), element); parser->decodebin = element; @@ -2102,6 +2194,7 @@ static BOOL uridecodebin_parser_init_gst(struct wg_parser *parser) g_object_set(parser->decodebin, "uri", parser->uri, NULL); g_signal_connect(element, "pad-added", G_CALLBACK(pad_added_cb), parser); g_signal_connect(element, "pad-removed", G_CALLBACK(pad_removed_cb), parser); + g_signal_connect(element, "autoplug-continue", G_CALLBACK(autoplug_continue_cb), parser); g_signal_connect(element, "autoplug-select", G_CALLBACK(autoplug_select_cb), parser); g_signal_connect(element, "autoplug-sort", G_CALLBACK(autoplug_sort_cb), parser); g_signal_connect(element, "no-more-pads", G_CALLBACK(no_more_pads_cb), parser); @@ -2248,9 +2341,11 @@ const unixlib_entry_t __wine_unix_call_funcs[] = X(wg_parser_get_stream_count), X(wg_parser_get_stream), - X(wg_parser_stream_get_preferred_format), + X(wg_parser_stream_get_current_format), + X(wg_parser_stream_get_current_type), X(wg_parser_stream_get_codec_format), X(wg_parser_stream_enable), + X(wg_parser_stream_enable_type), X(wg_parser_stream_disable), 
X(wg_parser_stream_get_buffer), @@ -2270,13 +2365,14 @@ const unixlib_entry_t __wine_unix_call_funcs[] = X(wg_source_set_position), X(wg_source_push_data), X(wg_source_read_data), - X(wg_source_get_stream_format), + X(wg_source_get_stream_type), X(wg_source_get_stream_tag), X(wg_source_set_stream_flags), X(wg_transform_create), X(wg_transform_destroy), - X(wg_transform_set_output_format), + X(wg_transform_get_output_type), + X(wg_transform_set_output_type), X(wg_transform_push_data), X(wg_transform_read_data), @@ -2300,6 +2396,13 @@ C_ASSERT(ARRAYSIZE(__wine_unix_call_funcs) == unix_wg_funcs_count); typedef ULONG PTR32; +struct wg_media_type32 +{ + GUID major; + UINT32 format_size; + PTR32 format; +}; + static NTSTATUS wow64_wg_parser_connect(void *args) { struct @@ -2335,20 +2438,45 @@ static NTSTATUS wow64_wg_parser_push_data(void *args) { return wg_parser_push_data(¶ms); } -static NTSTATUS wow64_wg_parser_stream_get_preferred_format(void *args) +static NTSTATUS wow64_wg_parser_stream_get_current_format(void *args) { struct { wg_parser_stream_t stream; PTR32 format; } *params32 = args; - struct wg_parser_stream_get_preferred_format_params params = + struct wg_parser_stream_get_current_format_params params = { .stream = params32->stream, .format = ULongToPtr(params32->format), }; - return wg_parser_stream_get_preferred_format(¶ms); + return wg_parser_stream_get_current_format(¶ms); +} + +NTSTATUS wow64_wg_parser_stream_get_current_type(void *args) +{ + struct + { + wg_parser_stream_t stream; + struct wg_media_type32 media_type; + } *params32 = args; + struct wg_parser_stream_get_current_type_params params = + { + .stream = params32->stream, + .media_type = + { + .major = params32->media_type.major, + .format_size = params32->media_type.format_size, + .u.format = ULongToPtr(params32->media_type.format), + }, + }; + NTSTATUS status; + + status = wg_parser_stream_get_current_type(¶ms); + params32->media_type.major = params.media_type.major; + 
params32->media_type.format_size = params.media_type.format_size; + return status; } static NTSTATUS wow64_wg_parser_stream_get_codec_format(void *args) @@ -2383,6 +2511,27 @@ static NTSTATUS wow64_wg_parser_stream_enable(void *args) return wg_parser_stream_enable(¶ms); } +NTSTATUS wow64_wg_parser_stream_enable_type(void *args) +{ + struct + { + wg_parser_stream_t stream; + struct wg_media_type32 media_type; + } *params32 = args; + struct wg_parser_stream_enable_type_params params = + { + .stream = params32->stream, + .media_type = + { + .major = params32->media_type.major, + .format_size = params32->media_type.format_size, + .u.format = ULongToPtr(params32->media_type.format), + }, + }; + + return wg_parser_stream_enable_type(¶ms); +} + static NTSTATUS wow64_wg_parser_stream_get_buffer(void *args) { struct @@ -2499,6 +2648,33 @@ NTSTATUS wow64_wg_source_read_data(void *args) return wg_source_read_data(¶ms); } +NTSTATUS wow64_wg_source_get_stream_type(void *args) +{ + struct + { + wg_source_t source; + UINT32 index; + struct wg_media_type32 media_type; + } *params32 = args; + struct wg_source_get_stream_type_params params = + { + .source = params32->source, + .index = params32->index, + .media_type = + { + .major = params32->media_type.major, + .format_size = params32->media_type.format_size, + .u.format = ULongToPtr(params32->media_type.format), + }, + }; + NTSTATUS status; + + status = wg_source_get_stream_type(¶ms); + params32->media_type.major = params.media_type.major; + params32->media_type.format_size = params.media_type.format_size; + return status; +} + NTSTATUS wow64_wg_source_get_stream_tag(void *args) { struct @@ -2526,15 +2702,25 @@ NTSTATUS wow64_wg_transform_create(void *args) struct { wg_transform_t transform; - PTR32 input_format; - PTR32 output_format; - PTR32 attrs; + struct wg_media_type32 input_type; + struct wg_media_type32 output_type; + struct wg_transform_attrs attrs; } *params32 = args; struct wg_transform_create_params params = { - 
.input_format = ULongToPtr(params32->input_format), - .output_format = ULongToPtr(params32->output_format), - .attrs = ULongToPtr(params32->attrs), + .input_type = + { + .major = params32->input_type.major, + .format_size = params32->input_type.format_size, + .u.format = ULongToPtr(params32->input_type.format), + }, + .output_type = + { + .major = params32->output_type.major, + .format_size = params32->output_type.format_size, + .u.format = ULongToPtr(params32->output_type.format), + }, + .attrs = params32->attrs, }; NTSTATUS ret; @@ -2543,19 +2729,49 @@ NTSTATUS wow64_wg_transform_create(void *args) return ret; } -NTSTATUS wow64_wg_transform_set_output_format(void *args) +NTSTATUS wow64_wg_transform_get_output_type(void *args) { struct { wg_transform_t transform; - PTR32 format; + struct wg_media_type32 media_type; } *params32 = args; - struct wg_transform_set_output_format_params params = + struct wg_transform_get_output_type_params params = { .transform = params32->transform, - .format = ULongToPtr(params32->format), + .media_type = + { + .major = params32->media_type.major, + .format_size = params32->media_type.format_size, + .u.format = ULongToPtr(params32->media_type.format), + }, }; - return wg_transform_set_output_format(¶ms); + NTSTATUS status; + + status = wg_transform_get_output_type(¶ms); + params32->media_type.major = params.media_type.major; + params32->media_type.format_size = params.media_type.format_size; + return status; +} + +NTSTATUS wow64_wg_transform_set_output_type(void *args) +{ + struct + { + wg_transform_t transform; + struct wg_media_type32 media_type; + } *params32 = args; + struct wg_transform_set_output_type_params params = + { + .transform = params32->transform, + .media_type = + { + .major = params32->media_type.major, + .format_size = params32->media_type.format_size, + .u.format = ULongToPtr(params32->media_type.format), + }, + }; + return wg_transform_set_output_type(¶ms); } NTSTATUS wow64_wg_transform_push_data(void *args) @@ 
-2584,14 +2800,12 @@ NTSTATUS wow64_wg_transform_read_data(void *args) { wg_transform_t transform; PTR32 sample; - PTR32 format; HRESULT result; } *params32 = args; struct wg_transform_read_data_params params = { .transform = params32->transform, .sample = ULongToPtr(params32->sample), - .format = ULongToPtr(params32->format), }; NTSTATUS ret; @@ -2693,9 +2907,11 @@ const unixlib_entry_t __wine_unix_call_wow64_funcs[] = X(wg_parser_get_stream_count), X(wg_parser_get_stream), - X64(wg_parser_stream_get_preferred_format), + X64(wg_parser_stream_get_current_format), + X64(wg_parser_stream_get_current_type), X64(wg_parser_stream_get_codec_format), X64(wg_parser_stream_enable), + X64(wg_parser_stream_enable_type), X(wg_parser_stream_disable), X64(wg_parser_stream_get_buffer), @@ -2714,12 +2930,13 @@ const unixlib_entry_t __wine_unix_call_wow64_funcs[] = X(wg_source_get_position), X64(wg_source_push_data), X64(wg_source_read_data), - X(wg_source_get_stream_format), + X64(wg_source_get_stream_type), X64(wg_source_get_stream_tag), X64(wg_transform_create), X(wg_transform_destroy), - X64(wg_transform_set_output_format), + X64(wg_transform_get_output_type), + X64(wg_transform_set_output_type), X64(wg_transform_push_data), X64(wg_transform_read_data), diff --git a/dlls/winegstreamer/wg_sample.c b/dlls/winegstreamer/wg_sample.c index 513907d9d77..00f0187afca 100644 --- a/dlls/winegstreamer/wg_sample.c +++ b/dlls/winegstreamer/wg_sample.c @@ -305,13 +305,6 @@ void wg_sample_queue_destroy(struct wg_sample_queue *queue) free(queue); } -/* These unixlib entry points should not be used directly, they assume samples - * to be queued and zero-copy support, use the helpers below instead. 
- */ -HRESULT wg_transform_push_data(wg_transform_t transform, struct wg_sample *sample); -HRESULT wg_transform_read_data(wg_transform_t transform, struct wg_sample *sample, - struct wg_format *format); - HRESULT wg_transform_push_mf(wg_transform_t transform, IMFSample *sample, struct wg_sample_queue *queue) { @@ -348,23 +341,21 @@ HRESULT wg_transform_push_mf(wg_transform_t transform, IMFSample *sample, } HRESULT wg_transform_read_mf(wg_transform_t transform, IMFSample *sample, - DWORD sample_size, struct wg_format *format, DWORD *flags) + DWORD sample_size, DWORD *flags) { struct wg_sample *wg_sample; IMFMediaBuffer *buffer; HRESULT hr; - TRACE_(mfplat)("transform %#I64x, sample %p, format %p, flags %p.\n", transform, sample, format, flags); + TRACE_(mfplat)("transform %#I64x, sample %p, flags %p.\n", transform, sample, flags); if (FAILED(hr = wg_sample_create_mf(sample, &wg_sample))) return hr; wg_sample->size = 0; - if (FAILED(hr = wg_transform_read_data(transform, wg_sample, format))) + if (FAILED(hr = wg_transform_read_data(transform, wg_sample))) { - if (hr == MF_E_TRANSFORM_STREAM_CHANGE && !format) - FIXME("Unexpected stream format change!\n"); wg_sample_release(wg_sample); return hr; } @@ -432,7 +423,7 @@ HRESULT wg_transform_read_quartz(wg_transform_t transform, struct wg_sample *wg_ TRACE_(mfplat)("transform %#I64x, wg_sample %p.\n", transform, wg_sample); - if (FAILED(hr = wg_transform_read_data(transform, wg_sample, NULL))) + if (FAILED(hr = wg_transform_read_data(transform, wg_sample))) { if (hr == MF_E_TRANSFORM_STREAM_CHANGE) FIXME("Unexpected stream format change!\n"); @@ -507,7 +498,7 @@ HRESULT wg_transform_read_dmo(wg_transform_t transform, DMO_OUTPUT_DATA_BUFFER * return hr; wg_sample->size = 0; - if (FAILED(hr = wg_transform_read_data(transform, wg_sample, NULL))) + if (FAILED(hr = wg_transform_read_data(transform, wg_sample))) { if (hr == MF_E_TRANSFORM_STREAM_CHANGE) TRACE_(mfplat)("Stream format changed.\n"); diff --git 
a/dlls/winegstreamer/wg_source.c b/dlls/winegstreamer/wg_source.c index 6e000e36e31..e81eb1eb596 100644 --- a/dlls/winegstreamer/wg_source.c +++ b/dlls/winegstreamer/wg_source.c @@ -44,6 +44,7 @@ struct source_stream { GstPad *pad; + GstStream *stream; GstAtomicQueue *queue; GstBuffer *buffer; gboolean eos; @@ -54,9 +55,12 @@ struct wg_source gchar *url; GstPad *src_pad; GstElement *container; + bool needs_transcoding; + GstSegment segment; - bool valid_segment; guint64 max_duration; + GstAtomicQueue *seek_queue; + pthread_t push_thread; guint stream_count; struct source_stream streams[WG_SOURCE_MAX_STREAMS]; @@ -67,6 +71,14 @@ static struct wg_source *get_source(wg_source_t source) return (struct wg_source *)(ULONG_PTR)source; } +static struct source_stream *source_stream_from_pad(struct wg_source *source, GstPad *pad) +{ + struct source_stream *stream, *end; + for (stream = source->streams, end = stream + source->stream_count; stream != end; stream++) + if (stream->pad == pad) return stream; + return NULL; +} + static const char *media_type_from_caps(GstCaps *caps) { GstStructure *structure; @@ -80,7 +92,6 @@ static GstCaps *detect_caps_from_data(const char *url, const void *data, guint s const char *extension = url ? strrchr(url, '.') : NULL; GstTypeFindProbability probability; GstCaps *caps; - gchar *str; if (!(caps = gst_type_find_helper_for_data_with_extension(NULL, data, size, extension ? 
extension + 1 : NULL, &probability))) @@ -89,14 +100,12 @@ static GstCaps *detect_caps_from_data(const char *url, const void *data, guint s return NULL; } - str = gst_caps_to_string(caps); if (probability > GST_TYPE_FIND_POSSIBLE) - GST_INFO("Detected caps %s with probability %u for url %s, data %p, size %u", - str, probability, url, data, size); + GST_INFO("Detected caps %"GST_PTR_FORMAT" with probability %u for url %s, data %p, size %u", + caps, probability, url, data, size); else - GST_FIXME("Detected caps %s with probability %u for url %s, data %p, size %u", - str, probability, url, data, size); - g_free(str); + GST_FIXME("Detected caps %"GST_PTR_FORMAT" with probability %u for url %s, data %p, size %u", + caps, probability, url, data, size); return caps; } @@ -116,7 +125,7 @@ static GstPad *create_pad_with_caps(GstPadDirection direction, GstCaps *caps) return pad; } -static GstBuffer *create_buffer_from_bytes(const void *data, guint size) +static GstBuffer *create_buffer_from_bytes(UINT64 offset, const void *data, guint size) { GstBuffer *buffer; @@ -126,6 +135,8 @@ static GstBuffer *create_buffer_from_bytes(const void *data, guint size) { gst_buffer_fill(buffer, 0, data, size); gst_buffer_set_size(buffer, size); + GST_BUFFER_OFFSET(buffer) = offset; + GST_BUFFER_OFFSET_END(buffer) = offset + size; } return buffer; @@ -133,7 +144,9 @@ static GstBuffer *create_buffer_from_bytes(const void *data, guint size) static GstStream *source_get_stream(struct wg_source *source, guint index) { - return index >= source->stream_count ? 
NULL : gst_pad_get_stream(source->streams[index].pad); + if (index >= source->stream_count) + return NULL; + return gst_object_ref(source->streams[index].stream); } static GstCaps *source_get_stream_caps(struct wg_source *source, guint index) @@ -168,17 +181,6 @@ static bool source_set_stream_flags(struct wg_source *source, guint index, GstSt return true; } -static GstStreamFlags source_get_stream_flags(struct wg_source *source, guint index) -{ - GstStreamFlags flags; - GstStream *stream; - if (!(stream = source_get_stream(source, index))) - return 0; - flags = gst_stream_get_stream_flags(stream); - gst_object_unref(stream); - return flags; -} - static NTSTATUS source_get_stream_buffer(struct wg_source *source, guint index, GstBuffer **buffer) { GstBuffer **stream_buffer; @@ -193,52 +195,93 @@ static NTSTATUS source_get_stream_buffer(struct wg_source *source, guint index, return STATUS_SUCCESS; } -static gboolean src_event_seek(struct wg_source *source, GstEvent *event) +static GstEvent *create_stream_start_event(const char *stream_id) +{ + GstStream *stream; + GstEvent *event; + + stream = gst_stream_new(stream_id, NULL, GST_STREAM_TYPE_UNKNOWN, 0); + if ((event = gst_event_new_stream_start(stream_id))) + { + gst_event_set_stream(event, stream); + gst_event_set_stream_flags(event, GST_STREAM_FLAG_SELECT); + gst_event_set_group_id(event, 1); + } + + return event; +} + +static void source_handle_seek(struct wg_source *source, GstEvent *event) { guint32 i, seqnum = gst_event_get_seqnum(event); - GstSeekType cur_type, stop_type; GstSeekFlags flags; - GstFormat format; - gint64 cur, stop; - gdouble rate; - - gst_event_parse_seek(event, &rate, &format, &flags, &cur_type, &cur, &stop_type, &stop); - gst_event_unref(event); - if (format != GST_FORMAT_BYTES) - return false; + gboolean eos; + gint64 cur; - GST_TRACE("source %p, rate %f, format %s, flags %#x, cur_type %u, cur %#" G_GINT64_MODIFIER "x, " - "stop_type %u, stop %#" G_GINT64_MODIFIER "x.", source, rate, 
gst_format_get_name(format), - flags, cur_type, cur, stop_type, stop); + gst_event_parse_seek(event, NULL, NULL, &flags, NULL, &cur, NULL, NULL); if (flags & GST_SEEK_FLAG_FLUSH) { - if (!(event = gst_event_new_flush_start())) - GST_ERROR("Failed to allocate flush_start event"); - else + if ((event = gst_event_new_flush_start())) { gst_event_set_seqnum(event, seqnum); - if (!gst_pad_push_event(source->src_pad, event)) - GST_ERROR("Failed to push flush_start event"); + push_event(source->src_pad, event); } } - source->segment.start = cur; - - for (i = 0; i < ARRAY_SIZE(source->streams); i++) - source->streams[i].eos = false; + if ((eos = cur >= source->segment.stop)) + source->segment.start = source->segment.stop; + else + { + for (i = 0; i < ARRAY_SIZE(source->streams); i++) + source->streams[i].eos = false; + source->segment.start = cur; + } if (flags & GST_SEEK_FLAG_FLUSH) { - if (!(event = gst_event_new_flush_stop(true))) - GST_ERROR("Failed to allocate flush_stop event"); - else + if ((event = gst_event_new_flush_stop(true))) { gst_event_set_seqnum(event, seqnum); - if (!gst_pad_push_event(source->src_pad, event)) - GST_ERROR("Failed to push flush_stop event"); + push_event(source->src_pad, event); + } + + if ((event = gst_event_new_segment(&source->segment))) + { + gst_event_set_seqnum(event, seqnum); + push_event(source->src_pad, event); } - source->valid_segment = false; + } + + if (source->segment.start == source->segment.stop) + push_event(source->src_pad, gst_event_new_eos()); +} + +static gboolean src_event_seek(struct wg_source *source, GstEvent *event) +{ + GstFormat format; + + GST_TRACE("source %p, %"GST_PTR_FORMAT, source, event); + + gst_event_parse_seek(event, NULL, &format, NULL, NULL, NULL, NULL, NULL); + if (format != GST_FORMAT_BYTES) + { + gst_event_unref(event); + return false; + } + + /* Even in push mode, oggdemux uses a separate thread to request seeks, we have to handle + * these asynchronously from wg_source_get_position. 
+ * On the other hand, other demuxers emit seeks synchronously during gst_pad_push_buffer, + * and expect to see flush events being pushed synchronously as well, we have to handle + * these directly here. + */ + if (source->push_thread != pthread_self()) + gst_atomic_queue_push(source->seek_queue, event); + else + { + source_handle_seek(source, event); + gst_event_unref(event); } return true; @@ -261,8 +304,9 @@ static gboolean src_query_duration(struct wg_source *source, GstQuery *query) { GstFormat format; + GST_TRACE("source %p, %"GST_PTR_FORMAT, source, query); + gst_query_parse_duration(query, &format, NULL); - GST_TRACE("source %p, format %s", source, gst_format_get_name(format)); if (format != GST_FORMAT_BYTES) return false; @@ -272,7 +316,8 @@ static gboolean src_query_duration(struct wg_source *source, GstQuery *query) static gboolean src_query_scheduling(struct wg_source *source, GstQuery *query) { - GST_TRACE("source %p", source); + GST_TRACE("source %p, %"GST_PTR_FORMAT, source, query); + gst_query_set_scheduling(query, GST_SCHEDULING_FLAG_SEEKABLE, 1, -1, 0); gst_query_add_scheduling_mode(query, GST_PAD_MODE_PUSH); return true; @@ -282,8 +327,9 @@ static gboolean src_query_seeking(struct wg_source *source, GstQuery *query) { GstFormat format; + GST_TRACE("source %p, %"GST_PTR_FORMAT, source, query); + gst_query_parse_seeking(query, &format, NULL, NULL, NULL); - GST_TRACE("source %p, format %s", source, gst_format_get_name(format)); if (format != GST_FORMAT_BYTES) return false; @@ -295,8 +341,9 @@ static gboolean src_query_uri(struct wg_source *source, GstQuery *query) { gchar *uri; + GST_TRACE("source %p, %"GST_PTR_FORMAT, source, query); + gst_query_parse_uri(query, &uri); - GST_TRACE("source %p, uri %s", source, uri); gst_query_set_uri(query, source->url); return true; @@ -326,87 +373,143 @@ static gboolean src_query_cb(GstPad *pad, GstObject *parent, GstQuery *query) static GstFlowReturn sink_chain_cb(GstPad *pad, GstObject *parent, GstBuffer 
*buffer) { struct wg_source *source = gst_pad_get_element_private(pad); - guint index; + struct source_stream *stream = source_stream_from_pad(source, pad); - GST_TRACE("source %p, pad %p, buffer %p.", source, pad, buffer); + GST_TRACE("source %p, %"GST_PTR_FORMAT", %"GST_PTR_FORMAT, source, pad, buffer); - for (index = 0; index < source->stream_count; index++) - if (source->streams[index].pad == pad) - break; - - if (source_get_stream_flags(source, index) & GST_STREAM_FLAG_SELECT) - gst_atomic_queue_push(source->streams[index].queue, buffer); + if (gst_stream_get_stream_flags(stream->stream) & GST_STREAM_FLAG_SELECT + && gst_buffer_get_size(buffer) > 0) + gst_atomic_queue_push(stream->queue, buffer); else gst_buffer_unref(buffer); return GST_FLOW_OK; } +static gboolean check_decoding_support(GstCaps *caps) +{ + GstElement *element, *bin = gst_bin_new("decode-test"), *first = NULL, *last = NULL; + GstPad *peer, *src_pad, *sink_pad; + GstPadTemplate *template; + gboolean ret = false; + + template = gst_pad_template_new("src", GST_PAD_SRC, GST_PAD_ALWAYS, caps); + src_pad = gst_pad_new_from_template(template, "src"); + g_object_unref(template); + + template = gst_pad_template_new("sink", GST_PAD_SINK, GST_PAD_ALWAYS, GST_CAPS_ANY); + sink_pad = gst_pad_new_from_template(template, "sink"); + g_object_unref(template); + + if (!(element = find_element(GST_ELEMENT_FACTORY_TYPE_DECODER, caps, GST_CAPS_ANY)) + || !append_element(bin, element, &first, &last)) + goto done; + gst_util_set_object_arg(G_OBJECT(element), "max-threads", "1"); + gst_util_set_object_arg(G_OBJECT(element), "n-threads", "1"); + + if (!link_src_to_element(src_pad, first) || !link_element_to_sink(last, sink_pad) + || !gst_pad_set_active(src_pad, 1) || !gst_pad_set_active(sink_pad, 1)) + goto done; + + gst_element_set_state(bin, GST_STATE_PAUSED); + if (!gst_element_get_state(bin, NULL, NULL, -1) + || !push_event(src_pad, gst_event_new_stream_start("stream")) + || !push_event(src_pad, 
gst_event_new_caps(caps))) + goto done; + + /* check that the caps event has been accepted */ + if ((peer = gst_pad_get_peer(src_pad))) + { + GST_ERROR("pad %"GST_PTR_FORMAT, peer); + ret = gst_pad_has_current_caps(peer); + gst_object_unref(peer); + } + +done: + gst_element_set_state(bin, GST_STATE_NULL); + gst_object_unref(src_pad); + gst_object_unref(bin); + return ret; +} + static gboolean sink_event_caps(struct wg_source *source, GstPad *pad, GstEvent *event) { - GstStream *stream; + struct source_stream *stream = source_stream_from_pad(source, pad); + const char *mime_type; GstCaps *caps; - gchar *str; + + GST_TRACE("source %p, %"GST_PTR_FORMAT", %"GST_PTR_FORMAT, source, pad, event); gst_event_parse_caps(event, &caps); - str = gst_caps_to_string(caps); - GST_TRACE("source %p, pad %p, caps %s", source, pad, str); - g_free(str); + mime_type = gst_structure_get_name(gst_caps_get_structure(caps, 0)); - if ((stream = gst_pad_get_stream(pad))) + if (strcmp(mime_type, "audio/x-raw") && strcmp(mime_type, "video/x-raw") + && !check_decoding_support(caps)) { - gst_stream_set_caps(stream, gst_caps_copy(caps)); - gst_stream_set_stream_type(stream, stream_type_from_caps(caps)); - gst_object_unref(stream); + GST_ERROR("Cannot decode caps %"GST_PTR_FORMAT, caps); + source->needs_transcoding = true; + gst_event_unref(event); + return false; } + gst_stream_set_caps(stream->stream, gst_caps_copy(caps)); + gst_stream_set_stream_type(stream->stream, stream_type_from_caps(caps)); + gst_event_unref(event); - return !!stream; + return true; } static gboolean sink_event_tag(struct wg_source *source, GstPad *pad, GstEvent *event) { - GstTagList *new_tags; - GstStream *stream; + struct source_stream *stream = source_stream_from_pad(source, pad); + GstTagList *new_tags, *old_tags = gst_stream_get_tags(stream->stream); + + GST_TRACE("source %p, %"GST_PTR_FORMAT", %"GST_PTR_FORMAT, source, pad, event); gst_event_parse_tag(event, &new_tags); - GST_TRACE("source %p, pad %p, new_tags %p", 
source, pad, new_tags); - if ((stream = gst_pad_get_stream(pad))) + if ((new_tags = gst_tag_list_merge(old_tags, new_tags, GST_TAG_MERGE_REPLACE))) { - GstTagList *old_tags = gst_stream_get_tags(stream); - if ((new_tags = gst_tag_list_merge(old_tags, new_tags, GST_TAG_MERGE_REPLACE))) - { - gst_stream_set_tags(stream, new_tags); - gst_tag_list_unref(new_tags); - } - if (old_tags) - gst_tag_list_unref(old_tags); - gst_object_unref(stream); + gst_stream_set_tags(stream->stream, new_tags); + gst_tag_list_unref(new_tags); } + if (old_tags) + gst_tag_list_unref(old_tags); gst_event_unref(event); - return stream && new_tags; + return true; } static gboolean sink_event_stream_start(struct wg_source *source, GstPad *pad, GstEvent *event) { + struct source_stream *stream = source_stream_from_pad(source, pad); + const gchar *new_id, *old_id = gst_stream_get_stream_id(stream->stream); + GstStream *new_stream, *old_stream = stream->stream; guint group, flags; - GstStream *stream; gint64 duration; - const gchar *id; - gst_event_parse_stream_start(event, &id); - gst_event_parse_stream(event, &stream); + GST_TRACE("source %p, %"GST_PTR_FORMAT", %"GST_PTR_FORMAT, source, pad, event); + + gst_event_parse_stream_start(event, &new_id); + gst_event_parse_stream(event, &new_stream); gst_event_parse_stream_flags(event, &flags); if (!gst_event_parse_group_id(event, &group)) group = -1; + if (strcmp(old_id, new_id)) + { + if (!(stream->stream = new_stream)) + stream->stream = gst_stream_new(new_id, NULL, GST_STREAM_TYPE_UNKNOWN, 0); + else + gst_object_ref(stream->stream); + gst_object_unref(old_stream); + } + if (gst_pad_peer_query_duration(pad, GST_FORMAT_TIME, &duration) && GST_CLOCK_TIME_IS_VALID(duration)) + { source->max_duration = max(source->max_duration, duration); - - GST_TRACE("source %p, pad %p, stream %p, id %s, flags %#x, group %d, duration %" GST_TIME_FORMAT, - source, pad, stream, id, flags, group, GST_TIME_ARGS(duration)); + GST_TRACE("source %p, %"GST_PTR_FORMAT", got 
duration %" GST_TIME_FORMAT, source, pad, GST_TIME_ARGS(duration)); + } gst_event_unref(event); return true; @@ -414,16 +517,31 @@ static gboolean sink_event_stream_start(struct wg_source *source, GstPad *pad, G static gboolean sink_event_eos(struct wg_source *source, GstPad *pad, GstEvent *event) { - guint index; + struct source_stream *stream = source_stream_from_pad(source, pad); - GST_TRACE("source %p, pad %p, event %p", source, pad, event); + GST_TRACE("source %p, %"GST_PTR_FORMAT", %"GST_PTR_FORMAT, source, pad, event); - for (index = 0; index < source->stream_count; index++) - if (source->streams[index].pad == pad) - break; + stream->eos = true; - if (index < source->stream_count) - source->streams[index].eos = true; + gst_event_unref(event); + return true; +} + +static gboolean sink_event_flush_stop(struct wg_source *source, GstPad *pad, GstEvent *event) +{ + struct source_stream *stream = source_stream_from_pad(source, pad); + GstBuffer *buffer; + + GST_TRACE("source %p, %"GST_PTR_FORMAT", %"GST_PTR_FORMAT, source, pad, event); + + if (stream->buffer) + { + gst_buffer_unref(stream->buffer); + stream->buffer = NULL; + } + + while ((buffer = gst_atomic_queue_pop(stream->queue))) + gst_buffer_unref(buffer); gst_event_unref(event); return true; @@ -443,58 +561,53 @@ static gboolean sink_event_cb(GstPad *pad, GstObject *parent, GstEvent *event) return sink_event_stream_start(source, pad, event); case GST_EVENT_EOS: return sink_event_eos(source, pad, event); + case GST_EVENT_FLUSH_STOP: + return sink_event_flush_stop(source, pad, event); default: return gst_pad_event_default(pad, parent, event); } } -static GstEvent *create_stream_start_event(const char *stream_id) -{ - GstStream *stream; - GstEvent *event; - - if (!(stream = gst_stream_new(stream_id, NULL, GST_STREAM_TYPE_UNKNOWN, 0))) - return NULL; - gst_stream_set_stream_flags(stream, GST_STREAM_FLAG_SELECT); - if ((event = gst_event_new_stream_start(stream_id))) - { - gst_event_set_stream(event, stream); - 
gst_object_unref(stream); - } - - return event; -} - static void pad_added_cb(GstElement *element, GstPad *pad, gpointer user) { struct wg_source *source = user; - char stream_id[256]; - GstFlowReturn ret; - GstPad *sink_pad; - GstEvent *event; - guint index; + struct source_stream *stream; + char stream_id[256], *id; + + GST_TRACE("source %p, %"GST_PTR_FORMAT", %p", source, pad, user); - GST_TRACE("source %p, element %p, pad %p.", source, element, pad); - if ((index = source->stream_count++) >= ARRAY_SIZE(source->streams)) + stream = source->streams + source->stream_count++; + if (stream >= source->streams + ARRAY_SIZE(source->streams)) { GST_FIXME("Not enough sink pads, need %u", source->stream_count); return; } - sink_pad = source->streams[index].pad; - if (gst_pad_link(pad, sink_pad) < 0 || !gst_pad_set_active(sink_pad, true)) - GST_ERROR("Failed to link new pad to sink pad %p", sink_pad); + if (gst_pad_link(pad, stream->pad) < 0 || !gst_pad_set_active(stream->pad, true)) + GST_ERROR("Failed to link new pad to sink pad %p", stream->pad); - snprintf(stream_id, ARRAY_SIZE(stream_id), "wg_source/%03u", index); - if (!(event = create_stream_start_event(stream_id))) - GST_ERROR("Failed to create stream event for sink pad %p", sink_pad); + if ((stream->stream = gst_pad_get_stream(pad))) + { + GST_TRACE("got pad %"GST_PTR_FORMAT" stream %"GST_PTR_FORMAT, pad, stream->stream); + gst_stream_set_stream_flags(stream->stream, GST_STREAM_FLAG_SELECT); + } else { - if ((ret = gst_pad_store_sticky_event(pad, event)) < 0) - GST_ERROR("Failed to create pad %p stream, ret %d", sink_pad, ret); - if ((ret = gst_pad_store_sticky_event(sink_pad, event)) < 0) - GST_ERROR("Failed to create pad %p stream, ret %d", sink_pad, ret); - gst_event_unref(event); + if (!(id = gst_pad_get_stream_id(pad))) + { + snprintf(stream_id, ARRAY_SIZE(stream_id), "wg_source/%03zu", stream - source->streams); + id = g_strdup(stream_id); + } + + if (!(stream->stream = gst_stream_new(id, NULL, 
GST_STREAM_TYPE_UNKNOWN, 0))) + GST_ERROR("Failed to create stream event for sink pad %p", stream->pad); + else + { + GST_TRACE("created stream %"GST_PTR_FORMAT" for pad %"GST_PTR_FORMAT, stream->stream, stream->pad); + gst_stream_set_stream_flags(stream->stream, GST_STREAM_FLAG_SELECT); + } + + g_free(id); } } @@ -505,7 +618,6 @@ NTSTATUS wg_source_create(void *args) GstCaps *src_caps, *any_caps; struct wg_source *source; const gchar *media_type; - GstEvent *event; GstPad *peer; guint i; @@ -537,6 +649,7 @@ NTSTATUS wg_source_create(void *args) gst_pad_set_element_private(source->src_pad, source); gst_pad_set_query_function(source->src_pad, src_query_cb); gst_pad_set_event_function(source->src_pad, src_event_cb); + source->seek_queue = gst_atomic_queue_new(1); for (i = 0; i < ARRAY_SIZE(source->streams); i++) { @@ -579,12 +692,11 @@ NTSTATUS wg_source_create(void *args) gst_element_set_state(source->container, GST_STATE_PAUSED); if (!gst_element_get_state(source->container, NULL, NULL, -1)) goto error; - - if (!(event = create_stream_start_event("wg_source")) - || !push_event(source->src_pad, event)) - goto error; gst_caps_unref(src_caps); + push_event(source->src_pad, create_stream_start_event("wg_source")); + push_event(source->src_pad, gst_event_new_segment(&source->segment)); + params->source = (wg_source_t)(ULONG_PTR)source; GST_INFO("Created winegstreamer source %p.", source); return STATUS_SUCCESS; @@ -615,13 +727,68 @@ NTSTATUS wg_source_create(void *args) return STATUS_UNSUCCESSFUL; } +static NTSTATUS initialize_transcoding(struct wg_source *source) +{ + GstElement *element, *first = NULL, *last = NULL; + GstEvent *event; + guint i; + + gst_element_set_state(source->container, GST_STATE_NULL); + gst_object_unref(source->container); + + for (i = 0; i < ARRAY_SIZE(source->streams); i++) + gst_pad_set_active(source->streams[i].pad, false); + gst_pad_set_active(source->src_pad, false); + + while ((event = gst_atomic_queue_pop(source->seek_queue))) + 
gst_event_unref(event); + + source->segment.start = 0; + source->needs_transcoding = false; + source->max_duration = 0; + source->stream_count = 0; + + if (!(source->container = gst_bin_new("wg_source"))) + goto error; + GST_OBJECT_FLAG_SET(source->container, GST_BIN_FLAG_STREAMS_AWARE); + + if (!(element = create_element("protondemuxer", "proton")) + || !append_element(source->container, element, &first, &last)) + goto error; + g_signal_connect(element, "pad-added", G_CALLBACK(pad_added_cb), source); + + if (!link_src_to_element(source->src_pad, first)) + goto error; + if (!gst_pad_set_active(source->src_pad, true)) + goto error; + + gst_element_set_state(source->container, GST_STATE_PAUSED); + if (!gst_element_get_state(source->container, NULL, NULL, -1)) + goto error; + + push_event(source->src_pad, create_stream_start_event("wg_source")); + push_event(source->src_pad, gst_event_new_segment(&source->segment)); + + GST_INFO("Re-initialized source %p.", source); + return STATUS_SUCCESS; + +error: + GST_ERROR("Failed to re-initialize source %p", source); + return STATUS_UNSUCCESSFUL; +} + NTSTATUS wg_source_destroy(void *args) { struct wg_source *source = get_source(*(wg_source_t *)args); + GstEvent *event; guint i; GST_TRACE("source %p", source); + while ((event = gst_atomic_queue_pop(source->seek_queue))) + gst_event_unref(event); + gst_atomic_queue_unref(source->seek_queue); + gst_element_set_state(source->container, GST_STATE_NULL); gst_object_unref(source->container); for (i = 0; i < ARRAY_SIZE(source->streams); i++) @@ -673,9 +840,16 @@ NTSTATUS wg_source_get_position(void *args) { struct wg_source_get_position_params *params = args; struct wg_source *source = get_source(params->source); + GstEvent *event; GST_TRACE("source %p", source); + while ((event = gst_atomic_queue_pop(source->seek_queue))) + { + source_handle_seek(source, event); + gst_event_unref(event); + } + params->read_offset = source->segment.start; return STATUS_SUCCESS; } @@ -685,15 +859,13 @@ 
NTSTATUS wg_source_set_position(void *args) struct wg_source_set_position_params *params = args; struct wg_source *source = get_source(params->source); guint64 time = params->time * 100; - GstEvent *event; guint i; - GST_TRACE("source %p", source); + GST_TRACE("source %p, time %"G_GINT64_MODIFIER"d", source, time); - if (!(event = gst_event_new_seek(1.0, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH, - GST_SEEK_TYPE_SET, time, GST_SEEK_TYPE_NONE, -1)) - || !gst_pad_push_event(source->streams[0].pad, event)) - GST_WARNING("Failed to seek source %p to %" G_GINT64_MODIFIER "x", source, time); + push_event(source->streams[0].pad, gst_event_new_seek(1.0, GST_FORMAT_TIME, + GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_SNAP_BEFORE | GST_SEEK_FLAG_KEY_UNIT, + GST_SEEK_TYPE_SET, time, GST_SEEK_TYPE_NONE, -1)); for (i = 0; i < source->stream_count; i++) { @@ -716,49 +888,43 @@ NTSTATUS wg_source_push_data(void *args) struct wg_source *source = get_source(params->source); GstFlowReturn ret = GST_FLOW_OK; GstBuffer *buffer; - GstEvent *event; - - GST_TRACE("source %p, data %p, size %#x", source, params->data, params->size); - if (!source->valid_segment) - { - if (!(event = gst_event_new_segment(&source->segment)) - || !gst_pad_push_event(source->src_pad, event)) - GST_ERROR("Failed to push new segment event"); - source->valid_segment = true; - } + GST_TRACE("source %p, offset %#"G_GINT64_MODIFIER"x, data %p, size %#x", source, params->offset, params->data, params->size); if (!params->size) { - if (source->segment.start != source->segment.stop) - goto eos; + push_event(source->src_pad, gst_event_new_eos()); return STATUS_SUCCESS; } - if (!(buffer = create_buffer_from_bytes(params->data, params->size))) + if (!(buffer = create_buffer_from_bytes(params->offset, params->data, params->size))) { GST_WARNING("Failed to allocate buffer for data"); return STATUS_UNSUCCESSFUL; } - source->segment.start += params->size; - if ((ret = gst_pad_push(source->src_pad, buffer)) && ret != GST_FLOW_EOS) + if 
(params->offset > source->segment.start) { - GST_WARNING("Failed to push data buffer, ret %d", ret); - source->segment.start -= params->size; - return STATUS_UNSUCCESSFUL; + source->segment.start = params->offset; + gst_buffer_set_flags(buffer, GST_BUFFER_FLAG_DISCONT); + } + else if (params->offset < source->segment.start) + { + source->segment.start = params->offset; + push_event(source->src_pad, gst_event_new_segment(&source->segment)); } - if (source->segment.start != source->segment.stop) + source->push_thread = pthread_self(); + source->segment.start += params->size; + if (!(ret = gst_pad_push(source->src_pad, buffer)) || ret == GST_FLOW_EOS) return STATUS_SUCCESS; -eos: - if (!(event = gst_event_new_eos()) - || !gst_pad_push_event(source->src_pad, event)) - GST_WARNING("Failed to push EOS event"); - source->segment.start = source->segment.stop; + if (source->needs_transcoding) + return initialize_transcoding(source); - return STATUS_SUCCESS; + GST_WARNING("Failed to push data buffer, ret %d", ret); + source->segment.start -= params->size; + return STATUS_UNSUCCESSFUL; } NTSTATUS wg_source_read_data(void *args) @@ -773,6 +939,9 @@ NTSTATUS wg_source_read_data(void *args) GST_TRACE("source %p, index %#x, sample %p", source, index, sample); + if (gst_atomic_queue_length(source->seek_queue)) + return STATUS_PENDING; + if ((status = source_get_stream_buffer(source, index, &buffer))) return status; @@ -820,21 +989,21 @@ NTSTATUS wg_source_read_data(void *args) return status; } -NTSTATUS wg_source_get_stream_format(void *args) +NTSTATUS wg_source_get_stream_type(void *args) { - struct wg_source_get_stream_format_params *params = args; + struct wg_source_get_stream_type_params *params = args; struct wg_source *source = get_source(params->source); guint index = params->index; + NTSTATUS status; GstCaps *caps; GST_TRACE("source %p, index %u", source, index); if (!(caps = source_get_stream_caps(source, index))) return STATUS_UNSUCCESSFUL; - 
wg_format_from_caps(¶ms->format, caps); - + status = caps_to_media_type(caps, ¶ms->media_type, 0); gst_caps_unref(caps); - return STATUS_SUCCESS; + return status; } static gchar *stream_lang_from_tags(GstTagList *tags, bool is_quicktime) diff --git a/dlls/winegstreamer/wg_transform.c b/dlls/winegstreamer/wg_transform.c index e4f166982b1..5adb7d3994e 100644 --- a/dlls/winegstreamer/wg_transform.c +++ b/dlls/winegstreamer/wg_transform.c @@ -36,6 +36,7 @@ #define WIN32_NO_STATUS #include "winternl.h" #include "mferror.h" +#include "mfapi.h" #include "unix_private.h" @@ -52,15 +53,17 @@ struct wg_transform GstQuery *drain_query; GstAtomicQueue *input_queue; + MFVideoInfo input_info; + MFVideoInfo output_info; - bool input_is_flipped; - GstElement *video_flip; - - struct wg_format output_format; GstAtomicQueue *output_queue; GstSample *output_sample; bool output_caps_changed; + GstCaps *desired_caps; GstCaps *output_caps; + GstCaps *input_caps; + + bool draining; }; static struct wg_transform *get_transform(wg_transform_t trans) @@ -68,18 +71,189 @@ static struct wg_transform *get_transform(wg_transform_t trans) return (struct wg_transform *)(ULONG_PTR)trans; } -static void align_video_info_planes(gsize plane_align, GstVideoInfo *info, GstVideoAlignment *align) +static BOOL is_mf_video_area_empty(const MFVideoArea *area) +{ + return !area->OffsetX.value && !area->OffsetY.value && !area->Area.cx && !area->Area.cy; +} + +static void align_video_info_planes(MFVideoInfo *video_info, gsize plane_align, + GstVideoInfo *info, GstVideoAlignment *align) { + const MFVideoArea *aperture = &video_info->MinimumDisplayAperture; + gst_video_alignment_reset(align); align->padding_right = ((plane_align + 1) - (info->width & plane_align)) & plane_align; align->padding_bottom = ((plane_align + 1) - (info->height & plane_align)) & plane_align; + if (!is_mf_video_area_empty(aperture) && !plane_align) + { + align->padding_right = max(align->padding_right, video_info->dwWidth - 
aperture->OffsetX.value - aperture->Area.cx); + align->padding_bottom = max(align->padding_bottom, video_info->dwHeight - aperture->OffsetY.value - aperture->Area.cy); + align->padding_top = aperture->OffsetX.value; + align->padding_left = aperture->OffsetY.value; + } + + if (video_info->VideoFlags & MFVideoFlag_BottomUpLinearRep) + { + gsize top = align->padding_top; + align->padding_top = align->padding_bottom; + align->padding_bottom = top; + } + align->stride_align[0] = plane_align; align->stride_align[1] = plane_align; align->stride_align[2] = plane_align; align->stride_align[3] = plane_align; gst_video_info_align(info, align); + + if (video_info->VideoFlags & MFVideoFlag_BottomUpLinearRep) + { + for (guint i = 0; i < ARRAY_SIZE(info->offset); ++i) + { + info->offset[i] += (info->height - 1) * info->stride[i]; + info->stride[i] = -info->stride[i]; + } + } +} + +static void init_mf_video_info_rect(const MFVideoInfo *info, RECT *rect) +{ + if (!is_mf_video_area_empty(&info->MinimumDisplayAperture)) + { + rect->left = info->MinimumDisplayAperture.OffsetX.value; + rect->top = info->MinimumDisplayAperture.OffsetY.value; + rect->right = rect->left + info->MinimumDisplayAperture.Area.cx; + rect->bottom = rect->top + info->MinimumDisplayAperture.Area.cy; + } + else + { + rect->left = 0; + rect->top = 0; + rect->right = info->dwWidth; + rect->bottom = info->dwHeight; + } +} + +static inline BOOL intersect_rect(RECT *dst, const RECT *src1, const RECT *src2) +{ + dst->left = max(src1->left, src2->left); + dst->top = max(src1->top, src2->top); + dst->right = min(src1->right, src2->right); + dst->bottom = min(src1->bottom, src2->bottom); + return !IsRectEmpty(dst); +} + +static void update_video_aperture(MFVideoInfo *input_info, MFVideoInfo *output_info) +{ + RECT rect, input_rect, output_rect; + + init_mf_video_info_rect(input_info, &input_rect); + init_mf_video_info_rect(output_info, &output_rect); + intersect_rect(&rect, &input_rect, &output_rect); + + 
input_info->MinimumDisplayAperture.OffsetX.value = rect.left; + input_info->MinimumDisplayAperture.OffsetY.value = rect.top; + input_info->MinimumDisplayAperture.Area.cx = rect.right - rect.left; + input_info->MinimumDisplayAperture.Area.cy = rect.bottom - rect.top; + output_info->MinimumDisplayAperture = input_info->MinimumDisplayAperture; +} + +static void set_video_caps_aperture(GstCaps *caps, MFVideoInfo *video_info) +{ + if (!is_mf_video_area_empty(&video_info->MinimumDisplayAperture)) + { + gst_caps_set_simple(caps, "width", G_TYPE_INT, video_info->MinimumDisplayAperture.Area.cx, NULL); + gst_caps_set_simple(caps, "height", G_TYPE_INT, video_info->MinimumDisplayAperture.Area.cy, NULL); + } +} + +typedef struct +{ + GstVideoBufferPool parent; + GstVideoInfo info; +} WgVideoBufferPool; + +typedef struct +{ + GstVideoBufferPoolClass parent_class; +} WgVideoBufferPoolClass; + +G_DEFINE_TYPE(WgVideoBufferPool, wg_video_buffer_pool, GST_TYPE_VIDEO_BUFFER_POOL); + +static void buffer_add_video_meta(GstBuffer *buffer, GstVideoInfo *info) +{ + GstVideoMeta *meta; + + if (!(meta = gst_buffer_get_video_meta(buffer))) + meta = gst_buffer_add_video_meta(buffer, GST_VIDEO_FRAME_FLAG_NONE, + info->finfo->format, info->width, info->height); + + if (!meta) + GST_ERROR("Failed to add video meta to buffer %"GST_PTR_FORMAT, buffer); + else + { + memcpy(meta->offset, info->offset, sizeof(info->offset)); + memcpy(meta->stride, info->stride, sizeof(info->stride)); + } +} + +static GstFlowReturn wg_video_buffer_pool_alloc_buffer(GstBufferPool *gst_pool, GstBuffer **buffer, + GstBufferPoolAcquireParams *params) +{ + GstBufferPoolClass *parent_class = GST_BUFFER_POOL_CLASS(wg_video_buffer_pool_parent_class); + WgVideoBufferPool *pool = (WgVideoBufferPool *)gst_pool; + GstFlowReturn ret; + + GST_LOG("%"GST_PTR_FORMAT", buffer %p, params %p", pool, buffer, params); + + if (!(ret = parent_class->alloc_buffer(gst_pool, buffer, params))) + { + buffer_add_video_meta(*buffer, &pool->info); + 
GST_INFO("%"GST_PTR_FORMAT" allocated buffer %"GST_PTR_FORMAT, pool, *buffer); + } + + return ret; +} + +static void wg_video_buffer_pool_init(WgVideoBufferPool *pool) +{ +} + +static void wg_video_buffer_pool_class_init(WgVideoBufferPoolClass *klass) +{ + GstBufferPoolClass *pool_class = GST_BUFFER_POOL_CLASS(klass); + pool_class->alloc_buffer = wg_video_buffer_pool_alloc_buffer; +} + +static WgVideoBufferPool *wg_video_buffer_pool_create(GstCaps *caps, gsize plane_align, + GstAllocator *allocator, MFVideoInfo *video_info, GstVideoAlignment *align) +{ + WgVideoBufferPool *pool; + GstStructure *config; + + if (!(pool = g_object_new(wg_video_buffer_pool_get_type(), NULL))) + return NULL; + + gst_video_info_from_caps(&pool->info, caps); + align_video_info_planes(video_info, plane_align, &pool->info, align); + + if (!(config = gst_buffer_pool_get_config(GST_BUFFER_POOL(pool)))) + GST_ERROR("Failed to get %"GST_PTR_FORMAT" config.", pool); + else + { + gst_buffer_pool_config_add_option(config, GST_BUFFER_POOL_OPTION_VIDEO_META); + gst_buffer_pool_config_add_option(config, GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT); + gst_buffer_pool_config_set_video_alignment(config, align); + + gst_buffer_pool_config_set_params(config, caps, pool->info.size, 0, 0); + gst_buffer_pool_config_set_allocator(config, allocator, NULL); + if (!gst_buffer_pool_set_config(GST_BUFFER_POOL(pool), config)) + GST_ERROR("Failed to set %"GST_PTR_FORMAT" config.", pool); + } + + GST_INFO("Created %"GST_PTR_FORMAT, pool); + return pool; } static GstFlowReturn transform_sink_chain_cb(GstPad *pad, GstObject *parent, GstBuffer *buffer) @@ -96,7 +270,7 @@ static GstFlowReturn transform_sink_chain_cb(GstPad *pad, GstObject *parent, Gst return GST_FLOW_ERROR; } - if (transform->output_caps_changed) + if (transform->output_caps_changed && transform->attrs.allow_format_change) GST_MINI_OBJECT_FLAG_SET(sample, GST_SAMPLE_FLAG_WG_CAPS_CHANGED); transform->output_caps_changed = false; @@ -128,25 +302,24 @@ static 
gboolean transform_src_query_cb(GstPad *pad, GstObject *parent, GstQuery static gboolean transform_sink_query_allocation(struct wg_transform *transform, GstQuery *query) { - gsize plane_align = transform->attrs.output_plane_align; - GstStructure *config, *params; + WgVideoBufferPool *pool; GstVideoAlignment align; + const char *mime_type; + GstStructure *params; gboolean needs_pool; - GstBufferPool *pool; - GstVideoInfo info; GstCaps *caps; GST_LOG("transform %p, %"GST_PTR_FORMAT, transform, query); gst_query_parse_allocation(query, &caps, &needs_pool); - if (stream_type_from_caps(caps) != GST_STREAM_TYPE_VIDEO || !needs_pool) - return false; - if (!gst_video_info_from_caps(&info, caps) - || !(pool = gst_video_buffer_pool_new())) + mime_type = gst_structure_get_name(gst_caps_get_structure(caps, 0)); + if (strcmp(mime_type, "video/x-raw") || !needs_pool) return false; - align_video_info_planes(plane_align, &info, &align); + if (!(pool = wg_video_buffer_pool_create(caps, transform->attrs.output_plane_align, + transform->allocator, &transform->output_info, &align))) + return false; if ((params = gst_structure_new("video-meta", "padding-top", G_TYPE_UINT, align.padding_top, @@ -159,57 +332,67 @@ static gboolean transform_sink_query_allocation(struct wg_transform *transform, gst_structure_free(params); } - if (!(config = gst_buffer_pool_get_config(pool))) - GST_ERROR("Failed to get %"GST_PTR_FORMAT" config.", pool); - else - { - gst_buffer_pool_config_add_option(config, GST_BUFFER_POOL_OPTION_VIDEO_META); - gst_buffer_pool_config_add_option(config, GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT); - gst_buffer_pool_config_set_video_alignment(config, &align); - - gst_buffer_pool_config_set_params(config, caps, - info.size, 0, 0); - gst_buffer_pool_config_set_allocator(config, transform->allocator, NULL); - if (!gst_buffer_pool_set_config(pool, config)) - GST_ERROR("Failed to set %"GST_PTR_FORMAT" config.", pool); - } - /* Prevent pool reconfiguration, we don't want another 
alignment. */ - if (!gst_buffer_pool_set_active(pool, true)) + if (!gst_buffer_pool_set_active(GST_BUFFER_POOL(pool), true)) GST_ERROR("%"GST_PTR_FORMAT" failed to activate.", pool); - gst_query_add_allocation_pool(query, pool, info.size, 0, 0); + gst_query_add_allocation_pool(query, GST_BUFFER_POOL(pool), pool->info.size, 0, 0); gst_query_add_allocation_param(query, transform->allocator, NULL); GST_INFO("Proposing %"GST_PTR_FORMAT", buffer size %#zx, %"GST_PTR_FORMAT", for %"GST_PTR_FORMAT, - pool, info.size, transform->allocator, query); + pool, pool->info.size, transform->allocator, query); g_object_unref(pool); return true; } -static GstCaps *transform_format_to_caps(struct wg_transform *transform, const struct wg_format *format) +static void caps_remove_field(GstCaps *caps, const char *field) { - struct wg_format copy = *format; + guint i; - if (format->major_type == WG_MAJOR_TYPE_VIDEO) + for (i = 0; i < gst_caps_get_size(caps); ++i) { - if (transform->attrs.allow_size_change) - copy.u.video.width = copy.u.video.height = 0; - copy.u.video.fps_n = copy.u.video.fps_d = 0; + GstStructure *structure = gst_caps_get_structure(caps, i); + gst_structure_remove_fields(structure, field, NULL); } +} + +static GstCaps *caps_strip_fields(GstCaps *caps, bool strip_size) +{ + if (stream_type_from_caps(caps) != GST_STREAM_TYPE_VIDEO) + return gst_caps_ref(caps); + + if ((caps = gst_caps_copy(caps))) + { + if (strip_size) + { + caps_remove_field(caps, "width"); + caps_remove_field(caps, "height"); + } - return wg_format_to_caps(&copy); + /* strip fields which we do not support and could cause pipeline failure or spurious format changes */ + caps_remove_field(caps, "framerate"); + caps_remove_field(caps, "colorimetry"); + caps_remove_field(caps, "chroma-site"); + caps_remove_field(caps, "interlace-mode"); + caps_remove_field(caps, "pixel-aspect-ratio"); + } + + return caps; } static gboolean transform_sink_query_caps(struct wg_transform *transform, GstQuery *query) { GstCaps 
*caps, *filter, *temp; + bool strip_size = false; GST_LOG("transform %p, %"GST_PTR_FORMAT, transform, query); gst_query_parse_caps(query, &filter); - if (!(caps = transform_format_to_caps(transform, &transform->output_format))) + if (filter && gst_structure_has_field(gst_caps_get_structure(filter, 0), "width")) + strip_size = transform->attrs.allow_format_change; + + if (!(caps = caps_strip_fields(transform->desired_caps, strip_size))) return false; if (filter) @@ -248,23 +431,6 @@ static gboolean transform_sink_query_cb(GstPad *pad, GstObject *parent, GstQuery return gst_pad_query_default(pad, parent, query); } -static gboolean transform_output_caps_is_compatible(struct wg_transform *transform, GstCaps *caps) -{ - GstCaps *copy = gst_caps_copy(caps); - gboolean ret; - gsize i; - - for (i = 0; i < gst_caps_get_size(copy); ++i) - { - GstStructure *structure = gst_caps_get_structure(copy, i); - gst_structure_remove_fields(structure, "framerate", NULL); - } - - ret = gst_caps_is_always_compatible(transform->output_caps, copy); - gst_caps_unref(copy); - return ret; -} - static void transform_sink_event_caps(struct wg_transform *transform, GstEvent *event) { GstCaps *caps; @@ -273,9 +439,9 @@ static void transform_sink_event_caps(struct wg_transform *transform, GstEvent * gst_event_parse_caps(event, &caps); - transform->output_caps_changed = transform->output_caps_changed - || !transform_output_caps_is_compatible(transform, caps); - + transform->output_caps_changed = true; + gst_caps_unref(transform->desired_caps); + transform->desired_caps = gst_caps_ref(caps); gst_caps_unref(transform->output_caps); transform->output_caps = gst_caps_ref(caps); } @@ -320,35 +486,56 @@ NTSTATUS wg_transform_destroy(void *args) g_object_unref(transform->my_sink); g_object_unref(transform->my_src); gst_query_unref(transform->drain_query); + gst_caps_unref(transform->desired_caps); gst_caps_unref(transform->output_caps); + gst_caps_unref(transform->input_caps); 
gst_atomic_queue_unref(transform->output_queue); free(transform); return STATUS_SUCCESS; } -static bool wg_format_video_is_flipped(const struct wg_format *format) +static GstCaps *transform_get_parsed_caps(GstCaps *caps, const char *media_type) { - return format->major_type == WG_MAJOR_TYPE_VIDEO && (format->u.video.height < 0); + GstStructure *structure = gst_caps_get_structure(caps, 0); + GstCaps *parsed_caps; + gint value; + + if (!(parsed_caps = gst_caps_new_empty_simple(media_type))) + return NULL; + + if (gst_structure_get_int(structure, "mpegversion", &value)) + { + if (value == 4) + gst_caps_set_simple(parsed_caps, "framed", G_TYPE_BOOLEAN, true, "mpegversion", G_TYPE_INT, 4, NULL); + else + { + gst_caps_set_simple(parsed_caps, "parsed", G_TYPE_BOOLEAN, true, "mpegversion", G_TYPE_INT, value, NULL); + if (gst_structure_get_int(structure, "layer", &value)) + gst_caps_set_simple(parsed_caps, "layer", G_TYPE_INT, value, NULL); + } + } + else if (gst_structure_get_int(structure, "wmaversion", &value)) + gst_caps_set_simple(parsed_caps, "parsed", G_TYPE_BOOLEAN, true, "wmaversion", G_TYPE_INT, value, NULL); + else if (gst_structure_get_int(structure, "wmvversion", &value)) + gst_caps_set_simple(parsed_caps, "parsed", G_TYPE_BOOLEAN, true, "wmvversion", G_TYPE_INT, value, NULL); + else + gst_caps_set_simple(parsed_caps, "parsed", G_TYPE_BOOLEAN, true, NULL); + + return parsed_caps; } NTSTATUS wg_transform_create(void *args) { struct wg_transform_create_params *params = args; - struct wg_format output_format = *params->output_format; - struct wg_format input_format = *params->input_format; GstElement *first = NULL, *last = NULL, *element; - GstCaps *raw_caps = NULL, *src_caps = NULL; + GstCaps *sink_caps = NULL, *parsed_caps = NULL; NTSTATUS status = STATUS_UNSUCCESSFUL; + const gchar *input_mime, *output_mime; GstPadTemplate *template = NULL; struct wg_transform *transform; - const gchar *media_type; GstEvent *event; - /* to detect h264_decoder_create() */ - if 
(input_format.major_type == WG_MAJOR_TYPE_VIDEO_H264) - touch_h264_used_tag(); - if (!(transform = calloc(1, sizeof(*transform)))) return STATUS_NO_MEMORY; if (!(transform->container = gst_bin_new("wg_transform"))) @@ -361,25 +548,46 @@ NTSTATUS wg_transform_create(void *args) goto out; if (!(transform->allocator = wg_allocator_create())) goto out; - transform->attrs = *params->attrs; - transform->output_format = output_format; + transform->attrs = params->attrs; + + if (!(transform->input_caps = caps_from_media_type(&params->input_type))) + goto out; + GST_INFO("transform %p input caps %"GST_PTR_FORMAT, transform, transform->input_caps); + input_mime = gst_structure_get_name(gst_caps_get_structure(transform->input_caps, 0)); - if (!(src_caps = transform_format_to_caps(transform, &input_format))) + if (!strcmp(input_mime, "video/x-h264")) + touch_h264_used_tag(); + + if (!(transform->output_caps = caps_from_media_type(&params->output_type))) goto out; - if (!(template = gst_pad_template_new("src", GST_PAD_SRC, GST_PAD_ALWAYS, src_caps))) + GST_INFO("transform %p output caps %"GST_PTR_FORMAT, transform, transform->output_caps); + output_mime = gst_structure_get_name(gst_caps_get_structure(transform->output_caps, 0)); + + if (IsEqualGUID(&params->input_type.major, &MFMediaType_Video)) + transform->input_info = params->input_type.u.video->videoInfo; + if (IsEqualGUID(&params->output_type.major, &MFMediaType_Video)) + transform->output_info = params->output_type.u.video->videoInfo; + + /* update the video apertures to make sure GStreamer has a consistent input/output frame size */ + if (!strcmp(input_mime, "video/x-raw") && !strcmp(output_mime, "video/x-raw")) + update_video_aperture(&transform->input_info, &transform->output_info); + + if (IsEqualGUID(&params->input_type.major, &MFMediaType_Video)) + set_video_caps_aperture(transform->input_caps, &transform->input_info); + if (IsEqualGUID(&params->output_type.major, &MFMediaType_Video)) + set_video_caps_aperture(transform->output_caps, 
&transform->output_info); + + if (!(template = gst_pad_template_new("src", GST_PAD_SRC, GST_PAD_ALWAYS, transform->input_caps))) goto out; transform->my_src = gst_pad_new_from_template(template, "src"); g_object_unref(template); if (!transform->my_src) goto out; - GST_INFO("transform %p input caps %"GST_PTR_FORMAT, transform, src_caps); - gst_pad_set_element_private(transform->my_src, transform); gst_pad_set_query_function(transform->my_src, transform_src_query_cb); - if (!(transform->output_caps = transform_format_to_caps(transform, &output_format))) - goto out; + transform->desired_caps = gst_caps_ref(transform->output_caps); if (!(template = gst_pad_template_new("sink", GST_PAD_SINK, GST_PAD_ALWAYS, transform->output_caps))) goto out; transform->my_sink = gst_pad_new_from_template(template, "sink"); @@ -387,61 +595,51 @@ NTSTATUS wg_transform_create(void *args) if (!transform->my_sink) goto out; - GST_INFO("transform %p output caps %"GST_PTR_FORMAT, transform, transform->output_caps); - gst_pad_set_element_private(transform->my_sink, transform); gst_pad_set_event_function(transform->my_sink, transform_sink_event_cb); gst_pad_set_query_function(transform->my_sink, transform_sink_query_cb); gst_pad_set_chain_function(transform->my_sink, transform_sink_chain_cb); + if (!(parsed_caps = transform_get_parsed_caps(transform->input_caps, input_mime))) + goto out; + /* Since we append conversion elements, we don't want to filter decoders * based on the actual output caps now. Matching decoders with the * raw output media type should be enough. 
*/ - media_type = gst_structure_get_name(gst_caps_get_structure(transform->output_caps, 0)); - if (!(raw_caps = gst_caps_new_empty_simple(media_type))) + if (!(sink_caps = gst_caps_new_empty_simple(output_mime))) goto out; - switch (input_format.major_type) + if (strcmp(input_mime, "audio/x-raw") && strcmp(input_mime, "video/x-raw")) { - case WG_MAJOR_TYPE_VIDEO_H264: - if (!(element = create_element("h264parse", "bad")) - || !append_element(transform->container, element, &first, &last)) - goto out; - /* FALLTHROUGH */ - case WG_MAJOR_TYPE_AUDIO_ENCODED: - case WG_MAJOR_TYPE_AUDIO_MPEG1: - case WG_MAJOR_TYPE_AUDIO_MPEG4: - case WG_MAJOR_TYPE_AUDIO_WMA: - case WG_MAJOR_TYPE_VIDEO_ENCODED: - case WG_MAJOR_TYPE_VIDEO_CINEPAK: - case WG_MAJOR_TYPE_VIDEO_INDEO: - case WG_MAJOR_TYPE_VIDEO_WMV: - case WG_MAJOR_TYPE_VIDEO_MPEG1: - if (!(element = find_element(GST_ELEMENT_FACTORY_TYPE_DECODER, src_caps, raw_caps)) - || !append_element(transform->container, element, &first, &last)) - { - gst_caps_unref(raw_caps); - goto out; - } - set_max_threads(element); - break; - - case WG_MAJOR_TYPE_AUDIO: - case WG_MAJOR_TYPE_VIDEO: - transform->attrs.input_queue_length = 16; - break; - case WG_MAJOR_TYPE_UNKNOWN: - GST_FIXME("Format %u not implemented!", input_format.major_type); - gst_caps_unref(raw_caps); + if ((element = find_element(GST_ELEMENT_FACTORY_TYPE_PARSER, transform->input_caps, parsed_caps)) + && !append_element(transform->container, element, &first, &last)) goto out; - } + else if (!element) + { + gst_caps_unref(parsed_caps); + parsed_caps = gst_caps_ref(transform->input_caps); + } - gst_caps_unref(raw_caps); + if (!(element = find_element(GST_ELEMENT_FACTORY_TYPE_DECODER, parsed_caps, sink_caps)) + || !append_element(transform->container, element, &first, &last)) + goto out; + set_max_threads(element); + } + else + { + transform->attrs.input_queue_length = 16; + } - switch (output_format.major_type) + if (g_str_has_prefix(output_mime, "audio/")) { - case 
WG_MAJOR_TYPE_AUDIO: + if (strcmp(output_mime, "audio/x-raw")) + { + GST_FIXME("output caps %"GST_PTR_FORMAT" not implemented!", transform->output_caps); + goto out; + } + else + { /* The MF audio decoder transforms allow decoding to various formats * as well as resampling the audio at the same time, whereas * GStreamer decoder plugins usually only support decoding to a @@ -457,51 +655,24 @@ NTSTATUS wg_transform_create(void *args) if (!(element = create_element("audioresample", "base")) || !append_element(transform->container, element, &first, &last)) goto out; - break; + } + } - case WG_MAJOR_TYPE_VIDEO: + if (g_str_has_prefix(output_mime, "video/")) + { + if (strcmp(output_mime, "video/x-raw")) { - const char *sgi; - if ((sgi = getenv("SteamGameId")) && ((!strcmp(sgi, "2009100")) || (!strcmp(sgi, "2555360")))) - { - if (!(element = create_element("videoconvert", "base")) - || !append_element(transform->container, element, &first, &last)) - goto out; - gst_util_set_object_arg(G_OBJECT(element), "n-threads", "0"); - /* HACK: skip slow?? videoflip for some games */ - break; - } + GST_FIXME("output caps %"GST_PTR_FORMAT" not implemented!", transform->output_caps); + goto out; } - - if (!(element = create_element("videoconvert", "base")) - || !append_element(transform->container, element, &first, &last)) - goto out; - if (!(transform->video_flip = create_element("videoflip", "base")) - || !append_element(transform->container, transform->video_flip, &first, &last)) - goto out; - transform->input_is_flipped = wg_format_video_is_flipped(&input_format); - if (transform->input_is_flipped != wg_format_video_is_flipped(&output_format)) - gst_util_set_object_arg(G_OBJECT(transform->video_flip), "method", "vertical-flip"); + else + { if (!(element = create_element("videoconvert", "base")) || !append_element(transform->container, element, &first, &last)) goto out; /* Let GStreamer choose a default number of threads. 
*/ gst_util_set_object_arg(G_OBJECT(element), "n-threads", "0"); - break; - - case WG_MAJOR_TYPE_UNKNOWN: - case WG_MAJOR_TYPE_AUDIO_MPEG1: - case WG_MAJOR_TYPE_AUDIO_MPEG4: - case WG_MAJOR_TYPE_AUDIO_WMA: - case WG_MAJOR_TYPE_AUDIO_ENCODED: - case WG_MAJOR_TYPE_VIDEO_CINEPAK: - case WG_MAJOR_TYPE_VIDEO_H264: - case WG_MAJOR_TYPE_VIDEO_INDEO: - case WG_MAJOR_TYPE_VIDEO_WMV: - case WG_MAJOR_TYPE_VIDEO_MPEG1: - case WG_MAJOR_TYPE_VIDEO_ENCODED: - GST_FIXME("Format %u not implemented!", output_format.major_type); - goto out; + } } if (!link_src_to_element(transform->my_src, first)) @@ -520,12 +691,12 @@ NTSTATUS wg_transform_create(void *args) if (!(event = gst_event_new_stream_start("stream")) || !push_event(transform->my_src, event)) goto out; - if (!(event = gst_event_new_caps(src_caps)) + if (!(event = gst_event_new_caps(transform->input_caps)) || !push_event(transform->my_src, event)) goto out; /* Check that the caps event have been accepted */ - if (input_format.major_type == WG_MAJOR_TYPE_VIDEO_H264) + if (!strcmp(input_mime, "video/x-h264")) { GstPad *peer; if (!(peer = gst_pad_get_peer(transform->my_src))) @@ -547,7 +718,8 @@ NTSTATUS wg_transform_create(void *args) || !push_event(transform->my_src, event)) goto out; - gst_caps_unref(src_caps); + gst_caps_unref(parsed_caps); + gst_caps_unref(sink_caps); GST_INFO("Created winegstreamer transform %p.", transform); params->transform = (wg_transform_t)(ULONG_PTR)transform; @@ -556,12 +728,18 @@ NTSTATUS wg_transform_create(void *args) out: if (transform->my_sink) gst_object_unref(transform->my_sink); + if (transform->desired_caps) + gst_caps_unref(transform->desired_caps); if (transform->output_caps) gst_caps_unref(transform->output_caps); if (transform->my_src) gst_object_unref(transform->my_src); - if (src_caps) - gst_caps_unref(src_caps); + if (transform->input_caps) + gst_caps_unref(transform->input_caps); + if (parsed_caps) + gst_caps_unref(parsed_caps); + if (sink_caps) + gst_caps_unref(sink_caps); if 
(transform->allocator) wg_allocator_destroy(transform->allocator); if (transform->drain_query) @@ -580,28 +758,59 @@ NTSTATUS wg_transform_create(void *args) return status; } -NTSTATUS wg_transform_set_output_format(void *args) +NTSTATUS wg_transform_get_output_type(void *args) { - struct wg_transform_set_output_format_params *params = args; + struct wg_transform_get_output_type_params *params = args; struct wg_transform *transform = get_transform(params->transform); - const struct wg_format *format = params->format; + GstCaps *output_caps; + + if (transform->output_sample) + output_caps = gst_sample_get_caps(transform->output_sample); + else + output_caps = transform->output_caps; + + GST_INFO("transform %p output caps %"GST_PTR_FORMAT, transform, output_caps); + + return caps_to_media_type(output_caps, &params->media_type, transform->attrs.output_plane_align); +} + +NTSTATUS wg_transform_set_output_type(void *args) +{ + struct wg_transform_set_output_type_params *params = args; + struct wg_transform *transform = get_transform(params->transform); + const char *input_mime, *output_mime; + GstCaps *caps, *stripped; GstSample *sample; - GstCaps *caps; - if (!(caps = transform_format_to_caps(transform, format))) + if (!(caps = caps_from_media_type(&params->media_type))) { - GST_ERROR("Failed to convert format %p to caps.", format); + GST_ERROR("Failed to convert media type to caps."); return STATUS_UNSUCCESSFUL; } - transform->output_format = *format; + + input_mime = gst_structure_get_name(gst_caps_get_structure(transform->input_caps, 0)); + output_mime = gst_structure_get_name(gst_caps_get_structure(caps, 0)); + + if (IsEqualGUID(&params->media_type.major, &MFMediaType_Video)) + transform->output_info = params->media_type.u.video->videoInfo; + + /* update the video apertures to make sure GStreamer has a consistent input/output frame size */ + if (!strcmp(input_mime, "video/x-raw") && !strcmp(output_mime, "video/x-raw")) + update_video_aperture(&transform->input_info, 
&transform->output_info); + + if (IsEqualGUID(&params->media_type.major, &MFMediaType_Video)) + set_video_caps_aperture(caps, &transform->output_info); GST_INFO("transform %p output caps %"GST_PTR_FORMAT, transform, caps); - if (transform_output_caps_is_compatible(transform, caps)) + stripped = caps_strip_fields(caps, transform->attrs.allow_format_change); + if (gst_caps_is_always_compatible(transform->output_caps, stripped)) { + gst_caps_unref(stripped); gst_caps_unref(caps); return STATUS_SUCCESS; } + gst_caps_unref(stripped); if (!gst_pad_peer_query(transform->my_src, transform->drain_query)) { @@ -609,18 +818,9 @@ NTSTATUS wg_transform_set_output_format(void *args) return STATUS_UNSUCCESSFUL; } - gst_caps_unref(transform->output_caps); - transform->output_caps = caps; + gst_caps_unref(transform->desired_caps); + transform->desired_caps = caps; - if (transform->video_flip) - { - const char *value; - if (transform->input_is_flipped != wg_format_video_is_flipped(format)) - value = "vertical-flip"; - else - value = "none"; - gst_util_set_object_arg(G_OBJECT(transform->video_flip), "method", value); - } if (!push_event(transform->my_sink, gst_event_new_reconfigure())) { GST_ERROR("Failed to reconfigure transform %p.", transform); @@ -654,9 +854,18 @@ NTSTATUS wg_transform_push_data(void *args) struct wg_transform_push_data_params *params = args; struct wg_transform *transform = get_transform(params->transform); struct wg_sample *sample = params->sample; + const gchar *input_mime; + GstVideoInfo video_info; GstBuffer *buffer; guint length; + if (transform->draining) + { + GST_INFO("Refusing %u bytes, transform is draining", sample->size); + params->result = MF_E_NOTACCEPTING; + return STATUS_SUCCESS; + } + length = gst_atomic_queue_length(transform->input_queue); if (length >= transform->attrs.input_queue_length + 1) { @@ -677,6 +886,14 @@ NTSTATUS wg_transform_push_data(void *args) GST_INFO("Wrapped %u/%u bytes from sample %p to %"GST_PTR_FORMAT, sample->size, 
sample->max_size, sample, buffer); } + input_mime = gst_structure_get_name(gst_caps_get_structure(transform->input_caps, 0)); + if (!strcmp(input_mime, "video/x-raw") && gst_video_info_from_caps(&video_info, transform->input_caps)) + { + GstVideoAlignment align; + align_video_info_planes(&transform->input_info, 0, &video_info, &align); + buffer_add_video_meta(buffer, &video_info); + } + if (sample->flags & WG_SAMPLE_FLAG_HAS_PTS) GST_BUFFER_PTS(buffer) = sample->pts * 100; if (sample->flags & WG_SAMPLE_FLAG_HAS_DURATION) @@ -691,25 +908,14 @@ NTSTATUS wg_transform_push_data(void *args) return STATUS_SUCCESS; } -static NTSTATUS copy_video_buffer(GstBuffer *buffer, GstCaps *caps, gsize plane_align, - struct wg_sample *sample, gsize *total_size) +static NTSTATUS copy_video_buffer(GstBuffer *buffer, const GstVideoInfo *src_video_info, + const GstVideoInfo *dst_video_info, struct wg_sample *sample, gsize *total_size) { NTSTATUS status = STATUS_UNSUCCESSFUL; GstVideoFrame src_frame, dst_frame; - GstVideoInfo src_info, dst_info; - GstVideoAlignment align; GstBuffer *dst_buffer; - if (!gst_video_info_from_caps(&src_info, caps)) - { - GST_ERROR("Failed to get video info from caps."); - return STATUS_UNSUCCESSFUL; - } - - dst_info = src_info; - align_video_info_planes(plane_align, &dst_info, &align); - - if (sample->max_size < dst_info.size) + if (sample->max_size < dst_video_info->size) { GST_ERROR("Output buffer is too small."); return STATUS_BUFFER_TOO_SMALL; @@ -721,14 +927,14 @@ static NTSTATUS copy_video_buffer(GstBuffer *buffer, GstCaps *caps, gsize plane_ GST_ERROR("Failed to wrap wg_sample into GstBuffer"); return STATUS_UNSUCCESSFUL; } - gst_buffer_set_size(dst_buffer, dst_info.size); - *total_size = sample->size = dst_info.size; + gst_buffer_set_size(dst_buffer, dst_video_info->size); + *total_size = sample->size = dst_video_info->size; - if (!gst_video_frame_map(&src_frame, &src_info, buffer, GST_MAP_READ)) + if (!gst_video_frame_map(&src_frame, src_video_info, 
buffer, GST_MAP_READ)) GST_ERROR("Failed to map source frame."); else { - if (!gst_video_frame_map(&dst_frame, &dst_info, dst_buffer, GST_MAP_WRITE)) + if (!gst_video_frame_map(&dst_frame, dst_video_info, dst_buffer, GST_MAP_WRITE)) GST_ERROR("Failed to map destination frame."); else { @@ -745,8 +951,7 @@ static NTSTATUS copy_video_buffer(GstBuffer *buffer, GstCaps *caps, gsize plane_ return status; } -static NTSTATUS copy_buffer(GstBuffer *buffer, GstCaps *caps, struct wg_sample *sample, - gsize *total_size) +static NTSTATUS copy_buffer(GstBuffer *buffer, struct wg_sample *sample, gsize *total_size) { GstMapInfo info; @@ -771,13 +976,35 @@ static NTSTATUS copy_buffer(GstBuffer *buffer, GstCaps *caps, struct wg_sample * return STATUS_SUCCESS; } -static NTSTATUS read_transform_output_data(GstBuffer *buffer, GstCaps *caps, gsize plane_align, - struct wg_sample *sample) +static void set_sample_flags_from_buffer(struct wg_sample *sample, GstBuffer *buffer, gsize total_size) +{ + if (GST_BUFFER_PTS_IS_VALID(buffer)) + { + sample->flags |= WG_SAMPLE_FLAG_HAS_PTS; + sample->pts = GST_BUFFER_PTS(buffer) / 100; + } + if (GST_BUFFER_DURATION_IS_VALID(buffer)) + { + GstClockTime duration = GST_BUFFER_DURATION(buffer) / 100; + + duration = (duration * sample->size) / total_size; + GST_BUFFER_DURATION(buffer) -= duration * 100; + if (GST_BUFFER_PTS_IS_VALID(buffer)) + GST_BUFFER_PTS(buffer) += duration * 100; + + sample->flags |= WG_SAMPLE_FLAG_HAS_DURATION; + sample->duration = duration; + } + if (!GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT)) + sample->flags |= WG_SAMPLE_FLAG_SYNC_POINT; + if (GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DISCONT)) + sample->flags |= WG_SAMPLE_FLAG_DISCONTINUITY; +} + +static bool sample_needs_buffer_copy(struct wg_sample *sample, GstBuffer *buffer, gsize *total_size) { - gsize total_size; - bool needs_copy; - NTSTATUS status; GstMapInfo info; + bool needs_copy; if (!gst_buffer_map(buffer, &info, GST_MAP_READ)) { @@ -786,15 
+1013,23 @@ static NTSTATUS read_transform_output_data(GstBuffer *buffer, GstCaps *caps, gsi return STATUS_UNSUCCESSFUL; } needs_copy = info.data != wg_sample_data(sample); - total_size = sample->size = info.size; + *total_size = sample->size = info.size; gst_buffer_unmap(buffer, &info); - if (!needs_copy) + return needs_copy; +} + +static NTSTATUS read_transform_output_video(struct wg_sample *sample, GstBuffer *buffer, + const GstVideoInfo *src_video_info, const GstVideoInfo *dst_video_info) +{ + gsize total_size; + NTSTATUS status; + bool needs_copy; + + if (!(needs_copy = sample_needs_buffer_copy(sample, buffer, &total_size))) status = STATUS_SUCCESS; - else if (stream_type_from_caps(caps) == GST_STREAM_TYPE_VIDEO) - status = copy_video_buffer(buffer, caps, plane_align, sample, &total_size); else - status = copy_buffer(buffer, caps, sample, &total_size); + status = copy_video_buffer(buffer, src_video_info, dst_video_info, sample, &total_size); if (status) { @@ -803,35 +1038,40 @@ static NTSTATUS read_transform_output_data(GstBuffer *buffer, GstCaps *caps, gsi return status; } - if (GST_BUFFER_PTS_IS_VALID(buffer)) - { - sample->flags |= WG_SAMPLE_FLAG_HAS_PTS; - sample->pts = GST_BUFFER_PTS(buffer) / 100; - } - if (GST_BUFFER_DURATION_IS_VALID(buffer)) - { - GstClockTime duration = GST_BUFFER_DURATION(buffer) / 100; + set_sample_flags_from_buffer(sample, buffer, total_size); - duration = (duration * sample->size) / total_size; - GST_BUFFER_DURATION(buffer) -= duration * 100; - if (GST_BUFFER_PTS_IS_VALID(buffer)) - GST_BUFFER_PTS(buffer) += duration * 100; + if (needs_copy) + GST_WARNING("Copied %u bytes, sample %p, flags %#x", sample->size, sample, sample->flags); + else if (sample->flags & WG_SAMPLE_FLAG_INCOMPLETE) + GST_ERROR("Partial read %u bytes, sample %p, flags %#x", sample->size, sample, sample->flags); + else + GST_INFO("Read %u bytes, sample %p, flags %#x", sample->size, sample, sample->flags); - sample->flags |= WG_SAMPLE_FLAG_HAS_DURATION; - 
sample->duration = duration; - } - if (!GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT)) - sample->flags |= WG_SAMPLE_FLAG_SYNC_POINT; - if (GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DISCONT)) - sample->flags |= WG_SAMPLE_FLAG_DISCONTINUITY; + return STATUS_SUCCESS; +} - if (needs_copy) +static NTSTATUS read_transform_output(struct wg_sample *sample, GstBuffer *buffer) +{ + gsize total_size; + NTSTATUS status; + bool needs_copy; + + if (!(needs_copy = sample_needs_buffer_copy(sample, buffer, &total_size))) + status = STATUS_SUCCESS; + else + status = copy_buffer(buffer, sample, &total_size); + + if (status) { - if (stream_type_from_caps(caps) == GST_STREAM_TYPE_VIDEO) - GST_WARNING("Copied %u bytes, sample %p, flags %#x", sample->size, sample, sample->flags); - else - GST_INFO("Copied %u bytes, sample %p, flags %#x", sample->size, sample, sample->flags); + GST_ERROR("Failed to copy buffer %"GST_PTR_FORMAT, buffer); + sample->size = 0; + return status; } + + set_sample_flags_from_buffer(sample, buffer, total_size); + + if (needs_copy) + GST_INFO("Copied %u bytes, sample %p, flags %#x", sample->size, sample, sample->flags); else if (sample->flags & WG_SAMPLE_FLAG_INCOMPLETE) GST_ERROR("Partial read %u bytes, sample %p, flags %#x", sample->size, sample, sample->flags); else @@ -840,6 +1080,33 @@ static NTSTATUS read_transform_output_data(GstBuffer *buffer, GstCaps *caps, gsi return STATUS_SUCCESS; } +static NTSTATUS complete_drain(struct wg_transform *transform) +{ + if (transform->draining && gst_atomic_queue_length(transform->input_queue) == 0) + { + GstEvent *event; + transform->draining = false; + if (!(event = gst_event_new_segment_done(GST_FORMAT_TIME, -1)) + || !push_event(transform->my_src, event)) + goto error; + if (!(event = gst_event_new_eos()) + || !push_event(transform->my_src, event)) + goto error; + if (!(event = gst_event_new_stream_start("stream")) + || !push_event(transform->my_src, event)) + goto error; + if (!(event = 
gst_event_new_segment(&transform->segment)) + || !push_event(transform->my_src, event)) + goto error; + } + + return STATUS_SUCCESS; + +error: + GST_ERROR("Failed to drain transform %p.", transform); + return STATUS_UNSUCCESSFUL; +} + static bool get_transform_output(struct wg_transform *transform, struct wg_sample *sample) { GstBuffer *input_buffer; @@ -852,6 +1119,8 @@ static bool get_transform_output(struct wg_transform *transform, struct wg_sampl { if ((ret = gst_pad_push(transform->my_src, input_buffer))) GST_WARNING("Failed to push transform input, error %d", ret); + + complete_drain(transform); } /* Remove the sample so the allocator cannot use it */ @@ -864,9 +1133,11 @@ NTSTATUS wg_transform_read_data(void *args) { struct wg_transform_read_data_params *params = args; struct wg_transform *transform = get_transform(params->transform); + GstVideoInfo src_video_info, dst_video_info; struct wg_sample *sample = params->sample; - struct wg_format *format = params->format; + GstVideoAlignment align = {0}; GstBuffer *output_buffer; + const char *output_mime; GstCaps *output_caps; bool discard_data; NTSTATUS status; @@ -882,48 +1153,44 @@ NTSTATUS wg_transform_read_data(void *args) output_buffer = gst_sample_get_buffer(transform->output_sample); output_caps = gst_sample_get_caps(transform->output_sample); + output_mime = gst_structure_get_name(gst_caps_get_structure(output_caps, 0)); - if (GST_MINI_OBJECT_FLAG_IS_SET(transform->output_sample, GST_SAMPLE_FLAG_WG_CAPS_CHANGED)) + if (!strcmp(output_mime, "video/x-raw")) { - GST_MINI_OBJECT_FLAG_UNSET(transform->output_sample, GST_SAMPLE_FLAG_WG_CAPS_CHANGED); + gsize plane_align = transform->attrs.output_plane_align; + GstVideoMeta *meta; + + if (!gst_video_info_from_caps(&src_video_info, output_caps)) + GST_ERROR("Failed to get video info from %"GST_PTR_FORMAT, output_caps); + dst_video_info = src_video_info; - GST_INFO("transform %p output caps %"GST_PTR_FORMAT, transform, output_caps); + /* set the desired output 
buffer alignment and stride on the dest video info */ + align_video_info_planes(&transform->output_info, plane_align, &dst_video_info, &align); - if (format) + /* copy the actual output buffer alignment and stride to the src video info */ + if ((meta = gst_buffer_get_video_meta(output_buffer))) { - gsize plane_align = transform->attrs.output_plane_align; - GstVideoAlignment align; - GstVideoInfo info; - - wg_format_from_caps(format, output_caps); - - if (format->major_type == WG_MAJOR_TYPE_VIDEO - && gst_video_info_from_caps(&info, output_caps)) - { - align_video_info_planes(plane_align, &info, &align); - - GST_INFO("Returning video alignment left %u, top %u, right %u, bottom %u.", align.padding_left, - align.padding_top, align.padding_right, align.padding_bottom); - - format->u.video.padding.left = align.padding_left; - format->u.video.width += format->u.video.padding.left; - format->u.video.padding.right = align.padding_right; - format->u.video.width += format->u.video.padding.right; - format->u.video.padding.top = align.padding_top; - format->u.video.height += format->u.video.padding.top; - format->u.video.padding.bottom = align.padding_bottom; - format->u.video.height += format->u.video.padding.bottom; - } + memcpy(src_video_info.offset, meta->offset, sizeof(meta->offset)); + memcpy(src_video_info.stride, meta->stride, sizeof(meta->stride)); } + } + if (GST_MINI_OBJECT_FLAG_IS_SET(transform->output_sample, GST_SAMPLE_FLAG_WG_CAPS_CHANGED)) + { + GST_MINI_OBJECT_FLAG_UNSET(transform->output_sample, GST_SAMPLE_FLAG_WG_CAPS_CHANGED); params->result = MF_E_TRANSFORM_STREAM_CHANGE; GST_INFO("Format changed detected, returning no output"); wg_allocator_release_sample(transform->allocator, sample, false); return STATUS_SUCCESS; } - if ((status = read_transform_output_data(output_buffer, output_caps, - transform->attrs.output_plane_align, sample))) + if (!strcmp(output_mime, "video/x-raw")) + status = read_transform_output_video(sample, output_buffer, + 
&src_video_info, &dst_video_info); + else + status = read_transform_output(sample, output_buffer); + + if (status) { wg_allocator_release_sample(transform->allocator, sample, false); return status; @@ -969,36 +1236,12 @@ NTSTATUS wg_transform_get_status(void *args) NTSTATUS wg_transform_drain(void *args) { struct wg_transform *transform = get_transform(*(wg_transform_t *)args); - GstBuffer *input_buffer; - GstFlowReturn ret; - GstEvent *event; - - GST_LOG("transform %p", transform); - - while ((input_buffer = gst_atomic_queue_pop(transform->input_queue))) - { - if ((ret = gst_pad_push(transform->my_src, input_buffer))) - GST_WARNING("Failed to push transform input, error %d", ret); - } - if (!(event = gst_event_new_segment_done(GST_FORMAT_TIME, -1)) - || !push_event(transform->my_src, event)) - goto error; - if (!(event = gst_event_new_eos()) - || !push_event(transform->my_src, event)) - goto error; - if (!(event = gst_event_new_stream_start("stream")) - || !push_event(transform->my_src, event)) - goto error; - if (!(event = gst_event_new_segment(&transform->segment)) - || !push_event(transform->my_src, event)) - goto error; + GST_LOG("transform %p, draining %d buffers", transform, gst_atomic_queue_length(transform->input_queue)); - return STATUS_SUCCESS; + transform->draining = true; -error: - GST_ERROR("Failed to drain transform %p.", transform); - return STATUS_UNSUCCESSFUL; + return complete_drain(transform); } NTSTATUS wg_transform_flush(void *args) diff --git a/dlls/winegstreamer/wm_reader.c b/dlls/winegstreamer/wm_reader.c index 3afd021bcac..16b5ca3bcd1 100644 --- a/dlls/winegstreamer/wm_reader.c +++ b/dlls/winegstreamer/wm_reader.c @@ -1490,7 +1490,7 @@ static HRESULT init_stream(struct wm_reader *reader) stream->reader = reader; stream->index = i; stream->selection = WMT_ON; - wg_parser_stream_get_preferred_format(stream->wg_stream, &stream->format); + wg_parser_stream_get_current_format(stream->wg_stream, &stream->format); if (stream->format.major_type == 
WG_MAJOR_TYPE_AUDIO) { /* R.U.S.E enumerates available audio types, picks the first one it @@ -1527,7 +1527,7 @@ static HRESULT init_stream(struct wm_reader *reader) stream->format.u.video.format = WG_VIDEO_FORMAT_BGRx; } } - wg_parser_stream_enable(stream->wg_stream, &stream->format, STREAM_ENABLE_FLAG_FLIP_RGB); + wg_parser_stream_enable(stream->wg_stream, &stream->format); } /* We probably discarded events because streams weren't enabled yet. @@ -1600,9 +1600,9 @@ static HRESULT reinit_stream(struct wm_reader *reader, bool read_compressed) stream->wg_stream = wg_parser_get_stream(reader->wg_parser, reader->stream_count - i - 1); stream->reader = reader; - wg_parser_stream_get_preferred_format(stream->wg_stream, &format); + wg_parser_stream_get_current_format(stream->wg_stream, &format); if (stream->selection == WMT_ON) - wg_parser_stream_enable(stream->wg_stream, read_compressed ? &format : &stream->format, STREAM_ENABLE_FLAG_FLIP_RGB); + wg_parser_stream_enable(stream->wg_stream, read_compressed ? &format : &stream->format); } /* We probably discarded events because streams weren't enabled yet. 
@@ -1676,10 +1676,6 @@ static const char *get_major_type_string(enum wg_major_type type) return "indeo"; case WG_MAJOR_TYPE_VIDEO_MPEG1: return "mpeg1-video"; - case WG_MAJOR_TYPE_AUDIO_ENCODED: - return "unknown-audio"; - case WG_MAJOR_TYPE_VIDEO_ENCODED: - return "unknown-video"; case WG_MAJOR_TYPE_UNKNOWN: return "unknown"; } @@ -2022,7 +2018,7 @@ static HRESULT WINAPI reader_GetOutputFormat(IWMSyncReader2 *iface, return E_INVALIDARG; } - wg_parser_stream_get_preferred_format(stream->wg_stream, &format); + wg_parser_stream_get_current_format(stream->wg_stream, &format); switch (format.major_type) { @@ -2050,13 +2046,11 @@ static HRESULT WINAPI reader_GetOutputFormat(IWMSyncReader2 *iface, case WG_MAJOR_TYPE_AUDIO_MPEG1: case WG_MAJOR_TYPE_AUDIO_MPEG4: case WG_MAJOR_TYPE_AUDIO_WMA: - case WG_MAJOR_TYPE_AUDIO_ENCODED: case WG_MAJOR_TYPE_VIDEO_CINEPAK: case WG_MAJOR_TYPE_VIDEO_H264: case WG_MAJOR_TYPE_VIDEO_WMV: case WG_MAJOR_TYPE_VIDEO_INDEO: case WG_MAJOR_TYPE_VIDEO_MPEG1: - case WG_MAJOR_TYPE_VIDEO_ENCODED: FIXME("Format %u not implemented!\n", format.major_type); break; case WG_MAJOR_TYPE_UNKNOWN: @@ -2085,7 +2079,7 @@ static HRESULT WINAPI reader_GetOutputFormatCount(IWMSyncReader2 *iface, DWORD o return E_INVALIDARG; } - wg_parser_stream_get_preferred_format(stream->wg_stream, &format); + wg_parser_stream_get_current_format(stream->wg_stream, &format); switch (format.major_type) { case WG_MAJOR_TYPE_VIDEO: @@ -2095,13 +2089,11 @@ static HRESULT WINAPI reader_GetOutputFormatCount(IWMSyncReader2 *iface, DWORD o case WG_MAJOR_TYPE_AUDIO_MPEG1: case WG_MAJOR_TYPE_AUDIO_MPEG4: case WG_MAJOR_TYPE_AUDIO_WMA: - case WG_MAJOR_TYPE_AUDIO_ENCODED: case WG_MAJOR_TYPE_VIDEO_CINEPAK: case WG_MAJOR_TYPE_VIDEO_H264: case WG_MAJOR_TYPE_VIDEO_WMV: case WG_MAJOR_TYPE_VIDEO_INDEO: case WG_MAJOR_TYPE_VIDEO_MPEG1: - case WG_MAJOR_TYPE_VIDEO_ENCODED: FIXME("Format %u not implemented!\n", format.major_type); /* fallthrough */ case WG_MAJOR_TYPE_AUDIO: @@ -2338,7 +2330,7 @@ static 
HRESULT WINAPI reader_SetOutputProps(IWMSyncReader2 *iface, DWORD output, return E_INVALIDARG; } - wg_parser_stream_get_preferred_format(stream->wg_stream, &pref_format); + wg_parser_stream_get_current_format(stream->wg_stream, &pref_format); if (pref_format.major_type != format.major_type) { /* R.U.S.E sets the type of the wrong stream, apparently by accident. */ @@ -2378,7 +2370,7 @@ static HRESULT WINAPI reader_SetOutputProps(IWMSyncReader2 *iface, DWORD output, } stream->format = format; - wg_parser_stream_enable(stream->wg_stream, &format, STREAM_ENABLE_FLAG_FLIP_RGB); + wg_parser_stream_enable(stream->wg_stream, &format); /* Re-decode any buffers that might have been generated with the old format. * @@ -2527,12 +2519,12 @@ static HRESULT WINAPI reader_SetStreamsSelected(IWMSyncReader2 *iface, if (stream->read_compressed) { struct wg_format format; - wg_parser_stream_get_preferred_format(stream->wg_stream, &format); - wg_parser_stream_enable(stream->wg_stream, &format, STREAM_ENABLE_FLAG_FLIP_RGB); + wg_parser_stream_get_current_format(stream->wg_stream, &format); + wg_parser_stream_enable(stream->wg_stream, &format); } else { - wg_parser_stream_enable(stream->wg_stream, &stream->format, STREAM_ENABLE_FLAG_FLIP_RGB); + wg_parser_stream_enable(stream->wg_stream, &stream->format); } } } diff --git a/dlls/winegstreamer/wma_decoder.c b/dlls/winegstreamer/wma_decoder.c index 34b2bba9ebe..98587bb224d 100644 --- a/dlls/winegstreamer/wma_decoder.c +++ b/dlls/winegstreamer/wma_decoder.c @@ -25,6 +25,7 @@ #include "mftransform.h" #include "wmcodecdsp.h" #include "mediaerr.h" +#include "dmort.h" #include "wine/debug.h" @@ -55,8 +56,8 @@ struct wma_decoder IUnknown *outer; LONG refcount; - struct wg_format input_format; - struct wg_format output_format; + DMO_MEDIA_TYPE input_type; + DMO_MEDIA_TYPE output_type; DWORD input_buf_size; DWORD output_buf_size; @@ -75,19 +76,13 @@ static HRESULT try_create_wg_transform(struct wma_decoder *decoder) struct wg_transform_attrs 
attrs = {0}; if (decoder->wg_transform) + { wg_transform_destroy(decoder->wg_transform); - decoder->wg_transform = 0; - - if (decoder->input_format.major_type == WG_MAJOR_TYPE_UNKNOWN) - return MF_E_INVALIDMEDIATYPE; - - if (decoder->output_format.major_type == WG_MAJOR_TYPE_UNKNOWN) - return MF_E_INVALIDMEDIATYPE; - - if (!(decoder->wg_transform = wg_transform_create(&decoder->input_format, &decoder->output_format, &attrs))) - return E_FAIL; + decoder->wg_transform = 0; + } - return S_OK; + return wg_transform_create_quartz(&decoder->input_type, &decoder->output_type, + &attrs, &decoder->wg_transform); } static HRESULT WINAPI unknown_QueryInterface(IUnknown *iface, REFIID iid, void **out) @@ -138,6 +133,9 @@ static ULONG WINAPI unknown_Release(IUnknown *iface) wg_transform_destroy(decoder->wg_transform); wg_sample_queue_destroy(decoder->wg_sample_queue); + + MoFreeMediaType(&decoder->input_type); + MoFreeMediaType(&decoder->output_type); free(decoder); } @@ -204,8 +202,8 @@ static HRESULT WINAPI transform_GetInputStreamInfo(IMFTransform *iface, DWORD id TRACE("iface %p, id %lu, info %p.\n", iface, id, info); - if (decoder->input_format.major_type == WG_MAJOR_TYPE_UNKNOWN - || decoder->output_format.major_type == WG_MAJOR_TYPE_UNKNOWN) + if (IsEqualGUID(&decoder->input_type.majortype, &GUID_NULL) + || IsEqualGUID(&decoder->output_type.majortype, &GUID_NULL)) { memset(info, 0, sizeof(*info)); return MF_E_TRANSFORM_TYPE_NOT_SET; @@ -225,8 +223,8 @@ static HRESULT WINAPI transform_GetOutputStreamInfo(IMFTransform *iface, DWORD i TRACE("iface %p, id %lu, info %p.\n", iface, id, info); - if (decoder->input_format.major_type == WG_MAJOR_TYPE_UNKNOWN - || decoder->output_format.major_type == WG_MAJOR_TYPE_UNKNOWN) + if (IsEqualGUID(&decoder->input_type.majortype, &GUID_NULL) + || IsEqualGUID(&decoder->output_type.majortype, &GUID_NULL)) { memset(info, 0, sizeof(*info)); return MF_E_TRANSFORM_TYPE_NOT_SET; @@ -282,11 +280,12 @@ static HRESULT WINAPI 
transform_GetOutputAvailableType(IMFTransform *iface, DWOR struct wma_decoder *decoder = impl_from_IMFTransform(iface); IMFMediaType *media_type; const GUID *output_type; + WAVEFORMATEX *wfx; HRESULT hr; TRACE("iface %p, id %lu, index %lu, type %p.\n", iface, id, index, type); - if (decoder->input_format.major_type == WG_MAJOR_TYPE_UNKNOWN) + if (IsEqualGUID(&decoder->input_type.majortype, &GUID_NULL)) return MF_E_TRANSFORM_TYPE_NOT_SET; *type = NULL; @@ -318,20 +317,16 @@ static HRESULT WINAPI transform_GetOutputAvailableType(IMFTransform *iface, DWOR sample_size))) goto done; - if (FAILED(hr = IMFMediaType_SetUINT32(media_type, &MF_MT_AUDIO_NUM_CHANNELS, - decoder->input_format.u.audio.channels))) + wfx = (WAVEFORMATEX *)decoder->input_type.pbFormat; + if (FAILED(hr = IMFMediaType_SetUINT32(media_type, &MF_MT_AUDIO_NUM_CHANNELS, wfx->nChannels))) goto done; - - if (FAILED(hr = IMFMediaType_SetUINT32(media_type, &MF_MT_AUDIO_SAMPLES_PER_SECOND, - decoder->input_format.u.audio.rate))) + if (FAILED(hr = IMFMediaType_SetUINT32(media_type, &MF_MT_AUDIO_SAMPLES_PER_SECOND, wfx->nSamplesPerSec))) goto done; - block_alignment = sample_size * decoder->input_format.u.audio.channels / 8; - if (FAILED(hr = IMFMediaType_SetUINT32(media_type, &MF_MT_AUDIO_BLOCK_ALIGNMENT, - block_alignment))) + block_alignment = sample_size * wfx->nChannels / 8; + if (FAILED(hr = IMFMediaType_SetUINT32(media_type, &MF_MT_AUDIO_BLOCK_ALIGNMENT, block_alignment))) goto done; - if (FAILED(hr = IMFMediaType_SetUINT32(media_type, &MF_MT_AUDIO_AVG_BYTES_PER_SECOND, - decoder->input_format.u.audio.rate * block_alignment))) + if (FAILED(hr = IMFMediaType_SetUINT32(media_type, &MF_MT_AUDIO_AVG_BYTES_PER_SECOND, wfx->nSamplesPerSec * block_alignment))) goto done; if (FAILED(hr = IMFMediaType_SetUINT32(media_type, &MF_MT_ALL_SAMPLES_INDEPENDENT, 1))) @@ -387,9 +382,13 @@ static HRESULT WINAPI transform_SetInputType(IMFTransform *iface, DWORD id, IMFM if (flags & MFT_SET_TYPE_TEST_ONLY) return S_OK; - 
mf_media_type_to_wg_format(type, &decoder->input_format); - decoder->input_buf_size = block_alignment; - decoder->output_format.major_type = WG_MAJOR_TYPE_UNKNOWN; + MoFreeMediaType(&decoder->output_type); + memset(&decoder->output_type, 0, sizeof(decoder->output_type)); + MoFreeMediaType(&decoder->input_type); + memset(&decoder->input_type, 0, sizeof(decoder->input_type)); + + if (SUCCEEDED(hr = MFInitAMMediaTypeFromMFMediaType(type, GUID_NULL, &decoder->input_type))) + decoder->input_buf_size = block_alignment; return hr; } @@ -405,7 +404,7 @@ static HRESULT WINAPI transform_SetOutputType(IMFTransform *iface, DWORD id, IMF TRACE("iface %p, id %lu, type %p, flags %#lx.\n", iface, id, type, flags); - if (decoder->input_format.major_type == WG_MAJOR_TYPE_UNKNOWN) + if (IsEqualGUID(&decoder->input_type.majortype, &GUID_NULL)) return MF_E_TRANSFORM_TYPE_NOT_SET; if (FAILED(hr = IMFMediaType_GetGUID(type, &MF_MT_MAJOR_TYPE, &major)) || @@ -448,10 +447,15 @@ static HRESULT WINAPI transform_SetOutputType(IMFTransform *iface, DWORD id, IMF if (flags & MFT_SET_TYPE_TEST_ONLY) return S_OK; - decoder->input_format.u.audio.depth = sample_size; + MoFreeMediaType(&decoder->output_type); + memset(&decoder->output_type, 0, sizeof(decoder->output_type)); - mf_media_type_to_wg_format(type, &decoder->output_format); - decoder->output_buf_size = 1024 * block_alignment * channel_count; + if (SUCCEEDED(hr = MFInitAMMediaTypeFromMFMediaType(type, GUID_NULL, &decoder->output_type))) + { + WAVEFORMATEX *wfx = (WAVEFORMATEX *)decoder->input_type.pbFormat; + wfx->wBitsPerSample = sample_size; + decoder->output_buf_size = 1024 * block_alignment * channel_count; + } if (FAILED(hr = try_create_wg_transform(decoder))) goto failed; @@ -459,7 +463,8 @@ static HRESULT WINAPI transform_SetOutputType(IMFTransform *iface, DWORD id, IMF return S_OK; failed: - decoder->output_format.major_type = WG_MAJOR_TYPE_UNKNOWN; + MoFreeMediaType(&decoder->output_type); + memset(&decoder->output_type, 0, 
sizeof(decoder->output_type)); return hr; } @@ -565,7 +570,7 @@ static HRESULT WINAPI transform_ProcessOutput(IMFTransform *iface, DWORD flags, return hr; if (SUCCEEDED(hr = wg_transform_read_mf(decoder->wg_transform, samples->pSample, - info.cbSize, NULL, &samples->dwStatus))) + info.cbSize, &samples->dwStatus))) wg_sample_queue_flush(decoder->wg_sample_queue, false); return hr; @@ -669,7 +674,9 @@ static HRESULT WINAPI media_object_GetOutputType(IMediaObject *iface, DWORD inde DMO_MEDIA_TYPE *type) { struct wma_decoder *decoder = impl_from_IMediaObject(iface); - WAVEFORMATEX *wfx; + UINT32 depth, channels, rate; + IMFMediaType *media_type; + HRESULT hr; TRACE("iface %p, index %lu, type_index %lu, type %p\n", iface, index, type_index, type); @@ -677,42 +684,44 @@ static HRESULT WINAPI media_object_GetOutputType(IMediaObject *iface, DWORD inde return DMO_E_INVALIDSTREAMINDEX; if (type_index >= 1) return DMO_E_NO_MORE_ITEMS; - if (decoder->input_format.major_type == WG_MAJOR_TYPE_UNKNOWN) + if (IsEqualGUID(&decoder->input_type.majortype, &GUID_NULL)) return DMO_E_TYPE_NOT_SET; if (!type) return S_OK; - memset(type, 0, sizeof(*type)); - type->majortype = MFMediaType_Audio; - type->subtype = MEDIASUBTYPE_PCM; - type->formattype = FORMAT_WaveFormatEx; - type->bFixedSizeSamples = FALSE; - type->bTemporalCompression = TRUE; - type->lSampleSize = 0; - - type->cbFormat = sizeof(WAVEFORMATEX); - type->pbFormat = CoTaskMemAlloc(type->cbFormat); - memset(type->pbFormat, 0, type->cbFormat); + if (FAILED(hr = MFCreateMediaTypeFromRepresentation(AM_MEDIA_TYPE_REPRESENTATION, + &decoder->input_type, &media_type))) + return hr; - wfx = (WAVEFORMATEX *)type->pbFormat; - if (decoder->input_format.u.audio.depth == 32) - wfx->wFormatTag = WAVE_FORMAT_IEEE_FLOAT; + if (SUCCEEDED(IMFMediaType_GetUINT32(media_type, &MF_MT_AUDIO_BITS_PER_SAMPLE, &depth)) + && depth == 32) + hr = IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &MFAudioFormat_Float); else - wfx->wFormatTag = 
WAVE_FORMAT_PCM; - wfx->nChannels = decoder->input_format.u.audio.channels; - wfx->nSamplesPerSec = decoder->input_format.u.audio.rate; - wfx->wBitsPerSample = decoder->input_format.u.audio.depth; - wfx->nAvgBytesPerSec = wfx->nChannels * wfx->nSamplesPerSec * wfx->wBitsPerSample / 8; - wfx->nBlockAlign = wfx->nChannels * wfx->wBitsPerSample / 8; + hr = IMFMediaType_SetGUID(media_type, &MF_MT_SUBTYPE, &MFAudioFormat_PCM); - return S_OK; + if (SUCCEEDED(hr)) + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_AUDIO_NUM_CHANNELS, &channels); + if (SUCCEEDED(hr)) + hr = IMFMediaType_SetUINT32(media_type, &MF_MT_AUDIO_BLOCK_ALIGNMENT, depth * channels / 8); + + if (SUCCEEDED(hr)) + hr = IMFMediaType_GetUINT32(media_type, &MF_MT_AUDIO_SAMPLES_PER_SECOND, &rate); + if (SUCCEEDED(hr)) + hr = IMFMediaType_SetUINT32(media_type, &MF_MT_AUDIO_AVG_BYTES_PER_SECOND, depth * channels / 8 * rate); + + if (SUCCEEDED(hr)) + hr = IMFMediaType_DeleteItem(media_type, &MF_MT_USER_DATA); + if (SUCCEEDED(hr)) + hr = MFInitAMMediaTypeFromMFMediaType(media_type, GUID_NULL, type); + + IMFMediaType_Release(media_type); + return hr; } static HRESULT WINAPI media_object_SetInputType(IMediaObject *iface, DWORD index, const DMO_MEDIA_TYPE *type, DWORD flags) { struct wma_decoder *decoder = impl_from_IMediaObject(iface); - struct wg_format wg_format; TRACE("iface %p, index %lu, type %p, flags %#lx.\n", iface, index, type, flags); @@ -723,7 +732,8 @@ static HRESULT WINAPI media_object_SetInputType(IMediaObject *iface, DWORD index { if (flags != DMO_SET_TYPEF_CLEAR) return E_INVALIDARG; - memset(&decoder->input_format, 0, sizeof(decoder->input_format)); + MoFreeMediaType(&decoder->input_type); + memset(&decoder->input_type, 0, sizeof(decoder->input_type)); if (decoder->wg_transform) { wg_transform_destroy(decoder->wg_transform); @@ -739,13 +749,13 @@ static HRESULT WINAPI media_object_SetInputType(IMediaObject *iface, DWORD index if (!IsEqualGUID(&type->majortype, &MEDIATYPE_Audio)) return 
DMO_E_TYPE_NOT_ACCEPTED; - if (!amt_to_wg_format((const AM_MEDIA_TYPE *)type, &wg_format)) - return DMO_E_TYPE_NOT_ACCEPTED; - if (flags & DMO_SET_TYPEF_TEST_ONLY) return S_OK; - decoder->input_format = wg_format; + MoFreeMediaType(&decoder->input_type); + memset(&decoder->input_type, 0, sizeof(decoder->input_type)); + MoCopyMediaType(&decoder->input_type, type); + if (decoder->wg_transform) { wg_transform_destroy(decoder->wg_transform); @@ -760,8 +770,8 @@ static HRESULT WINAPI media_object_SetOutputType(IMediaObject *iface, DWORD inde { struct wma_decoder *decoder = impl_from_IMediaObject(iface); struct wg_transform_attrs attrs = {0}; - struct wg_format wg_format; unsigned int i; + HRESULT hr; TRACE("iface %p, index %lu, type %p, flags %#lx,\n", iface, index, type, flags); @@ -772,7 +782,8 @@ static HRESULT WINAPI media_object_SetOutputType(IMediaObject *iface, DWORD inde { if (flags != DMO_SET_TYPEF_CLEAR) return E_INVALIDARG; - memset(&decoder->output_format, 0, sizeof(decoder->output_format)); + MoFreeMediaType(&decoder->output_type); + memset(&decoder->output_type, 0, sizeof(decoder->output_type)); if (decoder->wg_transform) { wg_transform_destroy(decoder->wg_transform); @@ -794,18 +805,14 @@ static HRESULT WINAPI media_object_SetOutputType(IMediaObject *iface, DWORD inde if (i == ARRAY_SIZE(wma_decoder_output_types)) return DMO_E_TYPE_NOT_ACCEPTED; - - if (!amt_to_wg_format((const AM_MEDIA_TYPE *)type, &wg_format)) - return DMO_E_TYPE_NOT_ACCEPTED; - assert(wg_format.major_type == WG_MAJOR_TYPE_AUDIO); - - if (decoder->input_format.major_type == WG_MAJOR_TYPE_UNKNOWN) + if (IsEqualGUID(&decoder->input_type.majortype, &GUID_NULL)) return DMO_E_TYPE_NOT_SET; - if (flags & DMO_SET_TYPEF_TEST_ONLY) return S_OK; - decoder->output_format = wg_format; + MoFreeMediaType(&decoder->output_type); + memset(&decoder->output_type, 0, sizeof(decoder->output_type)); + MoCopyMediaType(&decoder->output_type, type); /* Set up wg_transform. 
*/ if (decoder->wg_transform) @@ -813,22 +820,41 @@ static HRESULT WINAPI media_object_SetOutputType(IMediaObject *iface, DWORD inde wg_transform_destroy(decoder->wg_transform); decoder->wg_transform = 0; } - if (!(decoder->wg_transform = wg_transform_create(&decoder->input_format, &decoder->output_format, &attrs))) - return E_FAIL; + if (FAILED(hr = wg_transform_create_quartz(&decoder->input_type, &decoder->output_type, + &attrs, &decoder->wg_transform))) + return hr; return S_OK; } static HRESULT WINAPI media_object_GetInputCurrentType(IMediaObject *iface, DWORD index, DMO_MEDIA_TYPE *type) { - FIXME("iface %p, index %lu, type %p stub!\n", iface, index, type); - return E_NOTIMPL; + struct wma_decoder *decoder = impl_from_IMediaObject(iface); + + TRACE("iface %p, index %lu, type %p\n", iface, index, type); + + if (index) + return DMO_E_INVALIDSTREAMINDEX; + if (IsEqualGUID(&decoder->input_type.majortype, &GUID_NULL)) + return DMO_E_TYPE_NOT_SET; + if (!type) + return E_POINTER; + return MoCopyMediaType(type, &decoder->input_type); } static HRESULT WINAPI media_object_GetOutputCurrentType(IMediaObject *iface, DWORD index, DMO_MEDIA_TYPE *type) { - FIXME("iface %p, index %lu, type %p stub!\n", iface, index, type); - return E_NOTIMPL; + struct wma_decoder *decoder = impl_from_IMediaObject(iface); + + TRACE("iface %p, index %lu, type %p\n", iface, index, type); + + if (index) + return DMO_E_INVALIDSTREAMINDEX; + if (IsEqualGUID(&decoder->output_type.majortype, &GUID_NULL)) + return DMO_E_TYPE_NOT_SET; + if (!type) + return E_POINTER; + return MoCopyMediaType(type, &decoder->output_type); } static HRESULT WINAPI media_object_GetInputSizeInfo(IMediaObject *iface, DWORD index, DWORD *size, @@ -849,7 +875,7 @@ static HRESULT WINAPI media_object_GetOutputSizeInfo(IMediaObject *iface, DWORD return E_POINTER; if (index > 0) return DMO_E_INVALIDSTREAMINDEX; - if (decoder->output_format.major_type == WG_MAJOR_TYPE_UNKNOWN) + if (IsEqualGUID(&decoder->output_type.majortype, 
&GUID_NULL)) return DMO_E_TYPE_NOT_SET; *size = 8192; diff --git a/dlls/ws2_32/socket.c b/dlls/ws2_32/socket.c index 6aab249a1b8..eb84558cbac 100644 --- a/dlls/ws2_32/socket.c +++ b/dlls/ws2_32/socket.c @@ -1931,6 +1931,36 @@ int WINAPI getsockopt( SOCKET s, int level, int optname, char *optval, int *optl *optlen = 1; return server_getsockopt( s, IOCTL_AFD_WINE_GET_TCP_NODELAY, optval, optlen ); + case TCP_KEEPALIVE: + if (*optlen < sizeof(DWORD) || !optval) + { + *optlen = 0; + SetLastError( WSAEFAULT ); + return SOCKET_ERROR; + } + *optlen = sizeof(DWORD); + return server_getsockopt( s, IOCTL_AFD_WINE_GET_TCP_KEEPALIVE, optval, optlen ); + + case TCP_KEEPCNT: + if (*optlen < sizeof(DWORD) || !optval) + { + *optlen = 0; + SetLastError( WSAEFAULT ); + return SOCKET_ERROR; + } + *optlen = sizeof(DWORD); + return server_getsockopt( s, IOCTL_AFD_WINE_GET_TCP_KEEPCNT, optval, optlen ); + + case TCP_KEEPINTVL: + if (*optlen < sizeof(DWORD) || !optval) + { + *optlen = 0; + SetLastError( WSAEFAULT ); + return SOCKET_ERROR; + } + *optlen = sizeof(DWORD); + return server_getsockopt( s, IOCTL_AFD_WINE_GET_TCP_KEEPINTVL, optval, optlen ); + default: FIXME( "unrecognized TCP option %#x\n", optname ); SetLastError( WSAENOPROTOOPT ); @@ -3325,6 +3355,12 @@ int WINAPI setsockopt( SOCKET s, int level, int optname, const char *optval, int break; /* case NSPROTO_IPX */ case IPPROTO_TCP: + if (optlen < 0) + { + SetLastError(WSAENOBUFS); + return SOCKET_ERROR; + } + switch(optname) { case TCP_NODELAY: @@ -3336,6 +3372,33 @@ int WINAPI setsockopt( SOCKET s, int level, int optname, const char *optval, int value = *optval; return server_setsockopt( s, IOCTL_AFD_WINE_SET_TCP_NODELAY, (char*)&value, sizeof(value) ); + case TCP_KEEPALIVE: + if (optlen < sizeof(DWORD) || !optval) + { + SetLastError( WSAEFAULT ); + return SOCKET_ERROR; + } + value = *(DWORD*)optval; + return server_setsockopt( s, IOCTL_AFD_WINE_SET_TCP_KEEPALIVE, (char*)&value, sizeof(value) ); + + case TCP_KEEPCNT: + if 
(optlen < sizeof(DWORD) || !optval) + { + SetLastError( WSAEFAULT ); + return SOCKET_ERROR; + } + value = *(DWORD*)optval; + return server_setsockopt( s, IOCTL_AFD_WINE_SET_TCP_KEEPCNT, (char*)&value, sizeof(value) ); + + case TCP_KEEPINTVL: + if (optlen < sizeof(DWORD) || !optval) + { + SetLastError( WSAEFAULT ); + return SOCKET_ERROR; + } + value = *(DWORD*)optval; + return server_setsockopt( s, IOCTL_AFD_WINE_SET_TCP_KEEPINTVL, (char*)&value, sizeof(value) ); + default: FIXME("Unknown IPPROTO_TCP optname 0x%08x\n", optname); SetLastError(WSAENOPROTOOPT); diff --git a/dlls/ws2_32/tests/sock.c b/dlls/ws2_32/tests/sock.c index 807018efa2c..c390b186956 100644 --- a/dlls/ws2_32/tests/sock.c +++ b/dlls/ws2_32/tests/sock.c @@ -1193,6 +1193,7 @@ static void test_set_getsockopt(void) DWORD values[3]; BOOL accepts_large_value; BOOL bool_value; + BOOL allow_noprotoopt; /* for old windows only, must work on wine */ } test_optsize[] = { @@ -1210,6 +1211,9 @@ static void test_set_getsockopt(void) {AF_INET, SOCK_STREAM, SOL_SOCKET, SO_SNDTIMEO, FALSE, {1, 2, 4}, {0}, TRUE}, {AF_INET, SOCK_STREAM, SOL_SOCKET, SO_OPENTYPE, FALSE, {1, 2, 4}, {0}, TRUE}, {AF_INET, SOCK_STREAM, IPPROTO_TCP, TCP_NODELAY, TRUE, {1, 1, 1}, {0}, TRUE}, + {AF_INET, SOCK_STREAM, IPPROTO_TCP, TCP_KEEPALIVE, FALSE, {0, 0, 4}, {0}, TRUE}, + {AF_INET, SOCK_STREAM, IPPROTO_TCP, TCP_KEEPCNT, FALSE, {0, 0, 4}, {0}, FALSE, FALSE, TRUE}, /* win10+ */ + {AF_INET, SOCK_STREAM, IPPROTO_TCP, TCP_KEEPINTVL, FALSE, {0, 0, 4}, {0}, TRUE, FALSE, TRUE}, /* win10+ */ {AF_INET, SOCK_DGRAM, IPPROTO_IP, IP_MULTICAST_LOOP, TRUE, {1, 1, 4}, {0}, TRUE, TRUE}, {AF_INET, SOCK_DGRAM, IPPROTO_IP, IP_MULTICAST_TTL, TRUE, {1, 1, 4}, {0}, FALSE}, {AF_INET, SOCK_DGRAM, IPPROTO_IP, IP_PKTINFO, FALSE, {0, 0, 4}, {0}, TRUE, TRUE}, @@ -1453,6 +1457,36 @@ static void test_set_getsockopt(void) ok(!err, "getsockopt TCP_NODELAY failed\n"); ok(!value, "TCP_NODELAY should be 0\n"); + size = sizeof(DWORD); + value = 3600; + err = setsockopt(s, 
IPPROTO_TCP, TCP_KEEPALIVE, (char*)&value, 4); + ok(!err, "setsockopt TCP_KEEPALIVE failed\n"); + value = 0; + err = getsockopt(s, IPPROTO_TCP, TCP_KEEPALIVE, (char*)&value, &size); + ok(!err, "getsockopt TCP_KEEPALIVE failed\n"); + ok(value == 3600, "TCP_KEEPALIVE should be 3600, is %ld\n", value); + + /* TCP_KEEPCNT and TCP_KEEPINTVL are supported on win10 and later */ + value = 5; + err = setsockopt(s, IPPROTO_TCP, TCP_KEEPCNT, (char*)&value, 4); + ok(!err || broken(WSAGetLastError() == WSAENOPROTOOPT), + "setsockopt TCP_KEEPCNT failed: %d\n", WSAGetLastError()); + + if (!err) + { + value = 0; + err = getsockopt(s, IPPROTO_TCP, TCP_KEEPCNT, (char*)&value, &size); + ok(!err, "getsockopt TCP_KEEPCNT failed\n"); + ok(value == 5, "TCP_KEEPCNT should be 5, is %ld\n", value); + + err = setsockopt(s, IPPROTO_TCP, TCP_KEEPINTVL, (char*)&value, 4); + ok(!err, "setsockopt TCP_KEEPINTVL failed\n"); + value = 0; + err = getsockopt(s, IPPROTO_TCP, TCP_KEEPINTVL, (char*)&value, &size); + ok(!err, "getsockopt TCP_KEEPINTVL failed\n"); + ok(value == 5, "TCP_KEEPINTVL should be 5, is %ld\n", value); + } + /* Test for erroneously passing a value instead of a pointer as optval */ size = sizeof(char); err = setsockopt(s, SOL_SOCKET, SO_DONTROUTE, (char *)1, size); @@ -1517,7 +1551,15 @@ static void test_set_getsockopt(void) size = sizeof(save_value); err = getsockopt(s2, test_optsize[i].level, test_optsize[i].optname, (char*)&save_value, &size); - ok(!err, "Unexpected getsockopt result %d.\n", err); + ok(!err || broken(test_optsize[i].allow_noprotoopt && WSAGetLastError() == WSAENOPROTOOPT), + "Unexpected getsockopt result %d.\n", err); + + if (err) + { + closesocket(s2); + winetest_pop_context(); + continue; + } value64 = 0xffffffff00000001; err = setsockopt(s2, test_optsize[i].level, test_optsize[i].optname, (char *)&value64, sizeof(value64)); @@ -3145,6 +3187,7 @@ static void test_UDP(void) struct sock_info peer[NUM_UDP_PEERS]; char buf[16]; int ss, i, n_recv, n_sent, ret; + 
struct sockaddr_in6 addr6; struct sockaddr_in addr; int sock; struct send_udp_thread_param udp_thread_param; @@ -3229,6 +3272,26 @@ static void test_UDP(void) CloseHandle( udp_thread_param.start_event ); closesocket(sock); + + /* Test sending to port 0. */ + sock = socket( AF_INET, SOCK_DGRAM, IPPROTO_UDP ); + ok( sock != INVALID_SOCKET, "got error %u.\n", WSAGetLastError() ); + memset( &addr, 0, sizeof(addr) ); + addr.sin_family = AF_INET; + addr.sin_addr.s_addr = inet_addr( "127.0.0.1" ); + ret = sendto( sock, buf, sizeof(buf), 0, (struct sockaddr *)&addr, sizeof(addr) ); + ok( ret == sizeof(buf), "got ret %d, error %u.\n", ret, WSAGetLastError() ); + closesocket(sock); + + sock = socket( AF_INET6, SOCK_DGRAM, 0 ); + ok( sock != INVALID_SOCKET, "got error %u.\n", WSAGetLastError() ); + memset( &addr6, 0, sizeof(addr6) ); + addr6.sin6_family = AF_INET6; + ret = inet_pton( AF_INET6, "::1", &addr6.sin6_addr ); + ok( ret, "got error %u.\n", WSAGetLastError() ); + ret = sendto( sock, buf, sizeof(buf), 0, (struct sockaddr *)&addr6, sizeof(addr6) ); + ok( ret == sizeof(buf), "got ret %d, error %u.\n", ret, WSAGetLastError() ); + closesocket(sock); } static void test_WSASocket(void) diff --git a/include/mfapi.h b/include/mfapi.h index 53fb5052e6e..86fdcb2b8e4 100644 --- a/include/mfapi.h +++ b/include/mfapi.h @@ -507,7 +507,11 @@ typedef enum struct tagVIDEOINFOHEADER; typedef struct tagVIDEOINFOHEADER VIDEOINFOHEADER; +struct tagVIDEOINFOHEADER2; +typedef struct tagVIDEOINFOHEADER2 VIDEOINFOHEADER2; typedef struct _AMMediaType AM_MEDIA_TYPE; +typedef struct tagMPEG1VIDEOINFO MPEG1VIDEOINFO; +typedef struct tagMPEG2VIDEOINFO MPEG2VIDEOINFO; HRESULT WINAPI MFAddPeriodicCallback(MFPERIODICCALLBACK callback, IUnknown *context, DWORD *key); HRESULT WINAPI MFAllocateSerialWorkQueue(DWORD target_queue, DWORD *queue); @@ -552,6 +556,7 @@ HRESULT WINAPI MFCreateMediaTypeFromRepresentation(GUID guid_representation, voi HRESULT WINAPI MFCreateSample(IMFSample **sample); HRESULT 
WINAPI MFCreateTempFile(MF_FILE_ACCESSMODE accessmode, MF_FILE_OPENMODE openmode, MF_FILE_FLAGS flags, IMFByteStream **bytestream); +HRESULT WINAPI MFCreateVideoMediaType(const MFVIDEOFORMAT *format, IMFVideoMediaType **media_type); HRESULT WINAPI MFCreateVideoMediaTypeFromSubtype(const GUID *subtype, IMFVideoMediaType **media_type); #ifdef _KSMEDIA_ @@ -593,8 +598,15 @@ BOOL WINAPI MFIsFormatYUV(DWORD format); HRESULT WINAPI MFInitAttributesFromBlob(IMFAttributes *attributes, const UINT8 *buffer, UINT size); HRESULT WINAPI MFInitAMMediaTypeFromMFMediaType(IMFMediaType *media_type, GUID format, AM_MEDIA_TYPE *am_type); HRESULT WINAPI MFInitMediaTypeFromAMMediaType(IMFMediaType *mediatype, const AM_MEDIA_TYPE *am_type); +HRESULT WINAPI MFInitMediaTypeFromMFVideoFormat(IMFMediaType *media_type, const MFVIDEOFORMAT *format, UINT32 size); HRESULT WINAPI MFInitMediaTypeFromVideoInfoHeader(IMFMediaType *media_type, const VIDEOINFOHEADER *vih, UINT32 size, const GUID *subtype); +HRESULT WINAPI MFInitMediaTypeFromVideoInfoHeader2(IMFMediaType *media_type, const VIDEOINFOHEADER2 *vih, + UINT32 size, const GUID *subtype); +HRESULT WINAPI MFInitMediaTypeFromMPEG1VideoInfo(IMFMediaType *media_type, const MPEG1VIDEOINFO *vih, + UINT32 size, const GUID *subtype); +HRESULT WINAPI MFInitMediaTypeFromMPEG2VideoInfo(IMFMediaType *media_type, const MPEG2VIDEOINFO *vih, + UINT32 size, const GUID *subtype); HRESULT WINAPI MFInitMediaTypeFromWaveFormatEx(IMFMediaType *mediatype, const WAVEFORMATEX *format, UINT32 size); HRESULT WINAPI MFInitVideoFormat_RGB(MFVIDEOFORMAT *format, DWORD width, DWORD height, DWORD d3dformat); HRESULT WINAPI MFInvokeCallback(IMFAsyncResult *result); diff --git a/include/mfidl.idl b/include/mfidl.idl index b3201b12d84..666af53c6e3 100644 --- a/include/mfidl.idl +++ b/include/mfidl.idl @@ -131,6 +131,13 @@ typedef enum _MF_VIDEO_PROCESSOR_ALGORITHM_TYPE MF_VIDEO_PROCESSOR_ALGORITHM_MRF_CRF_444 = 1, } MF_VIDEO_PROCESSOR_ALGORITHM_TYPE; +typedef enum 
MFTOPOLOGY_DXVA_MODE +{ + MFTOPOLOGY_DXVA_DEFAULT = 0, + MFTOPOLOGY_DXVA_NONE = 1, + MFTOPOLOGY_DXVA_FULL = 2, +} MFTOPOLOGY_DXVA_MODE; + [ object, uuid(2eb1e945-18b8-4139-9b1a-d5d584818530), diff --git a/include/wine/afd.h b/include/wine/afd.h index 788adb4a495..aba559ebd8a 100644 --- a/include/wine/afd.h +++ b/include/wine/afd.h @@ -285,6 +285,12 @@ C_ASSERT( sizeof(struct afd_get_events_params) == 56 ); #define IOCTL_AFD_WINE_SET_IP_RECVTOS WINE_AFD_IOC(296) #define IOCTL_AFD_WINE_GET_SO_EXCLUSIVEADDRUSE WINE_AFD_IOC(297) #define IOCTL_AFD_WINE_SET_SO_EXCLUSIVEADDRUSE WINE_AFD_IOC(298) +#define IOCTL_AFD_WINE_GET_TCP_KEEPALIVE WINE_AFD_IOC(299) +#define IOCTL_AFD_WINE_SET_TCP_KEEPALIVE WINE_AFD_IOC(300) +#define IOCTL_AFD_WINE_GET_TCP_KEEPCNT WINE_AFD_IOC(301) +#define IOCTL_AFD_WINE_SET_TCP_KEEPCNT WINE_AFD_IOC(302) +#define IOCTL_AFD_WINE_GET_TCP_KEEPINTVL WINE_AFD_IOC(303) +#define IOCTL_AFD_WINE_SET_TCP_KEEPINTVL WINE_AFD_IOC(304) struct afd_iovec { diff --git a/include/ws2ipdef.h b/include/ws2ipdef.h index fcb1f56c005..72e2dad2fa5 100644 --- a/include/ws2ipdef.h +++ b/include/ws2ipdef.h @@ -313,6 +313,8 @@ typedef struct WS(in6_pktinfo) { #define TCP_OFFLOAD_PREFERENCE 11 #define TCP_CONGESTION_ALGORITHM 12 #define TCP_DELAY_FIN_ACK 13 +#define TCP_KEEPCNT 16 +#define TCP_KEEPINTVL 17 #else /* WS_TCP_NODELAY is defined elsewhere */ #define WS_TCP_EXPEDITED_1122 2 @@ -327,6 +329,8 @@ typedef struct WS(in6_pktinfo) { #define WS_TCP_OFFLOAD_PREFERENCE 11 #define WS_TCP_CONGESTION_ALGORITHM 12 #define WS_TCP_DELAY_FIN_ACK 13 +#define WS_TCP_KEEPCNT 16 +#define WS_TCP_KEEPINTVL 17 #endif /* USE_WS_PREFIX */ #define PROTECTION_LEVEL_UNRESTRICTED 10 diff --git a/server/fsync.c b/server/fsync.c index dc50aa0a1f3..8097b86fcea 100644 --- a/server/fsync.c +++ b/server/fsync.c @@ -59,7 +59,7 @@ int do_fsync(void) if (do_fsync_cached == -1) { syscall( __NR_futex_waitv, 0, 0, 0, 0, 0); - do_fsync_cached = getenv("WINEFSYNC") && atoi(getenv("WINEFSYNC")) && errno != 
ENOSYS; + do_fsync_cached = getenv("WINEFSYNC") && atoi(getenv("WINEFSYNC")) && errno != ENOSYS && errno != EPERM; } return do_fsync_cached;