src/output: add algorithm for finding usable AudioFormat

* Use PcmExport for 24bit packed output
Shen-Ta Hsieh 2020-12-02 07:59:33 +08:00 committed by Max Kellermann
parent 6f77af20d0
commit da642b2890
2 changed files with 257 additions and 180 deletions
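In short, instead of filling in a single WAVEFORMATEXTENSIBLE and hoping the device accepts it, the plugin now builds a list of candidate formats and probes them in preference order until IsFormatSupported() accepts one; 24-bit input is additionally routed through PcmExport so it can be delivered either packed or padded to 32 bit. A minimal sketch of that probing idea (not MPD code; the Candidate struct and the predicate are made up for illustration):

    #include <optional>
    #include <vector>

    struct Candidate { unsigned rate, bits, valid_bits, channels; };

    template <typename IsSupported>
    std::optional<Candidate> FindUsable(const std::vector<Candidate> &candidates,
                                        IsSupported &&is_supported) {
            for (const auto &c : candidates)   // preferred format first, cheaper fallbacks later
                    if (is_supported(c))       // e.g. the device's IsFormatSupported() succeeded
                            return c;
            return std::nullopt;               // caller falls back to another strategy
    }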

NEWS

@@ -5,6 +5,8 @@ ver 0.22.7 (not yet released)
   - ffmpeg: fix build problem with FFmpeg 3.4
 * storage
   - curl: don't use glibc extension
+* output
+  - wasapi: add algorithm for finding usable audio format
 
 ver 0.22.6 (2021/02/16)
 * fix missing tags on songs in queue

src/output/plugins/wasapi/WasapiOutputPlugin.cxx

@@ -23,11 +23,13 @@
 #include "lib/icu/Win32.hxx"
 #include "mixer/MixerList.hxx"
 #include "output/Error.hxx"
+#include "pcm/Export.hxx"
 #include "thread/Cond.hxx"
 #include "thread/Mutex.hxx"
 #include "thread/Name.hxx"
 #include "thread/Thread.hxx"
 #include "util/AllocatedString.hxx"
+#include "util/ConstBuffer.hxx"
 #include "util/Domain.hxx"
 #include "util/RuntimeError.hxx"
 #include "util/ScopeExit.hxx"
@@ -94,23 +96,38 @@ inline bool SafeSilenceTry(Functor &&functor) {
 	}
 }
 
-inline void SetFormat(WAVEFORMATEXTENSIBLE &device_format,
-		      const AudioFormat &audio_format) noexcept {
-	device_format.dwChannelMask = GetChannelMask(audio_format.channels);
-	device_format.Format.wFormatTag = WAVE_FORMAT_EXTENSIBLE;
-	device_format.Format.nChannels = audio_format.channels;
-	device_format.Format.nSamplesPerSec = audio_format.sample_rate;
-	device_format.Format.nBlockAlign = audio_format.GetFrameSize();
-	device_format.Format.nAvgBytesPerSec =
-		audio_format.sample_rate * audio_format.GetFrameSize();
-	device_format.Format.wBitsPerSample = audio_format.GetSampleSize() * 8;
-	device_format.Format.cbSize = sizeof(WAVEFORMATEXTENSIBLE) - sizeof(WAVEFORMATEX);
-	device_format.Samples.wValidBitsPerSample = audio_format.GetSampleSize() * 8;
-	if (audio_format.format == SampleFormat::FLOAT) {
-		device_format.SubFormat = KSDATAFORMAT_SUBTYPE_IEEE_FLOAT;
-	} else {
-		device_format.SubFormat = KSDATAFORMAT_SUBTYPE_PCM;
-	}
+std::vector<WAVEFORMATEXTENSIBLE> GetFormats(const AudioFormat &audio_format) noexcept {
+	std::vector<WAVEFORMATEXTENSIBLE> Result;
+	if (audio_format.format == SampleFormat::S24_P32) {
+		Result.resize(2);
+		Result[0].Format.wBitsPerSample = 24;
+		Result[0].Samples.wValidBitsPerSample = 24;
+		Result[1].Format.wBitsPerSample = 32;
+		Result[1].Samples.wValidBitsPerSample = 24;
+	} else {
+		Result.resize(1);
+		Result[0].Format.wBitsPerSample = audio_format.GetSampleSize() * 8;
+		Result[0].Samples.wValidBitsPerSample = audio_format.GetSampleSize() * 8;
+	}
+	const DWORD mask = GetChannelMask(audio_format.channels);
+	const GUID guid = audio_format.format == SampleFormat::FLOAT
+				  ? KSDATAFORMAT_SUBTYPE_IEEE_FLOAT
+				  : KSDATAFORMAT_SUBTYPE_PCM;
+	for (auto &device_format : Result) {
+		device_format.dwChannelMask = mask;
+		device_format.Format.wFormatTag = WAVE_FORMAT_EXTENSIBLE;
+		device_format.Format.nChannels = audio_format.channels;
+		device_format.Format.nSamplesPerSec = audio_format.sample_rate;
+		device_format.Format.cbSize =
+			sizeof(WAVEFORMATEXTENSIBLE) - sizeof(WAVEFORMATEX);
+		device_format.SubFormat = guid;
+		device_format.Format.nBlockAlign = device_format.Format.nChannels *
						   device_format.Format.wBitsPerSample /
						   8;
+		device_format.Format.nAvgBytesPerSec =
+			audio_format.sample_rate * device_format.Format.nBlockAlign;
+	}
+	return Result;
 }
 
 #ifdef ENABLE_DSD
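GetFormats() yields a single candidate for most requests, but two for S24_P32: packed 24-bit samples first, then 24 valid bits in a 32-bit container. For illustration only (not part of the commit), the field values this produces for a 44100 Hz, 2-channel S24_P32 stream, following the loop above (requires <cassert>):

    const auto formats = GetFormats(audio_format);
    assert(formats.size() == 2);
    // candidate 0: packed 24-bit
    assert(formats[0].Format.wBitsPerSample == 24);
    assert(formats[0].Samples.wValidBitsPerSample == 24);
    assert(formats[0].Format.nBlockAlign == 6);          // 2 ch * 24 bit / 8
    assert(formats[0].Format.nAvgBytesPerSec == 264600); // 44100 * 6
    // candidate 1: 24 valid bits in a 32-bit container
    assert(formats[1].Format.wBitsPerSample == 32);
    assert(formats[1].Samples.wValidBitsPerSample == 24);
    assert(formats[1].Format.nBlockAlign == 8);          // 2 ch * 32 bit / 8
    assert(formats[1].Format.nAvgBytesPerSec == 352800); // 44100 * 8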
@@ -213,6 +230,7 @@ private:
 	WAVEFORMATEXTENSIBLE device_format;
 	std::optional<WasapiOutputThread> thread;
 	std::size_t watermark;
+	std::optional<PcmExport> pcm_export;
 
 	friend bool wasapi_is_exclusive(WasapiOutput &output) noexcept;
 	friend IMMDevice *wasapi_output_get_device(WasapiOutput &output) noexcept;
@@ -222,6 +240,7 @@ private:
 	void DoOpen(AudioFormat &audio_format);
 
 	void OpenDevice();
+	bool TryFormatExclusive(const AudioFormat &audio_format);
 	void FindExclusiveFormatSupported(AudioFormat &audio_format);
 	void FindSharedFormatSupported(AudioFormat &audio_format);
 	void EnumerateDevices();
@@ -259,29 +278,28 @@ void WasapiOutputThread::Work() noexcept {
 			return;
 		}
 
-		HRESULT result;
-		UINT32 data_in_frames;
-		result = client->GetCurrentPadding(&data_in_frames);
-		if (FAILED(result)) {
-			throw FormatHResultError(result,
-						 "Failed to get current padding");
-		}
-
 		UINT32 write_in_frames = buffer_size_in_frames;
 		if (!is_exclusive) {
+			UINT32 data_in_frames;
+			if (HRESULT result =
+				    client->GetCurrentPadding(&data_in_frames);
+			    FAILED(result)) {
+				throw FormatHResultError(
+					result, "Failed to get current padding");
+			}
+
 			if (data_in_frames >= buffer_size_in_frames) {
 				continue;
 			}
 			write_in_frames -= data_in_frames;
-		} else if (data_in_frames >= buffer_size_in_frames * 2) {
-			continue;
 		}
 
 		BYTE *data;
 		DWORD mode = 0;
-		result = render_client->GetBuffer(write_in_frames, &data);
-		if (FAILED(result)) {
+		if (HRESULT result =
+			    render_client->GetBuffer(write_in_frames, &data);
+		    FAILED(result)) {
 			throw FormatHResultError(result, "Failed to get buffer");
 		}
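The render thread now queries the device padding only in shared mode; in event-driven exclusive mode each event means the whole device buffer can be rewritten. A worked example of the shared-mode accounting, with hypothetical numbers (not from the commit):

    UINT32 write_in_frames = buffer_size_in_frames;  // e.g. 4410 frames
    if (!is_exclusive) {
            UINT32 data_in_frames = 1000;            // hypothetical padding still queued
            write_in_frames -= data_in_frames;       // GetBuffer() is asked for 3410 frames
    }
    // exclusive mode: the full 4410 frames are requested on every event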
@@ -338,10 +356,6 @@ void WasapiOutput::DoDisable() noexcept {
 
 /// run inside COMWorkerThread
 void WasapiOutput::DoOpen(AudioFormat &audio_format) {
-	if (audio_format.channels == 0) {
-		throw FormatInvalidArgument("channels should > 0");
-	}
-
 	client.reset();
 
 	DWORD state;
@@ -353,16 +367,12 @@ void WasapiOutput::DoOpen(AudioFormat &audio_format) {
 		OpenDevice();
 	}
 
-	HRESULT result;
-	result = device->Activate(__uuidof(IAudioClient), CLSCTX_ALL, nullptr,
-				  client.AddressCast());
-	if (FAILED(result)) {
+	if (HRESULT result = device->Activate(__uuidof(IAudioClient), CLSCTX_ALL, nullptr,
+					      client.AddressCast());
+	    FAILED(result)) {
 		throw FormatHResultError(result, "Unable to activate audio client");
 	}
 
-	if (audio_format.format == SampleFormat::S24_P32) {
-		audio_format.format = SampleFormat::S32;
-	}
-
 	if (audio_format.channels > 8) {
 		audio_format.channels = 8;
 	}
@@ -378,6 +388,24 @@ void WasapiOutput::DoOpen(AudioFormat &audio_format) {
 	} else {
 		FindSharedFormatSupported(audio_format);
 	}
+	bool require_export = audio_format.format == SampleFormat::S24_P32;
+	if (require_export) {
+		PcmExport::Params params;
+		params.dsd_mode = PcmExport::DsdMode::NONE;
+		params.shift8 = false;
+		params.pack24 = false;
+		if (device_format.Format.wBitsPerSample == 32 &&
+		    device_format.Samples.wValidBitsPerSample == 24) {
+			params.shift8 = true;
+		}
+		if (device_format.Format.wBitsPerSample == 24) {
+			params.pack24 = true;
+		}
+		FormatDebug(wasapi_output_domain, "Packing data: shift8=%d pack24=%d",
+			    int(params.shift8), int(params.pack24));
+		pcm_export.emplace();
+		pcm_export->Open(audio_format.format, audio_format.channels, params);
+	}
 
 	using s = std::chrono::seconds;
 	using ms = std::chrono::milliseconds;
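The PcmExport parameters follow from whichever device format the search settled on: pack24 when the device takes packed 3-byte samples, shift8 when it takes 24 valid bits MSB-aligned in a 32-bit container (the commit sets at most one of the two). A worked sketch of what that means for one sample, assuming PcmExport's usual semantics and little-endian byte order (not MPD code):

    #include <cstdint>
    #include <cstring>

    // One S24_P32 sample, 0x00123456, arrives as the bytes 56 34 12 00.
    void export_sample(uint32_t s, uint8_t out[4], bool shift8, bool pack24) {
            if (shift8)
                    s <<= 8;                 // 0x00123456 -> 0x12345600 (24-in-32, MSB-aligned)
            if (pack24) {
                    std::memcpy(out, &s, 3); // 56 34 12   (3 bytes per sample)
                    return;
            }
            std::memcpy(out, &s, 4);         // 00 56 34 12 with shift8, else 56 34 12 00
    }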
@@ -385,78 +413,95 @@ void WasapiOutput::DoOpen(AudioFormat &audio_format) {
 	using hundred_ns = std::chrono::duration<uint64_t, std::ratio<1, 10000000>>;
 
 	// The unit in REFERENCE_TIME is hundred nanoseconds
-	REFERENCE_TIME device_period;
-	result = client->GetDevicePeriod(&device_period, nullptr);
-	if (FAILED(result)) {
+	REFERENCE_TIME default_device_period, min_device_period;
+
+	if (HRESULT result =
+		    client->GetDevicePeriod(&default_device_period, &min_device_period);
+	    FAILED(result)) {
 		throw FormatHResultError(result, "Unable to get device period");
 	}
-	FormatDebug(wasapi_output_domain, "Device period: %I64u ns",
-		    size_t(ns(hundred_ns(device_period)).count()));
+	FormatDebug(wasapi_output_domain,
+		    "Default device period: %I64u ns, Minimum device period: "
+		    "%I64u ns",
+		    ns(hundred_ns(default_device_period)).count(),
+		    ns(hundred_ns(min_device_period)).count());
 
-	REFERENCE_TIME buffer_duration = device_period;
-	if (!Exclusive()) {
+	REFERENCE_TIME buffer_duration;
+	if (Exclusive()) {
+		buffer_duration = default_device_period;
+	} else {
 		const REFERENCE_TIME align = hundred_ns(ms(50)).count();
-		buffer_duration = (align / device_period) * device_period;
+		buffer_duration = (align / default_device_period) * default_device_period;
 	}
 	FormatDebug(wasapi_output_domain, "Buffer duration: %I64u ns",
 		    size_t(ns(hundred_ns(buffer_duration)).count()));
 
 	if (Exclusive()) {
-		result = client->Initialize(
-			AUDCLNT_SHAREMODE_EXCLUSIVE,
-			AUDCLNT_STREAMFLAGS_NOPERSIST | AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
-			buffer_duration, buffer_duration,
-			reinterpret_cast<WAVEFORMATEX *>(&device_format), nullptr);
-		if (result == AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED) {
-			// https://docs.microsoft.com/en-us/windows/win32/api/audioclient/nf-audioclient-iaudioclient-initialize
-			UINT32 buffer_size_in_frames = 0;
-			result = client->GetBufferSize(&buffer_size_in_frames);
-			if (FAILED(result)) {
-				throw FormatHResultError(
-					result, "Unable to get audio client buffer size");
-			}
-			buffer_duration = std::ceil(
-				double(buffer_size_in_frames * hundred_ns(s(1)).count()) /
-				SampleRate());
-			FormatDebug(wasapi_output_domain,
-				    "Aligned buffer duration: %I64u ns",
-				    size_t(ns(hundred_ns(buffer_duration)).count()));
-			client.reset();
-			result = device->Activate(__uuidof(IAudioClient), CLSCTX_ALL,
-						  nullptr, client.AddressCast());
-			if (FAILED(result)) {
-				throw FormatHResultError(
-					result, "Unable to activate audio client");
-			}
-			result = client->Initialize(
-				AUDCLNT_SHAREMODE_EXCLUSIVE,
-				AUDCLNT_STREAMFLAGS_NOPERSIST |
-					AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
-				buffer_duration, buffer_duration,
-				reinterpret_cast<WAVEFORMATEX *>(&device_format),
-				nullptr);
+		if (HRESULT result = client->Initialize(
+			    AUDCLNT_SHAREMODE_EXCLUSIVE,
+			    AUDCLNT_STREAMFLAGS_EVENTCALLBACK, buffer_duration,
+			    buffer_duration,
+			    reinterpret_cast<WAVEFORMATEX *>(&device_format), nullptr);
+		    FAILED(result)) {
+			if (result == AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED) {
+				// https://docs.microsoft.com/en-us/windows/win32/api/audioclient/nf-audioclient-iaudioclient-initialize
+				UINT32 buffer_size_in_frames = 0;
+				result = client->GetBufferSize(&buffer_size_in_frames);
+				if (FAILED(result)) {
+					throw FormatHResultError(
+						result,
+						"Unable to get audio client buffer size");
+				}
+				buffer_duration =
+					std::ceil(double(buffer_size_in_frames *
							 hundred_ns(s(1)).count()) /
						  SampleRate());
+				FormatDebug(
+					wasapi_output_domain,
+					"Aligned buffer duration: %I64u ns",
+					size_t(ns(hundred_ns(buffer_duration)).count()));
+				client.reset();
+				result = device->Activate(__uuidof(IAudioClient),
							  CLSCTX_ALL, nullptr,
							  client.AddressCast());
+				if (FAILED(result)) {
+					throw FormatHResultError(
+						result,
+						"Unable to activate audio client");
+				}
+				result = client->Initialize(
+					AUDCLNT_SHAREMODE_EXCLUSIVE,
+					AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
+					buffer_duration, buffer_duration,
+					reinterpret_cast<WAVEFORMATEX *>(&device_format),
+					nullptr);
+			}
+
+			if (FAILED(result)) {
+				throw FormatHResultError(
+					result, "Unable to initialize audio client");
+			}
 		}
 	} else {
-		result = client->Initialize(
-			AUDCLNT_SHAREMODE_SHARED,
-			AUDCLNT_STREAMFLAGS_NOPERSIST | AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
-			buffer_duration, 0,
-			reinterpret_cast<WAVEFORMATEX *>(&device_format), nullptr);
-	}
-
-	if (FAILED(result)) {
-		throw FormatHResultError(result, "Unable to initialize audio client");
+		if (HRESULT result = client->Initialize(
+			    AUDCLNT_SHAREMODE_SHARED, AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
+			    buffer_duration, 0,
+			    reinterpret_cast<WAVEFORMATEX *>(&device_format), nullptr);
+		    FAILED(result)) {
+			throw FormatHResultError(result,
+						 "Unable to initialize audio client");
+		}
 	}
 
 	ComPtr<IAudioRenderClient> render_client;
-	result = client->GetService(IID_PPV_ARGS(render_client.Address()));
-	if (FAILED(result)) {
+	if (HRESULT result = client->GetService(IID_PPV_ARGS(render_client.Address()));
+	    FAILED(result)) {
 		throw FormatHResultError(result, "Unable to get new render client");
 	}
 
 	UINT32 buffer_size_in_frames;
-	result = client->GetBufferSize(&buffer_size_in_frames);
-	if (FAILED(result)) {
+	if (HRESULT result = client->GetBufferSize(&buffer_size_in_frames);
+	    FAILED(result)) {
 		throw FormatHResultError(result,
 					 "Unable to get audio client buffer size");
 	}
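Worked numbers for the REFERENCE_TIME arithmetic above, using hypothetical values (not from the commit); REFERENCE_TIME counts 100 ns units, so 50 ms is 500000:

    #include <cstdint>

    constexpr int64_t align = 500000;                  // 50 ms in 100 ns units
    constexpr int64_t default_device_period = 101587;  // ~10.16 ms, a typical device period
    constexpr int64_t shared_buffer =
            (align / default_device_period) * default_device_period;
    static_assert(shared_buffer == 406348);            // ~40.6 ms: a whole number of periods

    // exclusive-mode retry after AUDCLNT_E_BUFFER_SIZE_NOT_ALIGNED:
    constexpr int64_t frames = 4410, sample_rate = 44100;
    constexpr int64_t aligned_buffer = (frames * 10000000 + sample_rate - 1) / sample_rate;
    static_assert(aligned_buffer == 1000000);          // exactly 100 ms for the reported buffer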
@@ -465,8 +510,8 @@ void WasapiOutput::DoOpen(AudioFormat &audio_format) {
 	thread.emplace(client.get(), std::move(render_client), FrameSize(),
 		       buffer_size_in_frames, is_exclusive);
 
-	result = client->SetEventHandle(thread->event.handle());
-	if (FAILED(result)) {
+	if (HRESULT result = client->SetEventHandle(thread->event.handle());
+	    FAILED(result)) {
 		throw FormatHResultError(result, "Unable to set event handler");
 	}
@@ -474,19 +519,33 @@ void WasapiOutput::DoOpen(AudioFormat &audio_format) {
 }
 
 void WasapiOutput::Close() noexcept {
-	assert(client && thread);
+	assert(thread);
 
-	Pause();
+	try {
+		COMWorker::Async([&]() {
+			if (HRESULT result = client->Stop(); FAILED(result)) {
+				throw FormatHResultError(result, "Failed to stop client");
+			}
+		}).get();
+		thread->CheckException();
+	} catch (std::exception &err) {
+		FormatError(wasapi_output_domain, "exception while stoping: %s",
+			    err.what());
+	}
 
+	is_started = false;
 	thread->Finish();
 	thread->Join();
 	COMWorker::Async([&]() {
 		thread.reset();
 		client.reset();
 	}).get();
+	pcm_export.reset();
 }
 
 std::chrono::steady_clock::duration WasapiOutput::Delay() const noexcept {
-	if (!client || !is_started) {
-		return std::chrono::steady_clock::duration::zero();
+	if (!is_started) {
+		// idle while paused
+		return std::chrono::seconds(1);
 	}
 
 	assert(thread);
@@ -505,9 +564,16 @@ size_t WasapiOutput::Play(const void *chunk, size_t size) {
 	not_interrupted.test_and_set();
 
+	ConstBuffer<void> input(chunk, size);
+	if (pcm_export) {
+		input = pcm_export->Export(input);
+	}
+	if (input.empty())
+		return size;
+
 	do {
-		const size_t consumed_size =
-			thread->spsc_buffer.push(static_cast<const BYTE *>(chunk), size);
+		const size_t consumed_size = thread->spsc_buffer.push(
+			static_cast<const BYTE *>(input.data), input.size);
 		if (consumed_size == 0) {
 			assert(is_started);
 			thread->WaitDataPoped();
@@ -519,12 +585,9 @@ size_t WasapiOutput::Play(const void *chunk, size_t size) {
 		if (!is_started) {
 			is_started = true;
 			thread->Play();
 			COMWorker::Async([&]() {
-				HRESULT result;
-				result = client->Start();
-				if (FAILED(result)) {
+				if (HRESULT result = client->Start(); FAILED(result)) {
 					throw FormatHResultError(
 						result, "Failed to start client");
 				}
@@ -533,28 +596,19 @@ size_t WasapiOutput::Play(const void *chunk, size_t size) {
 
 		thread->CheckException();
 
+		if (pcm_export) {
+			return pcm_export->CalcInputSize(consumed_size);
+		}
 		return consumed_size;
 	} while (true);
 }
 
 bool WasapiOutput::Pause() {
-	if (!client || !thread) {
-		return false;
-	}
-	if (!is_started) {
-		return true;
-	}
-
-	HRESULT result;
-	result = client->Stop();
-	if (FAILED(result)) {
-		throw FormatHResultError(result, "Failed to stop client");
-	}
-	is_started = false;
-	thread->Pause();
-
+	if (is_started) {
+		thread->Pause();
+		is_started = false;
+	}
 	thread->CheckException();
 	return true;
 }
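Play() now pushes the exported bytes into the ring buffer, but must report progress to the caller in source bytes, which is what CalcInputSize() converts back to. An illustration only, assuming pack24 is active so every 4-byte S24_P32 source sample leaves Export() as 3 packed bytes (not MPD code):

    size_t consumed_device_bytes = 3000;                          // accepted by the SPSC ring buffer
    size_t reported_source_bytes = consumed_device_bytes * 4 / 3; // 4000, what CalcInputSize() is
                                                                  // expected to yield here
    // Play() then returns 4000, so the caller advances by whole source frames.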
@@ -603,70 +657,69 @@ void WasapiOutput::OpenDevice() {
 }
 
 /// run inside COMWorkerThread
-void WasapiOutput::FindExclusiveFormatSupported(AudioFormat &audio_format) {
-	SetFormat(device_format, audio_format);
-
-	do {
-		HRESULT result;
-		result = client->IsFormatSupported(
-			AUDCLNT_SHAREMODE_EXCLUSIVE,
-			reinterpret_cast<WAVEFORMATEX *>(&device_format), nullptr);
-
-		switch (result) {
-		case S_OK:
-			return;
-		case AUDCLNT_E_UNSUPPORTED_FORMAT:
-			break;
-		default:
-			throw FormatHResultError(result, "IsFormatSupported failed");
-		}
-
-		// Trying PCM fallback.
-		if (audio_format.format == SampleFormat::FLOAT) {
-			audio_format.format = SampleFormat::S32;
-			continue;
-		}
-
-		// Trying sample rate fallback.
-		if (audio_format.sample_rate > 96000) {
-			audio_format.sample_rate = 96000;
-			continue;
-		}
-
-		if (audio_format.sample_rate > 88200) {
-			audio_format.sample_rate = 88200;
-			continue;
-		}
-
-		if (audio_format.sample_rate > 64000) {
-			audio_format.sample_rate = 64000;
-			continue;
-		}
-
-		if (audio_format.sample_rate > 48000) {
-			audio_format.sample_rate = 48000;
-			continue;
-		}
-
-		// Trying 2 channels fallback.
-		if (audio_format.channels > 2) {
-			audio_format.channels = 2;
-			continue;
-		}
-
-		// Trying S16 fallback.
-		if (audio_format.format == SampleFormat::S32) {
-			audio_format.format = SampleFormat::S16;
-			continue;
-		}
-
-		if (audio_format.sample_rate > 41100) {
-			audio_format.sample_rate = 41100;
-			continue;
-		}
-
-		throw FormatHResultError(result, "Format is not supported");
-	} while (true);
+bool WasapiOutput::TryFormatExclusive(const AudioFormat &audio_format) {
+	for (auto test_format : GetFormats(audio_format)) {
+		HRESULT result = client->IsFormatSupported(
+			AUDCLNT_SHAREMODE_EXCLUSIVE,
+			reinterpret_cast<WAVEFORMATEX *>(&test_format), nullptr);
+		const auto format_string = ToString(audio_format);
+		const auto result_string = std::string(HRESULTToString(result));
+		FormatDebug(wasapi_output_domain, "Trying %s %lu %u-%u (exclusive) -> %s",
+			    format_string.c_str(), test_format.Format.nSamplesPerSec,
+			    test_format.Format.wBitsPerSample,
+			    test_format.Samples.wValidBitsPerSample,
+			    result_string.c_str());
+		if (SUCCEEDED(result)) {
+			device_format = test_format;
+			return true;
+		}
+	}
+	return false;
+}
+
+/// run inside COMWorkerThread
+void WasapiOutput::FindExclusiveFormatSupported(AudioFormat &audio_format) {
+	for (uint8_t channels : {0, 2, 6, 8, 7, 1, 4, 5, 3}) {
+		if (audio_format.channels == channels) {
+			continue;
+		}
+		if (channels == 0) {
+			channels = audio_format.channels;
+		}
+		auto old_channels = std::exchange(audio_format.channels, channels);
+		for (uint32_t rate : {0, 384000, 352800, 192000, 176400, 96000, 88200,
+				      48000, 44100, 32000, 22050, 16000, 11025, 8000}) {
+			if (audio_format.sample_rate <= rate) {
+				continue;
+			}
+			if (rate == 0) {
+				rate = audio_format.sample_rate;
+			}
+			auto old_rate = std::exchange(audio_format.sample_rate, rate);
+			for (SampleFormat format : {
+				     SampleFormat::UNDEFINED,
+				     SampleFormat::S32,
+				     SampleFormat::S24_P32,
+				     SampleFormat::S16,
+				     SampleFormat::S8,
+			     }) {
+				if (audio_format.format == format) {
+					continue;
+				}
+				if (format == SampleFormat::UNDEFINED) {
+					format = audio_format.format;
+				}
+				auto old_format =
+					std::exchange(audio_format.format, format);
+				if (TryFormatExclusive(audio_format)) {
+					return;
+				}
+				audio_format.format = old_format;
+			}
+			audio_format.sample_rate = old_rate;
+		}
+		audio_format.channels = old_channels;
+	}
 }
 
 /// run inside COMWorkerThread
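The new exclusive-mode search walks three nested preference lists: channel counts (0 meaning "keep the requested count"), sample rates at or below the request, and sample formats from widest to narrowest (UNDEFINED meaning "keep the requested format"); each attempt is undone with std::exchange, and every candidate is expanded by GetFormats() and probed by TryFormatExclusive(). A small standalone sketch (not MPD code) that prints the (channels, rate) order tried for a hypothetical 44100 Hz stereo request:

    #include <cstdint>
    #include <cstdio>

    int main() {
            const uint8_t req_channels = 2;
            const uint32_t req_rate = 44100;
            for (uint8_t channels : {0, 2, 6, 8, 7, 1, 4, 5, 3}) {
                    if (channels == req_channels)
                            continue;                  // skip the duplicate of the request
                    if (channels == 0)
                            channels = req_channels;   // 0 stands for "requested channel count"
                    for (uint32_t rate : {0u, 384000u, 352800u, 192000u, 176400u,
                                          96000u, 88200u, 48000u, 44100u, 32000u,
                                          22050u, 16000u, 11025u, 8000u}) {
                            if (req_rate <= rate)
                                    continue;          // never go above the requested rate
                            if (rate == 0)
                                    rate = req_rate;   // 0 stands for "requested rate"
                            std::printf("%u ch @ %u Hz\n", unsigned(channels), unsigned(rate));
                    }
            }
            return 0;
    }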
@@ -679,15 +732,23 @@ void WasapiOutput::FindSharedFormatSupported(AudioFormat &audio_format) {
 	if (FAILED(result)) {
 		throw FormatHResultError(result, "GetMixFormat failed");
 	}
-	audio_format.sample_rate = device_format.Format.nSamplesPerSec;
-
-	SetFormat(device_format, audio_format);
+	audio_format.sample_rate = mixer_format->nSamplesPerSec;
+	device_format = GetFormats(audio_format).front();
 
 	ComHeapPtr<WAVEFORMATEXTENSIBLE> closest_format;
 	result = client->IsFormatSupported(
 		AUDCLNT_SHAREMODE_SHARED,
 		reinterpret_cast<WAVEFORMATEX *>(&device_format),
 		closest_format.AddressCast<WAVEFORMATEX>());
+	{
+		const auto format_string = ToString(audio_format);
+		const auto result_string = std::string(HRESULTToString(result));
+		FormatDebug(wasapi_output_domain, "Trying %s %lu %u-%u (shared) -> %s",
+			    format_string.c_str(), device_format.Format.nSamplesPerSec,
+			    device_format.Format.wBitsPerSample,
+			    device_format.Samples.wValidBitsPerSample,
+			    result_string.c_str());
+	}
 
 	if (FAILED(result) && result != AUDCLNT_E_UNSUPPORTED_FORMAT) {
 		throw FormatHResultError(result, "IsFormatSupported failed");
@@ -701,12 +762,23 @@ void WasapiOutput::FindSharedFormatSupported(AudioFormat &audio_format) {
 		// Trying channels fallback.
 		audio_format.channels = mixer_format->nChannels;
 
-		SetFormat(device_format, audio_format);
+		device_format = GetFormats(audio_format).front();
 
 		result = client->IsFormatSupported(
 			AUDCLNT_SHAREMODE_SHARED,
 			reinterpret_cast<WAVEFORMATEX *>(&device_format),
 			closest_format.AddressCast<WAVEFORMATEX>());
+		{
+			const auto format_string = ToString(audio_format);
+			const auto result_string = std::string(HRESULTToString(result));
+			FormatDebug(wasapi_output_domain,
+				    "Trying %s %lu %u-%u (shared) -> %s",
+				    format_string.c_str(),
+				    device_format.Format.nSamplesPerSec,
+				    device_format.Format.wBitsPerSample,
+				    device_format.Samples.wValidBitsPerSample,
+				    result_string.c_str());
+		}
 		if (FAILED(result)) {
 			throw FormatHResultError(result, "Format is not supported");
 		}
@@ -745,7 +817,10 @@ void WasapiOutput::FindSharedFormatSupported(AudioFormat &audio_format) {
 			audio_format.format = SampleFormat::S16;
 			break;
 		case 32:
-			audio_format.format = SampleFormat::S32;
+			audio_format.format =
+				device_format.Samples.wValidBitsPerSample == 32
+					? SampleFormat::S32
+					: SampleFormat::S24_P32;
 			break;
 		}
 	} else if (device_format.SubFormat == KSDATAFORMAT_SUBTYPE_IEEE_FLOAT) {