🛠️🐜 Antkeeper superbuild with dependencies included https://antkeeper.com

/**
 * OpenAL cross platform audio library
 * Copyright (C) 2011 by authors.
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 * Or go to http://www.gnu.org/copyleft/lgpl.html
 */

#include "config.h"

#include "wasapi.h"

#define WIN32_LEAN_AND_MEAN
#include <windows.h>

#include <stdlib.h>
#include <stdio.h>
#include <memory.h>

#include <wtypes.h>
#include <mmdeviceapi.h>
#include <audioclient.h>
#include <cguid.h>
#include <devpropdef.h>
#include <mmreg.h>
#include <propsys.h>
#include <propkey.h>
#include <devpkey.h>
#ifndef _WAVEFORMATEXTENSIBLE_
#include <ks.h>
#include <ksmedia.h>
#endif

#include <algorithm>
#include <atomic>
#include <chrono>
#include <condition_variable>
#include <cstring>
#include <deque>
#include <functional>
#include <future>
#include <mutex>
#include <string>
#include <thread>
#include <vector>

#include "albit.h"
#include "alnumeric.h"
#include "comptr.h"
#include "core/converter.h"
#include "core/device.h"
#include "core/helpers.h"
#include "core/logging.h"
#include "ringbuffer.h"
#include "strutils.h"
#include "threads.h"


/* Some headers seem to define these as macros for __uuidof, which is annoying
 * since some headers don't declare them at all. Hopefully the ifdef is enough
 * to tell if they need to be declared.
 */
#ifndef KSDATAFORMAT_SUBTYPE_PCM
DEFINE_GUID(KSDATAFORMAT_SUBTYPE_PCM, 0x00000001, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
#endif
#ifndef KSDATAFORMAT_SUBTYPE_IEEE_FLOAT
DEFINE_GUID(KSDATAFORMAT_SUBTYPE_IEEE_FLOAT, 0x00000003, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
#endif

DEFINE_DEVPROPKEY(DEVPKEY_Device_FriendlyName, 0xa45c254e, 0xdf1c, 0x4efd, 0x80,0x20, 0x67,0xd1,0x46,0xa8,0x50,0xe0, 14);
DEFINE_PROPERTYKEY(PKEY_AudioEndpoint_FormFactor, 0x1da5d803, 0xd492, 0x4edd, 0x8c,0x23, 0xe0,0xc0,0xff,0xee,0x7f,0x0e, 0);
DEFINE_PROPERTYKEY(PKEY_AudioEndpoint_GUID, 0x1da5d803, 0xd492, 0x4edd, 0x8c, 0x23,0xe0, 0xc0,0xff,0xee,0x7f,0x0e, 4 );


namespace {

using std::chrono::milliseconds;
using std::chrono::seconds;

using ReferenceTime = std::chrono::duration<REFERENCE_TIME,std::ratio<1,10000000>>;

inline constexpr ReferenceTime operator "" _reftime(unsigned long long int n) noexcept
{ return ReferenceTime{static_cast<REFERENCE_TIME>(n)}; }


#define MONO SPEAKER_FRONT_CENTER
#define STEREO (SPEAKER_FRONT_LEFT|SPEAKER_FRONT_RIGHT)
#define QUAD (SPEAKER_FRONT_LEFT|SPEAKER_FRONT_RIGHT|SPEAKER_BACK_LEFT|SPEAKER_BACK_RIGHT)
#define X5DOT1 (SPEAKER_FRONT_LEFT|SPEAKER_FRONT_RIGHT|SPEAKER_FRONT_CENTER|SPEAKER_LOW_FREQUENCY|SPEAKER_SIDE_LEFT|SPEAKER_SIDE_RIGHT)
#define X5DOT1REAR (SPEAKER_FRONT_LEFT|SPEAKER_FRONT_RIGHT|SPEAKER_FRONT_CENTER|SPEAKER_LOW_FREQUENCY|SPEAKER_BACK_LEFT|SPEAKER_BACK_RIGHT)
#define X6DOT1 (SPEAKER_FRONT_LEFT|SPEAKER_FRONT_RIGHT|SPEAKER_FRONT_CENTER|SPEAKER_LOW_FREQUENCY|SPEAKER_BACK_CENTER|SPEAKER_SIDE_LEFT|SPEAKER_SIDE_RIGHT)
#define X7DOT1 (SPEAKER_FRONT_LEFT|SPEAKER_FRONT_RIGHT|SPEAKER_FRONT_CENTER|SPEAKER_LOW_FREQUENCY|SPEAKER_BACK_LEFT|SPEAKER_BACK_RIGHT|SPEAKER_SIDE_LEFT|SPEAKER_SIDE_RIGHT)

constexpr inline DWORD MaskFromTopBits(DWORD b) noexcept
{
    b |= b>>1;
    b |= b>>2;
    b |= b>>4;
    b |= b>>8;
    b |= b>>16;
    return b;
}
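
/* Illustrative note (not part of the original source): MaskFromTopBits smears
 * the highest set bit downward, producing a contiguous mask that covers every
 * speaker bit at or below the topmost one in the layout. Two compile-time
 * sanity checks with raw bit values (0x63F is what a 7.1 layout works out to
 * under the standard Windows speaker-position bits, an assumption here):
 */
static_assert(MaskFromTopBits(0x4) == 0x7, "0b100 smears to 0b111");
static_assert(MaskFromTopBits(0x63F) == 0x7FF, "7.1-style bits smear to a contiguous mask");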
constexpr DWORD MonoMask{MaskFromTopBits(MONO)};
constexpr DWORD StereoMask{MaskFromTopBits(STEREO)};
constexpr DWORD QuadMask{MaskFromTopBits(QUAD)};
constexpr DWORD X51Mask{MaskFromTopBits(X5DOT1)};
constexpr DWORD X51RearMask{MaskFromTopBits(X5DOT1REAR)};
constexpr DWORD X61Mask{MaskFromTopBits(X6DOT1)};
constexpr DWORD X71Mask{MaskFromTopBits(X7DOT1)};

constexpr char DevNameHead[] = "OpenAL Soft on ";
constexpr size_t DevNameHeadLen{al::size(DevNameHead) - 1};


/* Scales the given reftime value, rounding the result. */
inline uint RefTime2Samples(const ReferenceTime &val, uint srate)
{
    const auto retval = (val*srate + ReferenceTime{seconds{1}}/2) / seconds{1};
    return static_cast<uint>(mini64(retval, std::numeric_limits<uint>::max()));
}
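
/* Worked example (illustrative numbers, not from the original source): a
 * ReferenceTime tick is 100ns, so a 10ms period is 100'000 ticks. At a 48000Hz
 * sample rate, val*srate = 4'800'000'000; adding half a second's worth of
 * ticks (5'000'000) before the integer division by one second (10'000'000
 * ticks) rounds to nearest, giving 480 samples per period.
 */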
class GuidPrinter {
    char mMsg[64];

public:
    GuidPrinter(const GUID &guid)
    {
        std::snprintf(mMsg, al::size(mMsg), "{%08lx-%04x-%04x-%02x%02x-%02x%02x%02x%02x%02x%02x}",
            DWORD{guid.Data1}, guid.Data2, guid.Data3, guid.Data4[0], guid.Data4[1], guid.Data4[2],
            guid.Data4[3], guid.Data4[4], guid.Data4[5], guid.Data4[6], guid.Data4[7]);
    }
    const char *c_str() const { return mMsg; }
};

struct PropVariant {
    PROPVARIANT mProp;

public:
    PropVariant() { PropVariantInit(&mProp); }
    ~PropVariant() { clear(); }

    void clear() { PropVariantClear(&mProp); }

    PROPVARIANT* get() noexcept { return &mProp; }

    PROPVARIANT& operator*() noexcept { return mProp; }
    const PROPVARIANT& operator*() const noexcept { return mProp; }

    PROPVARIANT* operator->() noexcept { return &mProp; }
    const PROPVARIANT* operator->() const noexcept { return &mProp; }
};

struct DevMap {
    std::string name;
    std::string endpoint_guid; // obtained from PKEY_AudioEndpoint_GUID, set to "Unknown device GUID" if absent.
    std::wstring devid;

    template<typename T0, typename T1, typename T2>
    DevMap(T0&& name_, T1&& guid_, T2&& devid_)
      : name{std::forward<T0>(name_)}
      , endpoint_guid{std::forward<T1>(guid_)}
      , devid{std::forward<T2>(devid_)}
    { }
};

bool checkName(const al::vector<DevMap> &list, const std::string &name)
{
    auto match_name = [&name](const DevMap &entry) -> bool
    { return entry.name == name; };
    return std::find_if(list.cbegin(), list.cend(), match_name) != list.cend();
}

al::vector<DevMap> PlaybackDevices;
al::vector<DevMap> CaptureDevices;


using NameGUIDPair = std::pair<std::string,std::string>;
NameGUIDPair get_device_name_and_guid(IMMDevice *device)
{
    static constexpr char UnknownName[]{"Unknown Device Name"};
    static constexpr char UnknownGuid[]{"Unknown Device GUID"};
    std::string name, guid;

    ComPtr<IPropertyStore> ps;
    HRESULT hr = device->OpenPropertyStore(STGM_READ, ps.getPtr());
    if(FAILED(hr))
    {
        WARN("OpenPropertyStore failed: 0x%08lx\n", hr);
        return std::make_pair(UnknownName, UnknownGuid);
    }

    PropVariant pvprop;
    hr = ps->GetValue(reinterpret_cast<const PROPERTYKEY&>(DEVPKEY_Device_FriendlyName), pvprop.get());
    if(FAILED(hr))
    {
        WARN("GetValue Device_FriendlyName failed: 0x%08lx\n", hr);
        name += UnknownName;
    }
    else if(pvprop->vt == VT_LPWSTR)
        name += wstr_to_utf8(pvprop->pwszVal);
    else
    {
        WARN("Unexpected PROPVARIANT type: 0x%04x\n", pvprop->vt);
        name += UnknownName;
    }

    pvprop.clear();
    hr = ps->GetValue(reinterpret_cast<const PROPERTYKEY&>(PKEY_AudioEndpoint_GUID), pvprop.get());
    if(FAILED(hr))
    {
        WARN("GetValue AudioEndpoint_GUID failed: 0x%08lx\n", hr);
        guid = UnknownGuid;
    }
    else if(pvprop->vt == VT_LPWSTR)
        guid = wstr_to_utf8(pvprop->pwszVal);
    else
    {
        WARN("Unexpected PROPVARIANT type: 0x%04x\n", pvprop->vt);
        guid = UnknownGuid;
    }

    return std::make_pair(std::move(name), std::move(guid));
}

EndpointFormFactor get_device_formfactor(IMMDevice *device)
{
    ComPtr<IPropertyStore> ps;
    HRESULT hr{device->OpenPropertyStore(STGM_READ, ps.getPtr())};
    if(FAILED(hr))
    {
        WARN("OpenPropertyStore failed: 0x%08lx\n", hr);
        return UnknownFormFactor;
    }

    EndpointFormFactor formfactor{UnknownFormFactor};
    PropVariant pvform;
    hr = ps->GetValue(PKEY_AudioEndpoint_FormFactor, pvform.get());
    if(FAILED(hr))
        WARN("GetValue AudioEndpoint_FormFactor failed: 0x%08lx\n", hr);
    else if(pvform->vt == VT_UI4)
        formfactor = static_cast<EndpointFormFactor>(pvform->ulVal);
    else if(pvform->vt != VT_EMPTY)
        WARN("Unexpected PROPVARIANT type: 0x%04x\n", pvform->vt);
    return formfactor;
}


void add_device(IMMDevice *device, const WCHAR *devid, al::vector<DevMap> &list)
{
    for(auto &entry : list)
    {
        if(entry.devid == devid)
            return;
    }

    auto name_guid = get_device_name_and_guid(device);

    int count{1};
    std::string newname{name_guid.first};
    while(checkName(list, newname))
    {
        newname = name_guid.first;
        newname += " #";
        newname += std::to_string(++count);
    }
    list.emplace_back(std::move(newname), std::move(name_guid.second), devid);
    const DevMap &newentry = list.back();

    TRACE("Got device \"%s\", \"%s\", \"%ls\"\n", newentry.name.c_str(),
        newentry.endpoint_guid.c_str(), newentry.devid.c_str());
}

WCHAR *get_device_id(IMMDevice *device)
{
    WCHAR *devid;

    const HRESULT hr{device->GetId(&devid)};
    if(FAILED(hr))
    {
        ERR("Failed to get device id: %lx\n", hr);
        return nullptr;
    }

    return devid;
}

void probe_devices(IMMDeviceEnumerator *devenum, EDataFlow flowdir, al::vector<DevMap> &list)
{
    al::vector<DevMap>{}.swap(list);

    ComPtr<IMMDeviceCollection> coll;
    HRESULT hr{devenum->EnumAudioEndpoints(flowdir, DEVICE_STATE_ACTIVE, coll.getPtr())};
    if(FAILED(hr))
    {
        ERR("Failed to enumerate audio endpoints: 0x%08lx\n", hr);
        return;
    }

    UINT count{0};
    hr = coll->GetCount(&count);
    if(SUCCEEDED(hr) && count > 0)
        list.reserve(count);

    ComPtr<IMMDevice> device;
    hr = devenum->GetDefaultAudioEndpoint(flowdir, eMultimedia, device.getPtr());
    if(SUCCEEDED(hr))
    {
        if(WCHAR *devid{get_device_id(device.get())})
        {
            add_device(device.get(), devid, list);
            CoTaskMemFree(devid);
        }
        device = nullptr;
    }

    for(UINT i{0};i < count;++i)
    {
        hr = coll->Item(i, device.getPtr());
        if(FAILED(hr)) continue;

        if(WCHAR *devid{get_device_id(device.get())})
        {
            add_device(device.get(), devid, list);
            CoTaskMemFree(devid);
        }
        device = nullptr;
    }
}


bool MakeExtensible(WAVEFORMATEXTENSIBLE *out, const WAVEFORMATEX *in)
{
    *out = WAVEFORMATEXTENSIBLE{};
    if(in->wFormatTag == WAVE_FORMAT_EXTENSIBLE)
    {
        *out = *CONTAINING_RECORD(in, const WAVEFORMATEXTENSIBLE, Format);
        out->Format.cbSize = sizeof(*out) - sizeof(out->Format);
    }
    else if(in->wFormatTag == WAVE_FORMAT_PCM)
    {
        out->Format = *in;
        out->Format.cbSize = 0;
        out->Samples.wValidBitsPerSample = out->Format.wBitsPerSample;
        if(out->Format.nChannels == 1)
            out->dwChannelMask = MONO;
        else if(out->Format.nChannels == 2)
            out->dwChannelMask = STEREO;
        else
            ERR("Unhandled PCM channel count: %d\n", out->Format.nChannels);
        out->SubFormat = KSDATAFORMAT_SUBTYPE_PCM;
    }
    else if(in->wFormatTag == WAVE_FORMAT_IEEE_FLOAT)
    {
        out->Format = *in;
        out->Format.cbSize = 0;
        out->Samples.wValidBitsPerSample = out->Format.wBitsPerSample;
        if(out->Format.nChannels == 1)
            out->dwChannelMask = MONO;
        else if(out->Format.nChannels == 2)
            out->dwChannelMask = STEREO;
        else
            ERR("Unhandled IEEE float channel count: %d\n", out->Format.nChannels);
        out->SubFormat = KSDATAFORMAT_SUBTYPE_IEEE_FLOAT;
    }
    else
    {
        ERR("Unhandled format tag: 0x%04x\n", in->wFormatTag);
        return false;
    }
    return true;
}
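
/* Illustrative example (not part of the original source): a plain stereo
 * 16-bit WAVEFORMATEX with wFormatTag=WAVE_FORMAT_PCM comes out of
 * MakeExtensible as a WAVEFORMATEXTENSIBLE with dwChannelMask=STEREO,
 * wValidBitsPerSample=16 and SubFormat=KSDATAFORMAT_SUBTYPE_PCM, so the rest
 * of the backend only ever has to reason about the extensible layout.
 */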
void TraceFormat(const char *msg, const WAVEFORMATEX *format)
{
    constexpr size_t fmtex_extra_size{sizeof(WAVEFORMATEXTENSIBLE)-sizeof(WAVEFORMATEX)};
    if(format->wFormatTag == WAVE_FORMAT_EXTENSIBLE && format->cbSize >= fmtex_extra_size)
    {
        const WAVEFORMATEXTENSIBLE *fmtex{
            CONTAINING_RECORD(format, const WAVEFORMATEXTENSIBLE, Format)};
        TRACE("%s:\n"
            " FormatTag = 0x%04x\n"
            " Channels = %d\n"
            " SamplesPerSec = %lu\n"
            " AvgBytesPerSec = %lu\n"
            " BlockAlign = %d\n"
            " BitsPerSample = %d\n"
            " Size = %d\n"
            " Samples = %d\n"
            " ChannelMask = 0x%lx\n"
            " SubFormat = %s\n",
            msg, fmtex->Format.wFormatTag, fmtex->Format.nChannels, fmtex->Format.nSamplesPerSec,
            fmtex->Format.nAvgBytesPerSec, fmtex->Format.nBlockAlign, fmtex->Format.wBitsPerSample,
            fmtex->Format.cbSize, fmtex->Samples.wReserved, fmtex->dwChannelMask,
            GuidPrinter{fmtex->SubFormat}.c_str());
    }
    else
        TRACE("%s:\n"
            " FormatTag = 0x%04x\n"
            " Channels = %d\n"
            " SamplesPerSec = %lu\n"
            " AvgBytesPerSec = %lu\n"
            " BlockAlign = %d\n"
            " BitsPerSample = %d\n"
            " Size = %d\n",
            msg, format->wFormatTag, format->nChannels, format->nSamplesPerSec,
            format->nAvgBytesPerSec, format->nBlockAlign, format->wBitsPerSample, format->cbSize);
}


enum class MsgType {
    OpenDevice,
    ReopenDevice,
    ResetDevice,
    StartDevice,
    StopDevice,
    CloseDevice,
    EnumeratePlayback,
    EnumerateCapture,

    Count,
    QuitThread = Count
};

constexpr char MessageStr[static_cast<size_t>(MsgType::Count)][20]{
    "Open Device",
    "Reopen Device",
    "Reset Device",
    "Start Device",
    "Stop Device",
    "Close Device",
    "Enumerate Playback",
    "Enumerate Capture"
};


/* Proxy interface used by the message handler. */
struct WasapiProxy {
    virtual ~WasapiProxy() = default;

    virtual HRESULT openProxy(const char *name) = 0;
    virtual void closeProxy() = 0;

    virtual HRESULT resetProxy() = 0;
    virtual HRESULT startProxy() = 0;
    virtual void stopProxy() = 0;

    struct Msg {
        MsgType mType;
        WasapiProxy *mProxy;
        const char *mParam;
        std::promise<HRESULT> mPromise;

        explicit operator bool() const noexcept { return mType != MsgType::QuitThread; }
    };
    static std::deque<Msg> mMsgQueue;
    static std::mutex mMsgQueueLock;
    static std::condition_variable mMsgQueueCond;

    std::future<HRESULT> pushMessage(MsgType type, const char *param=nullptr)
    {
        std::promise<HRESULT> promise;
        std::future<HRESULT> future{promise.get_future()};
        {
            std::lock_guard<std::mutex> _{mMsgQueueLock};
            mMsgQueue.emplace_back(Msg{type, this, param, std::move(promise)});
        }
        mMsgQueueCond.notify_one();
        return future;
    }

    static std::future<HRESULT> pushMessageStatic(MsgType type)
    {
        std::promise<HRESULT> promise;
        std::future<HRESULT> future{promise.get_future()};
        {
            std::lock_guard<std::mutex> _{mMsgQueueLock};
            mMsgQueue.emplace_back(Msg{type, nullptr, nullptr, std::move(promise)});
        }
        mMsgQueueCond.notify_one();
        return future;
    }

    static Msg popMessage()
    {
        std::unique_lock<std::mutex> lock{mMsgQueueLock};
        mMsgQueueCond.wait(lock, []{return !mMsgQueue.empty();});
        Msg msg{std::move(mMsgQueue.front())};
        mMsgQueue.pop_front();
        return msg;
    }

    static int messageHandler(std::promise<HRESULT> *promise);
};
std::deque<WasapiProxy::Msg> WasapiProxy::mMsgQueue;
std::mutex WasapiProxy::mMsgQueueLock;
std::condition_variable WasapiProxy::mMsgQueueCond;
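
/* Illustrative note (not part of the original source): every COM-touching
 * operation is funneled through this queue so it runs on the single
 * COM-initialized message thread. A backend call such as
 *
 *     HRESULT hr{pushMessage(MsgType::ResetDevice).get()};
 *
 * blocks the caller on the returned future until messageHandler() pops the
 * message, invokes the matching *Proxy() virtual on that thread, and fulfils
 * the promise with the resulting HRESULT.
 */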
int WasapiProxy::messageHandler(std::promise<HRESULT> *promise)
{
    TRACE("Starting message thread\n");

    HRESULT cohr{CoInitializeEx(nullptr, COINIT_MULTITHREADED)};
    if(FAILED(cohr))
    {
        WARN("Failed to initialize COM: 0x%08lx\n", cohr);
        promise->set_value(cohr);
        return 0;
    }

    void *ptr{};
    HRESULT hr{CoCreateInstance(CLSID_MMDeviceEnumerator, nullptr, CLSCTX_INPROC_SERVER,
        IID_IMMDeviceEnumerator, &ptr)};
    if(FAILED(hr))
    {
        WARN("Failed to create IMMDeviceEnumerator instance: 0x%08lx\n", hr);
        promise->set_value(hr);
        CoUninitialize();
        return 0;
    }
    static_cast<IMMDeviceEnumerator*>(ptr)->Release();
    CoUninitialize();

    TRACE("Message thread initialization complete\n");
    promise->set_value(S_OK);
    promise = nullptr;

    TRACE("Starting message loop\n");
    uint deviceCount{0};
    while(Msg msg{popMessage()})
    {
        TRACE("Got message \"%s\" (0x%04x, this=%p, param=%p)\n",
            MessageStr[static_cast<size_t>(msg.mType)], static_cast<uint>(msg.mType),
            static_cast<void*>(msg.mProxy), static_cast<const void*>(msg.mParam));

        switch(msg.mType)
        {
        case MsgType::OpenDevice:
            hr = cohr = S_OK;
            if(++deviceCount == 1)
                hr = cohr = CoInitializeEx(nullptr, COINIT_MULTITHREADED);
            if(SUCCEEDED(hr))
                hr = msg.mProxy->openProxy(msg.mParam);
            msg.mPromise.set_value(hr);

            if(FAILED(hr))
            {
                if(--deviceCount == 0 && SUCCEEDED(cohr))
                    CoUninitialize();
            }
            continue;

        case MsgType::ReopenDevice:
            hr = msg.mProxy->openProxy(msg.mParam);
            msg.mPromise.set_value(hr);
            continue;

        case MsgType::ResetDevice:
            hr = msg.mProxy->resetProxy();
            msg.mPromise.set_value(hr);
            continue;

        case MsgType::StartDevice:
            hr = msg.mProxy->startProxy();
            msg.mPromise.set_value(hr);
            continue;

        case MsgType::StopDevice:
            msg.mProxy->stopProxy();
            msg.mPromise.set_value(S_OK);
            continue;

        case MsgType::CloseDevice:
            msg.mProxy->closeProxy();
            msg.mPromise.set_value(S_OK);

            if(--deviceCount == 0)
                CoUninitialize();
            continue;

        case MsgType::EnumeratePlayback:
        case MsgType::EnumerateCapture:
            hr = cohr = S_OK;
            if(++deviceCount == 1)
                hr = cohr = CoInitializeEx(nullptr, COINIT_MULTITHREADED);
            if(SUCCEEDED(hr))
                hr = CoCreateInstance(CLSID_MMDeviceEnumerator, nullptr, CLSCTX_INPROC_SERVER,
                    IID_IMMDeviceEnumerator, &ptr);
            if(FAILED(hr))
                msg.mPromise.set_value(hr);
            else
            {
                ComPtr<IMMDeviceEnumerator> enumerator{static_cast<IMMDeviceEnumerator*>(ptr)};

                if(msg.mType == MsgType::EnumeratePlayback)
                    probe_devices(enumerator.get(), eRender, PlaybackDevices);
                else if(msg.mType == MsgType::EnumerateCapture)
                    probe_devices(enumerator.get(), eCapture, CaptureDevices);
                msg.mPromise.set_value(S_OK);
            }

            if(--deviceCount == 0 && SUCCEEDED(cohr))
                CoUninitialize();
            continue;

        case MsgType::QuitThread:
            break;
        }
        ERR("Unexpected message: %u\n", static_cast<uint>(msg.mType));
        msg.mPromise.set_value(E_FAIL);
    }
    TRACE("Message loop finished\n");

    return 0;
}


struct WasapiPlayback final : public BackendBase, WasapiProxy {
    WasapiPlayback(DeviceBase *device) noexcept : BackendBase{device} { }
    ~WasapiPlayback() override;

    int mixerProc();

    void open(const char *name) override;
    HRESULT openProxy(const char *name) override;
    void closeProxy() override;

    bool reset() override;
    HRESULT resetProxy() override;
    void start() override;
    HRESULT startProxy() override;
    void stop() override;
    void stopProxy() override;

    ClockLatency getClockLatency() override;

    HRESULT mOpenStatus{E_FAIL};
    ComPtr<IMMDevice> mMMDev{nullptr};
    ComPtr<IAudioClient> mClient{nullptr};
    ComPtr<IAudioRenderClient> mRender{nullptr};
    HANDLE mNotifyEvent{nullptr};

    UINT32 mFrameStep{0u};
    std::atomic<UINT32> mPadding{0u};

    std::mutex mMutex;

    std::atomic<bool> mKillNow{true};
    std::thread mThread;

    DEF_NEWDEL(WasapiPlayback)
};

WasapiPlayback::~WasapiPlayback()
{
    if(SUCCEEDED(mOpenStatus))
        pushMessage(MsgType::CloseDevice).wait();
    mOpenStatus = E_FAIL;

    if(mNotifyEvent != nullptr)
        CloseHandle(mNotifyEvent);
    mNotifyEvent = nullptr;
}


FORCE_ALIGN int WasapiPlayback::mixerProc()
{
    HRESULT hr{CoInitializeEx(nullptr, COINIT_MULTITHREADED)};
    if(FAILED(hr))
    {
        ERR("CoInitializeEx(nullptr, COINIT_MULTITHREADED) failed: 0x%08lx\n", hr);
        mDevice->handleDisconnect("COM init failed: 0x%08lx", hr);
        return 1;
    }

    SetRTPriority();
    althrd_setname(MIXER_THREAD_NAME);

    const uint update_size{mDevice->UpdateSize};
    const UINT32 buffer_len{mDevice->BufferSize};
    while(!mKillNow.load(std::memory_order_relaxed))
    {
        UINT32 written;
        hr = mClient->GetCurrentPadding(&written);
        if(FAILED(hr))
        {
            ERR("Failed to get padding: 0x%08lx\n", hr);
            mDevice->handleDisconnect("Failed to retrieve buffer padding: 0x%08lx", hr);
            break;
        }
        mPadding.store(written, std::memory_order_relaxed);

        uint len{buffer_len - written};
        if(len < update_size)
        {
            DWORD res{WaitForSingleObjectEx(mNotifyEvent, 2000, FALSE)};
            if(res != WAIT_OBJECT_0)
                ERR("WaitForSingleObjectEx error: 0x%lx\n", res);
            continue;
        }

        BYTE *buffer;
        hr = mRender->GetBuffer(len, &buffer);
        if(SUCCEEDED(hr))
        {
            {
                std::lock_guard<std::mutex> _{mMutex};
                mDevice->renderSamples(buffer, len, mFrameStep);
                mPadding.store(written + len, std::memory_order_relaxed);
            }
            hr = mRender->ReleaseBuffer(len, 0);
        }
        if(FAILED(hr))
        {
            ERR("Failed to buffer data: 0x%08lx\n", hr);
            mDevice->handleDisconnect("Failed to send playback samples: 0x%08lx", hr);
            break;
        }
    }
    mPadding.store(0u, std::memory_order_release);

    CoUninitialize();
    return 0;
}


void WasapiPlayback::open(const char *name)
{
    HRESULT hr{S_OK};

    if(!mNotifyEvent)
    {
        mNotifyEvent = CreateEventW(nullptr, FALSE, FALSE, nullptr);
        if(mNotifyEvent == nullptr)
        {
            ERR("Failed to create notify events: %lu\n", GetLastError());
            hr = E_FAIL;
        }
    }

    if(SUCCEEDED(hr))
    {
        if(name)
        {
            if(PlaybackDevices.empty())
                pushMessage(MsgType::EnumeratePlayback);
            if(std::strncmp(name, DevNameHead, DevNameHeadLen) == 0)
            {
                name += DevNameHeadLen;
                if(*name == '\0')
                    name = nullptr;
            }
        }

        if(SUCCEEDED(mOpenStatus))
            hr = pushMessage(MsgType::ReopenDevice, name).get();
        else
        {
            hr = pushMessage(MsgType::OpenDevice, name).get();
            mOpenStatus = hr;
        }
    }

    if(FAILED(hr))
        throw al::backend_exception{al::backend_error::DeviceError, "Device init failed: 0x%08lx",
            hr};
}

HRESULT WasapiPlayback::openProxy(const char *name)
{
    const wchar_t *devid{nullptr};
    if(name)
    {
        auto iter = std::find_if(PlaybackDevices.cbegin(), PlaybackDevices.cend(),
            [name](const DevMap &entry) -> bool
            { return entry.name == name || entry.endpoint_guid == name; });
        if(iter == PlaybackDevices.cend())
        {
            const std::wstring wname{utf8_to_wstr(name)};
            iter = std::find_if(PlaybackDevices.cbegin(), PlaybackDevices.cend(),
                [&wname](const DevMap &entry) -> bool
                { return entry.devid == wname; });
        }
        if(iter == PlaybackDevices.cend())
        {
            WARN("Failed to find device name matching \"%s\"\n", name);
            return E_FAIL;
        }
        name = iter->name.c_str();
        devid = iter->devid.c_str();
    }

    void *ptr;
    ComPtr<IMMDevice> mmdev;
    HRESULT hr{CoCreateInstance(CLSID_MMDeviceEnumerator, nullptr, CLSCTX_INPROC_SERVER,
        IID_IMMDeviceEnumerator, &ptr)};
    if(SUCCEEDED(hr))
    {
        ComPtr<IMMDeviceEnumerator> enumerator{static_cast<IMMDeviceEnumerator*>(ptr)};
        if(!devid)
            hr = enumerator->GetDefaultAudioEndpoint(eRender, eMultimedia, mmdev.getPtr());
        else
            hr = enumerator->GetDevice(devid, mmdev.getPtr());
    }
    if(FAILED(hr))
    {
        WARN("Failed to open device \"%s\"\n", name?name:"(default)");
        return hr;
    }

    mClient = nullptr;
    mMMDev = std::move(mmdev);
    if(name) mDevice->DeviceName = std::string{DevNameHead} + name;
    else mDevice->DeviceName = DevNameHead + get_device_name_and_guid(mMMDev.get()).first;

    return hr;
}

void WasapiPlayback::closeProxy()
{
    mClient = nullptr;
    mMMDev = nullptr;
}


bool WasapiPlayback::reset()
{
    HRESULT hr{pushMessage(MsgType::ResetDevice).get()};
    if(FAILED(hr))
        throw al::backend_exception{al::backend_error::DeviceError, "0x%08lx", hr};
    return true;
}

HRESULT WasapiPlayback::resetProxy()
{
    mClient = nullptr;

    void *ptr;
    HRESULT hr{mMMDev->Activate(IID_IAudioClient, CLSCTX_INPROC_SERVER, nullptr, &ptr)};
    if(FAILED(hr))
    {
        ERR("Failed to reactivate audio client: 0x%08lx\n", hr);
        return hr;
    }
    mClient = ComPtr<IAudioClient>{static_cast<IAudioClient*>(ptr)};

    WAVEFORMATEX *wfx;
    hr = mClient->GetMixFormat(&wfx);
    if(FAILED(hr))
    {
        ERR("Failed to get mix format: 0x%08lx\n", hr);
        return hr;
    }

    WAVEFORMATEXTENSIBLE OutputType;
    if(!MakeExtensible(&OutputType, wfx))
    {
        CoTaskMemFree(wfx);
        return E_FAIL;
    }
    CoTaskMemFree(wfx);
    wfx = nullptr;

    const ReferenceTime per_time{ReferenceTime{seconds{mDevice->UpdateSize}} / mDevice->Frequency};
    const ReferenceTime buf_time{ReferenceTime{seconds{mDevice->BufferSize}} / mDevice->Frequency};

    if(!mDevice->Flags.test(FrequencyRequest))
        mDevice->Frequency = OutputType.Format.nSamplesPerSec;
    if(!mDevice->Flags.test(ChannelsRequest))
    {
        const uint32_t chancount{OutputType.Format.nChannels};
        const DWORD chanmask{OutputType.dwChannelMask};
        if(chancount >= 8 && (chanmask&X71Mask) == X7DOT1)
            mDevice->FmtChans = DevFmtX71;
        else if(chancount >= 7 && (chanmask&X61Mask) == X6DOT1)
            mDevice->FmtChans = DevFmtX61;
        else if(chancount >= 6 && ((chanmask&X51Mask) == X5DOT1
            || (chanmask&X51RearMask) == X5DOT1REAR))
            mDevice->FmtChans = DevFmtX51;
        else if(chancount >= 4 && (chanmask&QuadMask) == QUAD)
            mDevice->FmtChans = DevFmtQuad;
        else if(chancount >= 2 && (chanmask&StereoMask) == STEREO)
            mDevice->FmtChans = DevFmtStereo;
        else if(chancount >= 1 && (chanmask&MonoMask) == MONO)
            mDevice->FmtChans = DevFmtMono;
        else
            ERR("Unhandled channel config: %d -- 0x%08lx\n", chancount, chanmask);
    }

    OutputType.Format.wFormatTag = WAVE_FORMAT_EXTENSIBLE;
    switch(mDevice->FmtChans)
    {
    case DevFmtMono:
        OutputType.Format.nChannels = 1;
        OutputType.dwChannelMask = MONO;
        break;
    case DevFmtAmbi3D:
        mDevice->FmtChans = DevFmtStereo;
        /*fall-through*/
    case DevFmtStereo:
        OutputType.Format.nChannels = 2;
        OutputType.dwChannelMask = STEREO;
        break;
    case DevFmtQuad:
        OutputType.Format.nChannels = 4;
        OutputType.dwChannelMask = QUAD;
        break;
    case DevFmtX51:
        OutputType.Format.nChannels = 6;
        OutputType.dwChannelMask = X5DOT1;
        break;
    case DevFmtX61:
        OutputType.Format.nChannels = 7;
        OutputType.dwChannelMask = X6DOT1;
        break;
    case DevFmtX71:
        OutputType.Format.nChannels = 8;
        OutputType.dwChannelMask = X7DOT1;
        break;
    }
    switch(mDevice->FmtType)
    {
    case DevFmtByte:
        mDevice->FmtType = DevFmtUByte;
        /* fall-through */
    case DevFmtUByte:
        OutputType.Format.wBitsPerSample = 8;
        OutputType.Samples.wValidBitsPerSample = 8;
        OutputType.SubFormat = KSDATAFORMAT_SUBTYPE_PCM;
        break;
    case DevFmtUShort:
        mDevice->FmtType = DevFmtShort;
        /* fall-through */
    case DevFmtShort:
        OutputType.Format.wBitsPerSample = 16;
        OutputType.Samples.wValidBitsPerSample = 16;
        OutputType.SubFormat = KSDATAFORMAT_SUBTYPE_PCM;
        break;
    case DevFmtUInt:
        mDevice->FmtType = DevFmtInt;
        /* fall-through */
    case DevFmtInt:
        OutputType.Format.wBitsPerSample = 32;
        OutputType.Samples.wValidBitsPerSample = 32;
        OutputType.SubFormat = KSDATAFORMAT_SUBTYPE_PCM;
        break;
    case DevFmtFloat:
        OutputType.Format.wBitsPerSample = 32;
        OutputType.Samples.wValidBitsPerSample = 32;
        OutputType.SubFormat = KSDATAFORMAT_SUBTYPE_IEEE_FLOAT;
        break;
    }
    OutputType.Format.nSamplesPerSec = mDevice->Frequency;

    OutputType.Format.nBlockAlign = static_cast<WORD>(OutputType.Format.nChannels *
        OutputType.Format.wBitsPerSample / 8);
    OutputType.Format.nAvgBytesPerSec = OutputType.Format.nSamplesPerSec *
        OutputType.Format.nBlockAlign;

    TraceFormat("Requesting playback format", &OutputType.Format);
    hr = mClient->IsFormatSupported(AUDCLNT_SHAREMODE_SHARED, &OutputType.Format, &wfx);
    if(FAILED(hr))
    {
        ERR("Failed to check format support: 0x%08lx\n", hr);
        hr = mClient->GetMixFormat(&wfx);
    }
    if(FAILED(hr))
    {
        ERR("Failed to find a supported format: 0x%08lx\n", hr);
        return hr;
    }

    if(wfx != nullptr)
    {
        TraceFormat("Got playback format", wfx);
        if(!MakeExtensible(&OutputType, wfx))
        {
            CoTaskMemFree(wfx);
            return E_FAIL;
        }
        CoTaskMemFree(wfx);
        wfx = nullptr;

        mDevice->Frequency = OutputType.Format.nSamplesPerSec;
        const uint32_t chancount{OutputType.Format.nChannels};
        const DWORD chanmask{OutputType.dwChannelMask};
        /* Don't update the channel format if the requested format fits what's
         * supported.
         */
        bool chansok{false};
        if(mDevice->Flags.test(ChannelsRequest))
        {
            switch(mDevice->FmtChans)
            {
            case DevFmtMono:
                chansok = (chancount >= 1 && (chanmask&MonoMask) == MONO);
                break;
            case DevFmtStereo:
                chansok = (chancount >= 2 && (chanmask&StereoMask) == STEREO);
                break;
            case DevFmtQuad:
                chansok = (chancount >= 4 && (chanmask&QuadMask) == QUAD);
                break;
            case DevFmtX51:
                chansok = (chancount >= 6 && ((chanmask&X51Mask) == X5DOT1
                    || (chanmask&X51RearMask) == X5DOT1REAR));
                break;
            case DevFmtX61:
                chansok = (chancount >= 7 && (chanmask&X61Mask) == X6DOT1);
                break;
            case DevFmtX71:
                chansok = (chancount >= 8 && (chanmask&X71Mask) == X7DOT1);
                break;
            case DevFmtAmbi3D:
                break;
            }
        }
        if(!chansok)
        {
            if(chancount >= 8 && (chanmask&X71Mask) == X7DOT1)
                mDevice->FmtChans = DevFmtX71;
            else if(chancount >= 7 && (chanmask&X61Mask) == X6DOT1)
                mDevice->FmtChans = DevFmtX61;
            else if(chancount >= 6 && ((chanmask&X51Mask) == X5DOT1
                || (chanmask&X51RearMask) == X5DOT1REAR))
                mDevice->FmtChans = DevFmtX51;
            else if(chancount >= 4 && (chanmask&QuadMask) == QUAD)
                mDevice->FmtChans = DevFmtQuad;
            else if(chancount >= 2 && (chanmask&StereoMask) == STEREO)
                mDevice->FmtChans = DevFmtStereo;
            else if(chancount >= 1 && (chanmask&MonoMask) == MONO)
                mDevice->FmtChans = DevFmtMono;
            else
            {
                ERR("Unhandled extensible channels: %d -- 0x%08lx\n", OutputType.Format.nChannels,
                    OutputType.dwChannelMask);
                mDevice->FmtChans = DevFmtStereo;
                OutputType.Format.nChannels = 2;
                OutputType.dwChannelMask = STEREO;
            }
        }

        if(IsEqualGUID(OutputType.SubFormat, KSDATAFORMAT_SUBTYPE_PCM))
        {
            if(OutputType.Format.wBitsPerSample == 8)
                mDevice->FmtType = DevFmtUByte;
            else if(OutputType.Format.wBitsPerSample == 16)
                mDevice->FmtType = DevFmtShort;
            else if(OutputType.Format.wBitsPerSample == 32)
                mDevice->FmtType = DevFmtInt;
            else
            {
                mDevice->FmtType = DevFmtShort;
                OutputType.Format.wBitsPerSample = 16;
            }
        }
        else if(IsEqualGUID(OutputType.SubFormat, KSDATAFORMAT_SUBTYPE_IEEE_FLOAT))
        {
            mDevice->FmtType = DevFmtFloat;
            OutputType.Format.wBitsPerSample = 32;
        }
        else
        {
            ERR("Unhandled format sub-type: %s\n", GuidPrinter{OutputType.SubFormat}.c_str());
            mDevice->FmtType = DevFmtShort;
            if(OutputType.Format.wFormatTag != WAVE_FORMAT_EXTENSIBLE)
                OutputType.Format.wFormatTag = WAVE_FORMAT_PCM;
            OutputType.Format.wBitsPerSample = 16;
            OutputType.SubFormat = KSDATAFORMAT_SUBTYPE_PCM;
        }
        OutputType.Samples.wValidBitsPerSample = OutputType.Format.wBitsPerSample;
    }
    mFrameStep = OutputType.Format.nChannels;

    const EndpointFormFactor formfactor{get_device_formfactor(mMMDev.get())};
    mDevice->Flags.set(DirectEar, (formfactor == Headphones || formfactor == Headset));

    setChannelOrderFromWFXMask(OutputType.dwChannelMask);

    hr = mClient->Initialize(AUDCLNT_SHAREMODE_SHARED, AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
        buf_time.count(), 0, &OutputType.Format, nullptr);
    if(FAILED(hr))
    {
        ERR("Failed to initialize audio client: 0x%08lx\n", hr);
        return hr;
    }

    UINT32 buffer_len{};
    ReferenceTime min_per{};
    hr = mClient->GetDevicePeriod(&reinterpret_cast<REFERENCE_TIME&>(min_per), nullptr);
    if(SUCCEEDED(hr))
        hr = mClient->GetBufferSize(&buffer_len);
    if(FAILED(hr))
    {
        ERR("Failed to get audio buffer info: 0x%08lx\n", hr);
        return hr;
    }

    /* Find the nearest multiple of the period size to the update size */
    if(min_per < per_time)
        min_per *= maxi64((per_time + min_per/2) / min_per, 1);
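    /* Worked example with illustrative numbers (not from the original source):
     * if the device period is 3ms (30'000 reftime units) and the requested
     * update size works out to 20ms (200'000 units), the rounded multiplier is
     * (200'000 + 15'000)/30'000 = 7, so min_per becomes 21ms and the update
     * size below is re-derived from that, capped at half the buffer length.
     */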
    mDevice->UpdateSize = minu(RefTime2Samples(min_per, mDevice->Frequency), buffer_len/2);
    mDevice->BufferSize = buffer_len;

    hr = mClient->SetEventHandle(mNotifyEvent);
    if(FAILED(hr))
    {
        ERR("Failed to set event handle: 0x%08lx\n", hr);
        return hr;
    }

    return hr;
}


void WasapiPlayback::start()
{
    const HRESULT hr{pushMessage(MsgType::StartDevice).get()};
    if(FAILED(hr))
        throw al::backend_exception{al::backend_error::DeviceError,
            "Failed to start playback: 0x%lx", hr};
}

HRESULT WasapiPlayback::startProxy()
{
    ResetEvent(mNotifyEvent);

    HRESULT hr{mClient->Start()};
    if(FAILED(hr))
    {
        ERR("Failed to start audio client: 0x%08lx\n", hr);
        return hr;
    }

    void *ptr;
    hr = mClient->GetService(IID_IAudioRenderClient, &ptr);
    if(SUCCEEDED(hr))
    {
        mRender = ComPtr<IAudioRenderClient>{static_cast<IAudioRenderClient*>(ptr)};
        try {
            mKillNow.store(false, std::memory_order_release);
            mThread = std::thread{std::mem_fn(&WasapiPlayback::mixerProc), this};
        }
        catch(...) {
            mRender = nullptr;
            ERR("Failed to start thread\n");
            hr = E_FAIL;
        }
    }

    if(FAILED(hr))
        mClient->Stop();

    return hr;
}


void WasapiPlayback::stop()
{ pushMessage(MsgType::StopDevice).wait(); }

void WasapiPlayback::stopProxy()
{
    if(!mRender || !mThread.joinable())
        return;

    mKillNow.store(true, std::memory_order_release);
    mThread.join();

    mRender = nullptr;
    mClient->Stop();
}


ClockLatency WasapiPlayback::getClockLatency()
{
    ClockLatency ret;

    std::lock_guard<std::mutex> _{mMutex};
    ret.ClockTime = GetDeviceClockTime(mDevice);
    ret.Latency = std::chrono::seconds{mPadding.load(std::memory_order_relaxed)};
    ret.Latency /= mDevice->Frequency;

    return ret;
}


struct WasapiCapture final : public BackendBase, WasapiProxy {
    WasapiCapture(DeviceBase *device) noexcept : BackendBase{device} { }
    ~WasapiCapture() override;

    int recordProc();

    void open(const char *name) override;
    HRESULT openProxy(const char *name) override;
    void closeProxy() override;

    HRESULT resetProxy() override;
    void start() override;
    HRESULT startProxy() override;
    void stop() override;
    void stopProxy() override;

    void captureSamples(al::byte *buffer, uint samples) override;
    uint availableSamples() override;

    HRESULT mOpenStatus{E_FAIL};
    ComPtr<IMMDevice> mMMDev{nullptr};
    ComPtr<IAudioClient> mClient{nullptr};
    ComPtr<IAudioCaptureClient> mCapture{nullptr};
    HANDLE mNotifyEvent{nullptr};

    ChannelConverter mChannelConv{};
    SampleConverterPtr mSampleConv;
    RingBufferPtr mRing;

    std::atomic<bool> mKillNow{true};
    std::thread mThread;

    DEF_NEWDEL(WasapiCapture)
};

WasapiCapture::~WasapiCapture()
{
    if(SUCCEEDED(mOpenStatus))
        pushMessage(MsgType::CloseDevice).wait();
    mOpenStatus = E_FAIL;

    if(mNotifyEvent != nullptr)
        CloseHandle(mNotifyEvent);
    mNotifyEvent = nullptr;
}


FORCE_ALIGN int WasapiCapture::recordProc()
{
    HRESULT hr{CoInitializeEx(nullptr, COINIT_MULTITHREADED)};
    if(FAILED(hr))
    {
        ERR("CoInitializeEx(nullptr, COINIT_MULTITHREADED) failed: 0x%08lx\n", hr);
        mDevice->handleDisconnect("COM init failed: 0x%08lx", hr);
        return 1;
    }

    althrd_setname(RECORD_THREAD_NAME);

    al::vector<float> samples;
    while(!mKillNow.load(std::memory_order_relaxed))
    {
        UINT32 avail;
        hr = mCapture->GetNextPacketSize(&avail);
        if(FAILED(hr))
            ERR("Failed to get next packet size: 0x%08lx\n", hr);
        else if(avail > 0)
        {
            UINT32 numsamples;
            DWORD flags;
            BYTE *rdata;

            hr = mCapture->GetBuffer(&rdata, &numsamples, &flags, nullptr, nullptr);
            if(FAILED(hr))
                ERR("Failed to get capture buffer: 0x%08lx\n", hr);
            else
            {
                if(mChannelConv.is_active())
                {
                    samples.resize(numsamples*2);
                    mChannelConv.convert(rdata, samples.data(), numsamples);
                    rdata = reinterpret_cast<BYTE*>(samples.data());
                }

                auto data = mRing->getWriteVector();

                size_t dstframes;
                if(mSampleConv)
                {
                    const void *srcdata{rdata};
                    uint srcframes{numsamples};

                    dstframes = mSampleConv->convert(&srcdata, &srcframes, data.first.buf,
                        static_cast<uint>(minz(data.first.len, INT_MAX)));
                    if(srcframes > 0 && dstframes == data.first.len && data.second.len > 0)
                    {
                        /* If some source samples remain, all of the first dest
                         * block was filled, and there's space in the second
                         * dest block, do another run for the second block.
                         */
                        dstframes += mSampleConv->convert(&srcdata, &srcframes, data.second.buf,
                            static_cast<uint>(minz(data.second.len, INT_MAX)));
                    }
                }
                else
                {
                    const uint framesize{mDevice->frameSizeFromFmt()};
                    size_t len1{minz(data.first.len, numsamples)};
                    size_t len2{minz(data.second.len, numsamples-len1)};

                    memcpy(data.first.buf, rdata, len1*framesize);
                    if(len2 > 0)
                        memcpy(data.second.buf, rdata+len1*framesize, len2*framesize);
                    dstframes = len1 + len2;
                }
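                /* Illustrative example (not part of the original source): if
                 * the ring buffer's write vector is split into 100 and 60
                 * free frames and the packet converts to roughly 130 frames,
                 * the first pass fills the whole 100-frame block and the
                 * second pass writes the remaining ~30 frames into the second
                 * block before the write pointer is advanced below.
                 */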
                mRing->writeAdvance(dstframes);

                hr = mCapture->ReleaseBuffer(numsamples);
                if(FAILED(hr)) ERR("Failed to release capture buffer: 0x%08lx\n", hr);
            }
        }

        if(FAILED(hr))
        {
            mDevice->handleDisconnect("Failed to capture samples: 0x%08lx", hr);
            break;
        }

        DWORD res{WaitForSingleObjectEx(mNotifyEvent, 2000, FALSE)};
        if(res != WAIT_OBJECT_0)
            ERR("WaitForSingleObjectEx error: 0x%lx\n", res);
    }

    CoUninitialize();
    return 0;
}


void WasapiCapture::open(const char *name)
{
    HRESULT hr{S_OK};

    mNotifyEvent = CreateEventW(nullptr, FALSE, FALSE, nullptr);
    if(mNotifyEvent == nullptr)
    {
        ERR("Failed to create notify event: %lu\n", GetLastError());
        hr = E_FAIL;
    }

    if(SUCCEEDED(hr))
    {
        if(name)
        {
            if(CaptureDevices.empty())
                pushMessage(MsgType::EnumerateCapture);
            if(std::strncmp(name, DevNameHead, DevNameHeadLen) == 0)
            {
                name += DevNameHeadLen;
                if(*name == '\0')
                    name = nullptr;
            }
        }
        hr = pushMessage(MsgType::OpenDevice, name).get();
    }
    mOpenStatus = hr;

    if(FAILED(hr))
        throw al::backend_exception{al::backend_error::DeviceError, "Device init failed: 0x%08lx",
            hr};

    hr = pushMessage(MsgType::ResetDevice).get();
    if(FAILED(hr))
    {
        if(hr == E_OUTOFMEMORY)
            throw al::backend_exception{al::backend_error::OutOfMemory, "Out of memory"};
        throw al::backend_exception{al::backend_error::DeviceError, "Device reset failed"};
    }
}

HRESULT WasapiCapture::openProxy(const char *name)
{
    const wchar_t *devid{nullptr};
    if(name)
    {
        auto iter = std::find_if(CaptureDevices.cbegin(), CaptureDevices.cend(),
            [name](const DevMap &entry) -> bool
            { return entry.name == name || entry.endpoint_guid == name; });
        if(iter == CaptureDevices.cend())
        {
            const std::wstring wname{utf8_to_wstr(name)};
            iter = std::find_if(CaptureDevices.cbegin(), CaptureDevices.cend(),
                [&wname](const DevMap &entry) -> bool
                { return entry.devid == wname; });
        }
        if(iter == CaptureDevices.cend())
        {
            WARN("Failed to find device name matching \"%s\"\n", name);
            return E_FAIL;
        }
        name = iter->name.c_str();
        devid = iter->devid.c_str();
    }

    void *ptr;
    HRESULT hr{CoCreateInstance(CLSID_MMDeviceEnumerator, nullptr, CLSCTX_INPROC_SERVER,
        IID_IMMDeviceEnumerator, &ptr)};
    if(SUCCEEDED(hr))
    {
        ComPtr<IMMDeviceEnumerator> enumerator{static_cast<IMMDeviceEnumerator*>(ptr)};
        if(!devid)
            hr = enumerator->GetDefaultAudioEndpoint(eCapture, eMultimedia, mMMDev.getPtr());
        else
            hr = enumerator->GetDevice(devid, mMMDev.getPtr());
    }
    if(FAILED(hr))
    {
        WARN("Failed to open device \"%s\"\n", name?name:"(default)");
        return hr;
    }

    mClient = nullptr;
    if(name) mDevice->DeviceName = std::string{DevNameHead} + name;
    else mDevice->DeviceName = DevNameHead + get_device_name_and_guid(mMMDev.get()).first;

    return hr;
}

void WasapiCapture::closeProxy()
{
    mClient = nullptr;
    mMMDev = nullptr;
}

HRESULT WasapiCapture::resetProxy()
{
    mClient = nullptr;

    void *ptr;
    HRESULT hr{mMMDev->Activate(IID_IAudioClient, CLSCTX_INPROC_SERVER, nullptr, &ptr)};
    if(FAILED(hr))
    {
        ERR("Failed to reactivate audio client: 0x%08lx\n", hr);
        return hr;
    }
    mClient = ComPtr<IAudioClient>{static_cast<IAudioClient*>(ptr)};

    // Make sure buffer is at least 100ms in size
    ReferenceTime buf_time{ReferenceTime{seconds{mDevice->BufferSize}} / mDevice->Frequency};
    buf_time = std::max(buf_time, ReferenceTime{milliseconds{100}});

    WAVEFORMATEXTENSIBLE InputType{};
    InputType.Format.wFormatTag = WAVE_FORMAT_EXTENSIBLE;
    switch(mDevice->FmtChans)
    {
    case DevFmtMono:
        InputType.Format.nChannels = 1;
        InputType.dwChannelMask = MONO;
        break;
    case DevFmtStereo:
        InputType.Format.nChannels = 2;
        InputType.dwChannelMask = STEREO;
        break;
    case DevFmtQuad:
        InputType.Format.nChannels = 4;
        InputType.dwChannelMask = QUAD;
        break;
    case DevFmtX51:
        InputType.Format.nChannels = 6;
        InputType.dwChannelMask = X5DOT1;
        break;
    case DevFmtX61:
        InputType.Format.nChannels = 7;
        InputType.dwChannelMask = X6DOT1;
        break;
    case DevFmtX71:
        InputType.Format.nChannels = 8;
        InputType.dwChannelMask = X7DOT1;
        break;
    case DevFmtAmbi3D:
        return E_FAIL;
    }
    switch(mDevice->FmtType)
    {
    /* NOTE: Signedness doesn't matter, the converter will handle it. */
    case DevFmtByte:
    case DevFmtUByte:
        InputType.Format.wBitsPerSample = 8;
        InputType.SubFormat = KSDATAFORMAT_SUBTYPE_PCM;
        break;
    case DevFmtShort:
    case DevFmtUShort:
        InputType.Format.wBitsPerSample = 16;
        InputType.SubFormat = KSDATAFORMAT_SUBTYPE_PCM;
        break;
    case DevFmtInt:
    case DevFmtUInt:
        InputType.Format.wBitsPerSample = 32;
        InputType.SubFormat = KSDATAFORMAT_SUBTYPE_PCM;
        break;
    case DevFmtFloat:
        InputType.Format.wBitsPerSample = 32;
        InputType.SubFormat = KSDATAFORMAT_SUBTYPE_IEEE_FLOAT;
        break;
    }
    InputType.Samples.wValidBitsPerSample = InputType.Format.wBitsPerSample;
    InputType.Format.nSamplesPerSec = mDevice->Frequency;

    InputType.Format.nBlockAlign = static_cast<WORD>(InputType.Format.nChannels *
        InputType.Format.wBitsPerSample / 8);
    InputType.Format.nAvgBytesPerSec = InputType.Format.nSamplesPerSec *
        InputType.Format.nBlockAlign;
    InputType.Format.cbSize = sizeof(InputType) - sizeof(InputType.Format);

    TraceFormat("Requesting capture format", &InputType.Format);
    WAVEFORMATEX *wfx;
    hr = mClient->IsFormatSupported(AUDCLNT_SHAREMODE_SHARED, &InputType.Format, &wfx);
    if(FAILED(hr))
    {
        ERR("Failed to check format support: 0x%08lx\n", hr);
        return hr;
    }

    mSampleConv = nullptr;
    mChannelConv = {};

    if(wfx != nullptr)
    {
        TraceFormat("Got capture format", wfx);
        if(!MakeExtensible(&InputType, wfx))
        {
            CoTaskMemFree(wfx);
            return E_FAIL;
        }
        CoTaskMemFree(wfx);
        wfx = nullptr;

        auto validate_fmt = [](DeviceBase *device, uint32_t chancount, DWORD chanmask) noexcept
            -> bool
        {
            switch(device->FmtChans)
            {
            /* If the device wants mono, we can handle any input. */
            case DevFmtMono:
                return true;
            /* If the device wants stereo, we can handle mono or stereo input. */
            case DevFmtStereo:
                return (chancount == 2 && (chanmask == 0 || (chanmask&StereoMask) == STEREO))
                    || (chancount == 1 && (chanmask&MonoMask) == MONO);
            /* Otherwise, the device must match the input type. */
            case DevFmtQuad:
                return (chancount == 4 && (chanmask == 0 || (chanmask&QuadMask) == QUAD));
            /* 5.1 (Side) and 5.1 (Rear) are interchangeable here. */
            case DevFmtX51:
                return (chancount == 6 && (chanmask == 0 || (chanmask&X51Mask) == X5DOT1
                    || (chanmask&X51RearMask) == X5DOT1REAR));
            case DevFmtX61:
                return (chancount == 7 && (chanmask == 0 || (chanmask&X61Mask) == X6DOT1));
            case DevFmtX71:
                return (chancount == 8 && (chanmask == 0 || (chanmask&X71Mask) == X7DOT1));
            case DevFmtAmbi3D:
                return (chanmask == 0 && chancount == device->channelsFromFmt());
            }
            return false;
        };
        if(!validate_fmt(mDevice, InputType.Format.nChannels, InputType.dwChannelMask))
        {
            ERR("Failed to match format, wanted: %s %s %uhz, got: 0x%08lx mask %d channel%s %d-bit %luhz\n",
                DevFmtChannelsString(mDevice->FmtChans), DevFmtTypeString(mDevice->FmtType),
                mDevice->Frequency, InputType.dwChannelMask, InputType.Format.nChannels,
                (InputType.Format.nChannels==1)?"":"s", InputType.Format.wBitsPerSample,
                InputType.Format.nSamplesPerSec);
            return E_FAIL;
        }
    }

    DevFmtType srcType{};
    if(IsEqualGUID(InputType.SubFormat, KSDATAFORMAT_SUBTYPE_PCM))
    {
        if(InputType.Format.wBitsPerSample == 8)
            srcType = DevFmtUByte;
        else if(InputType.Format.wBitsPerSample == 16)
            srcType = DevFmtShort;
        else if(InputType.Format.wBitsPerSample == 32)
            srcType = DevFmtInt;
        else
        {
            ERR("Unhandled integer bit depth: %d\n", InputType.Format.wBitsPerSample);
            return E_FAIL;
        }
    }
    else if(IsEqualGUID(InputType.SubFormat, KSDATAFORMAT_SUBTYPE_IEEE_FLOAT))
    {
        if(InputType.Format.wBitsPerSample == 32)
            srcType = DevFmtFloat;
        else
        {
            ERR("Unhandled float bit depth: %d\n", InputType.Format.wBitsPerSample);
            return E_FAIL;
        }
    }
    else
    {
        ERR("Unhandled format sub-type: %s\n", GuidPrinter{InputType.SubFormat}.c_str());
        return E_FAIL;
    }

    if(mDevice->FmtChans == DevFmtMono && InputType.Format.nChannels != 1)
    {
        uint chanmask{(1u<<InputType.Format.nChannels) - 1u};
        /* Exclude LFE from the downmix. */
        if((InputType.dwChannelMask&SPEAKER_LOW_FREQUENCY))
        {
            constexpr auto lfemask = MaskFromTopBits(SPEAKER_LOW_FREQUENCY);
            const int lfeidx{al::popcount(InputType.dwChannelMask&lfemask) - 1};
            chanmask &= ~(1u << lfeidx);
        }
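        /* Worked example with illustrative values (not from the original
         * source, and assuming the standard Windows speaker bits): for a 5.1
         * capture stream (6 channels, dwChannelMask 0x3F) the initial chanmask
         * is 0x3F. lfemask is 0xF, so popcount(0x3F & 0xF) - 1 = 3 identifies
         * the LFE as the fourth interleaved channel, and clearing that bit
         * leaves chanmask 0x37 so the mono downmix ignores the LFE channel.
         */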
        mChannelConv = ChannelConverter{srcType, InputType.Format.nChannels, chanmask,
            mDevice->FmtChans};
        TRACE("Created %s multichannel-to-mono converter\n", DevFmtTypeString(srcType));
        /* The channel converter always outputs float, so change the input type
         * for the resampler/type-converter.
         */
        srcType = DevFmtFloat;
    }
    else if(mDevice->FmtChans == DevFmtStereo && InputType.Format.nChannels == 1)
    {
        mChannelConv = ChannelConverter{srcType, 1, 0x1, mDevice->FmtChans};
        TRACE("Created %s mono-to-stereo converter\n", DevFmtTypeString(srcType));
        srcType = DevFmtFloat;
    }

    if(mDevice->Frequency != InputType.Format.nSamplesPerSec || mDevice->FmtType != srcType)
    {
        mSampleConv = CreateSampleConverter(srcType, mDevice->FmtType, mDevice->channelsFromFmt(),
            InputType.Format.nSamplesPerSec, mDevice->Frequency, Resampler::FastBSinc24);
        if(!mSampleConv)
        {
            ERR("Failed to create converter for %s format, dst: %s %uhz, src: %s %luhz\n",
                DevFmtChannelsString(mDevice->FmtChans), DevFmtTypeString(mDevice->FmtType),
                mDevice->Frequency, DevFmtTypeString(srcType), InputType.Format.nSamplesPerSec);
            return E_FAIL;
        }
        TRACE("Created converter for %s format, dst: %s %uhz, src: %s %luhz\n",
            DevFmtChannelsString(mDevice->FmtChans), DevFmtTypeString(mDevice->FmtType),
            mDevice->Frequency, DevFmtTypeString(srcType), InputType.Format.nSamplesPerSec);
    }

    hr = mClient->Initialize(AUDCLNT_SHAREMODE_SHARED, AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
        buf_time.count(), 0, &InputType.Format, nullptr);
    if(FAILED(hr))
    {
        ERR("Failed to initialize audio client: 0x%08lx\n", hr);
        return hr;
    }

    UINT32 buffer_len{};
    ReferenceTime min_per{};
    hr = mClient->GetDevicePeriod(&reinterpret_cast<REFERENCE_TIME&>(min_per), nullptr);
    if(SUCCEEDED(hr))
        hr = mClient->GetBufferSize(&buffer_len);
    if(FAILED(hr))
    {
        ERR("Failed to get buffer size: 0x%08lx\n", hr);
        return hr;
    }
    mDevice->UpdateSize = RefTime2Samples(min_per, mDevice->Frequency);
    mDevice->BufferSize = buffer_len;

    mRing = RingBuffer::Create(buffer_len, mDevice->frameSizeFromFmt(), false);

    hr = mClient->SetEventHandle(mNotifyEvent);
    if(FAILED(hr))
    {
        ERR("Failed to set event handle: 0x%08lx\n", hr);
        return hr;
    }

    return hr;
}


void WasapiCapture::start()
{
    const HRESULT hr{pushMessage(MsgType::StartDevice).get()};
    if(FAILED(hr))
        throw al::backend_exception{al::backend_error::DeviceError,
            "Failed to start recording: 0x%lx", hr};
}

HRESULT WasapiCapture::startProxy()
{
    ResetEvent(mNotifyEvent);

    HRESULT hr{mClient->Start()};
    if(FAILED(hr))
    {
        ERR("Failed to start audio client: 0x%08lx\n", hr);
        return hr;
    }

    void *ptr;
    hr = mClient->GetService(IID_IAudioCaptureClient, &ptr);
    if(SUCCEEDED(hr))
    {
        mCapture = ComPtr<IAudioCaptureClient>{static_cast<IAudioCaptureClient*>(ptr)};
        try {
            mKillNow.store(false, std::memory_order_release);
            mThread = std::thread{std::mem_fn(&WasapiCapture::recordProc), this};
        }
        catch(...) {
            mCapture = nullptr;
            ERR("Failed to start thread\n");
            hr = E_FAIL;
        }
    }

    if(FAILED(hr))
    {
        mClient->Stop();
        mClient->Reset();
    }

    return hr;
}


void WasapiCapture::stop()
{ pushMessage(MsgType::StopDevice).wait(); }

void WasapiCapture::stopProxy()
{
    if(!mCapture || !mThread.joinable())
        return;

    mKillNow.store(true, std::memory_order_release);
    mThread.join();

    mCapture = nullptr;
    mClient->Stop();
    mClient->Reset();
}


void WasapiCapture::captureSamples(al::byte *buffer, uint samples)
{ mRing->read(buffer, samples); }

uint WasapiCapture::availableSamples()
{ return static_cast<uint>(mRing->readSpace()); }

} // namespace


bool WasapiBackendFactory::init()
{
    static HRESULT InitResult{E_FAIL};
    if(FAILED(InitResult)) try
    {
        std::promise<HRESULT> promise;
        auto future = promise.get_future();

        std::thread{&WasapiProxy::messageHandler, &promise}.detach();
        InitResult = future.get();
    }
    catch(...) {
    }

    return SUCCEEDED(InitResult);
}

bool WasapiBackendFactory::querySupport(BackendType type)
{ return type == BackendType::Playback || type == BackendType::Capture; }

std::string WasapiBackendFactory::probe(BackendType type)
{
    std::string outnames;
    switch(type)
    {
    case BackendType::Playback:
        WasapiProxy::pushMessageStatic(MsgType::EnumeratePlayback).wait();
        for(const DevMap &entry : PlaybackDevices)
        {
            /* +1 to also append the null char (to ensure a null-separated list
             * and double-null terminated list).
             */
            outnames.append(DevNameHead).append(entry.name.c_str(), entry.name.length()+1);
        }
        break;

    case BackendType::Capture:
        WasapiProxy::pushMessageStatic(MsgType::EnumerateCapture).wait();
        for(const DevMap &entry : CaptureDevices)
            outnames.append(DevNameHead).append(entry.name.c_str(), entry.name.length()+1);
        break;
    }
    return outnames;
}
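
/* Illustrative note (not part of the original source): appending
 * name.length()+1 bytes via c_str() copies each name's terminating null, so a
 * probe result with two playback devices (made-up names) lays out as
 *   "OpenAL Soft on Speakers\0OpenAL Soft on Headphones\0"
 * and reading the result through the string's own c_str() supplies the second
 * trailing null that makes the list double-null terminated.
 */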
BackendPtr WasapiBackendFactory::createBackend(DeviceBase *device, BackendType type)
{
    if(type == BackendType::Playback)
        return BackendPtr{new WasapiPlayback{device}};
    if(type == BackendType::Capture)
        return BackendPtr{new WasapiCapture{device}};
    return nullptr;
}

BackendFactory &WasapiBackendFactory::getFactory()
{
    static WasapiBackendFactory factory{};
    return factory;
}
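
/* Minimal usage sketch (illustrative only, not part of the original source;
 * it assumes the caller already has a configured DeviceBase *device):
 *
 *     BackendFactory &factory{WasapiBackendFactory::getFactory()};
 *     if(factory.init() && factory.querySupport(BackendType::Playback))
 *     {
 *         BackendPtr backend{factory.createBackend(device, BackendType::Playback)};
 *         backend->open(nullptr); // default endpoint, routed via the message thread
 *         backend->reset();
 *         backend->start();
 *     }
 *
 * In the real library this sequencing is driven by the ALC device machinery
 * rather than called directly like this.
 */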