🛠️🐜 Antkeeper superbuild with dependencies included https://antkeeper.com


  1. /**
  2. * OpenAL cross platform audio library
  3. * Copyright (C) 2011 by authors.
  4. * This library is free software; you can redistribute it and/or
  5. * modify it under the terms of the GNU Library General Public
  6. * License as published by the Free Software Foundation; either
  7. * version 2 of the License, or (at your option) any later version.
  8. *
  9. * This library is distributed in the hope that it will be useful,
  10. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  12. * Library General Public License for more details.
  13. *
  14. * You should have received a copy of the GNU Library General Public
  15. * License along with this library; if not, write to the
  16. * Free Software Foundation, Inc.,
  17. * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
  18. * Or go to http://www.gnu.org/copyleft/lgpl.html
  19. */
  20. #include "config.h"
  21. #include "wasapi.h"
  22. #define WIN32_LEAN_AND_MEAN
  23. #include <windows.h>
  24. #include <stdlib.h>
  25. #include <stdio.h>
  26. #include <memory.h>
  27. #include <wtypes.h>
  28. #include <mmdeviceapi.h>
  29. #include <audioclient.h>
  30. #include <cguid.h>
  31. #include <devpropdef.h>
  32. #include <mmreg.h>
  33. #include <propsys.h>
  34. #include <propkey.h>
  35. #include <devpkey.h>
  36. #ifndef _WAVEFORMATEXTENSIBLE_
  37. #include <ks.h>
  38. #include <ksmedia.h>
  39. #endif
  40. #include <algorithm>
  41. #include <atomic>
  42. #include <chrono>
  43. #include <condition_variable>
  44. #include <cstring>
  45. #include <deque>
  46. #include <functional>
  47. #include <future>
  48. #include <mutex>
  49. #include <string>
  50. #include <thread>
  51. #include <vector>
  52. #include "albit.h"
  53. #include "alnumeric.h"
  54. #include "comptr.h"
  55. #include "core/converter.h"
  56. #include "core/device.h"
  57. #include "core/helpers.h"
  58. #include "core/logging.h"
  59. #include "ringbuffer.h"
  60. #include "strutils.h"
  61. #include "threads.h"
  62. /* Some headers seem to define these as macros for __uuidof, which is annoying
  63. * since some headers don't declare them at all. Hopefully the ifdef is enough
  64. * to tell if they need to be declared.
  65. */
  66. #ifndef KSDATAFORMAT_SUBTYPE_PCM
  67. DEFINE_GUID(KSDATAFORMAT_SUBTYPE_PCM, 0x00000001, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
  68. #endif
  69. #ifndef KSDATAFORMAT_SUBTYPE_IEEE_FLOAT
  70. DEFINE_GUID(KSDATAFORMAT_SUBTYPE_IEEE_FLOAT, 0x00000003, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
  71. #endif
  72. DEFINE_DEVPROPKEY(DEVPKEY_Device_FriendlyName, 0xa45c254e, 0xdf1c, 0x4efd, 0x80,0x20, 0x67,0xd1,0x46,0xa8,0x50,0xe0, 14);
  73. DEFINE_PROPERTYKEY(PKEY_AudioEndpoint_FormFactor, 0x1da5d803, 0xd492, 0x4edd, 0x8c,0x23, 0xe0,0xc0,0xff,0xee,0x7f,0x0e, 0);
  74. DEFINE_PROPERTYKEY(PKEY_AudioEndpoint_GUID, 0x1da5d803, 0xd492, 0x4edd, 0x8c, 0x23,0xe0, 0xc0,0xff,0xee,0x7f,0x0e, 4 );
  75. namespace {
  76. using std::chrono::milliseconds;
  77. using std::chrono::seconds;
  78. using ReferenceTime = std::chrono::duration<REFERENCE_TIME,std::ratio<1,10000000>>;
  79. inline constexpr ReferenceTime operator "" _reftime(unsigned long long int n) noexcept
  80. { return ReferenceTime{static_cast<REFERENCE_TIME>(n)}; }
  81. #define MONO SPEAKER_FRONT_CENTER
  82. #define STEREO (SPEAKER_FRONT_LEFT|SPEAKER_FRONT_RIGHT)
  83. #define QUAD (SPEAKER_FRONT_LEFT|SPEAKER_FRONT_RIGHT|SPEAKER_BACK_LEFT|SPEAKER_BACK_RIGHT)
  84. #define X5DOT1 (SPEAKER_FRONT_LEFT|SPEAKER_FRONT_RIGHT|SPEAKER_FRONT_CENTER|SPEAKER_LOW_FREQUENCY|SPEAKER_SIDE_LEFT|SPEAKER_SIDE_RIGHT)
  85. #define X5DOT1REAR (SPEAKER_FRONT_LEFT|SPEAKER_FRONT_RIGHT|SPEAKER_FRONT_CENTER|SPEAKER_LOW_FREQUENCY|SPEAKER_BACK_LEFT|SPEAKER_BACK_RIGHT)
  86. #define X6DOT1 (SPEAKER_FRONT_LEFT|SPEAKER_FRONT_RIGHT|SPEAKER_FRONT_CENTER|SPEAKER_LOW_FREQUENCY|SPEAKER_BACK_CENTER|SPEAKER_SIDE_LEFT|SPEAKER_SIDE_RIGHT)
  87. #define X7DOT1 (SPEAKER_FRONT_LEFT|SPEAKER_FRONT_RIGHT|SPEAKER_FRONT_CENTER|SPEAKER_LOW_FREQUENCY|SPEAKER_BACK_LEFT|SPEAKER_BACK_RIGHT|SPEAKER_SIDE_LEFT|SPEAKER_SIDE_RIGHT)
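/* MaskFromTopBits smears the highest set bit of a channel mask downward so
 * every bit at or below it becomes set. The *Mask constants below are thus
 * "coverage" masks: (chanmask&X71Mask) == X7DOT1 checks that, across the
 * speaker positions spanned by a 7.1 layout, exactly the 7.1 speakers are
 * present with no extra positions in between. With the standard Windows
 * speaker bits, MaskFromTopBits(X7DOT1) turns 0x63f into 0x7ff, for example.
 */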
  88. constexpr inline DWORD MaskFromTopBits(DWORD b) noexcept
  89. {
  90. b |= b>>1;
  91. b |= b>>2;
  92. b |= b>>4;
  93. b |= b>>8;
  94. b |= b>>16;
  95. return b;
  96. }
  97. constexpr DWORD MonoMask{MaskFromTopBits(MONO)};
  98. constexpr DWORD StereoMask{MaskFromTopBits(STEREO)};
  99. constexpr DWORD QuadMask{MaskFromTopBits(QUAD)};
  100. constexpr DWORD X51Mask{MaskFromTopBits(X5DOT1)};
  101. constexpr DWORD X51RearMask{MaskFromTopBits(X5DOT1REAR)};
  102. constexpr DWORD X61Mask{MaskFromTopBits(X6DOT1)};
  103. constexpr DWORD X71Mask{MaskFromTopBits(X7DOT1)};
  104. constexpr char DevNameHead[] = "OpenAL Soft on ";
  105. constexpr size_t DevNameHeadLen{al::size(DevNameHead) - 1};
  106. /* Scales the given reftime value, rounding the result. */
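/* REFERENCE_TIME is in 100ns units, so e.g. a 10ms period (100,000 reftime
 * units) at 48000Hz yields 480 sample frames.
 */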
  107. inline uint RefTime2Samples(const ReferenceTime &val, uint srate)
  108. {
  109. const auto retval = (val*srate + ReferenceTime{seconds{1}}/2) / seconds{1};
  110. return static_cast<uint>(mini64(retval, std::numeric_limits<uint>::max()));
  111. }
  112. class GuidPrinter {
  113. char mMsg[64];
  114. public:
  115. GuidPrinter(const GUID &guid)
  116. {
  117. std::snprintf(mMsg, al::size(mMsg), "{%08lx-%04x-%04x-%02x%02x-%02x%02x%02x%02x%02x%02x}",
  118. DWORD{guid.Data1}, guid.Data2, guid.Data3, guid.Data4[0], guid.Data4[1], guid.Data4[2],
  119. guid.Data4[3], guid.Data4[4], guid.Data4[5], guid.Data4[6], guid.Data4[7]);
  120. }
  121. const char *c_str() const { return mMsg; }
  122. };
  123. struct PropVariant {
  124. PROPVARIANT mProp;
  125. public:
  126. PropVariant() { PropVariantInit(&mProp); }
  127. ~PropVariant() { clear(); }
  128. void clear() { PropVariantClear(&mProp); }
  129. PROPVARIANT* get() noexcept { return &mProp; }
  130. PROPVARIANT& operator*() noexcept { return mProp; }
  131. const PROPVARIANT& operator*() const noexcept { return mProp; }
  132. PROPVARIANT* operator->() noexcept { return &mProp; }
  133. const PROPVARIANT* operator->() const noexcept { return &mProp; }
  134. };
  135. struct DevMap {
  136. std::string name;
  137. std::string endpoint_guid; // obtained from PKEY_AudioEndpoint_GUID; set to "Unknown Device GUID" if absent.
  138. std::wstring devid;
  139. template<typename T0, typename T1, typename T2>
  140. DevMap(T0&& name_, T1&& guid_, T2&& devid_)
  141. : name{std::forward<T0>(name_)}
  142. , endpoint_guid{std::forward<T1>(guid_)}
  143. , devid{std::forward<T2>(devid_)}
  144. { }
  145. };
  146. bool checkName(const al::vector<DevMap> &list, const std::string &name)
  147. {
  148. auto match_name = [&name](const DevMap &entry) -> bool
  149. { return entry.name == name; };
  150. return std::find_if(list.cbegin(), list.cend(), match_name) != list.cend();
  151. }
  152. al::vector<DevMap> PlaybackDevices;
  153. al::vector<DevMap> CaptureDevices;
  154. using NameGUIDPair = std::pair<std::string,std::string>;
  155. NameGUIDPair get_device_name_and_guid(IMMDevice *device)
  156. {
  157. static constexpr char UnknownName[]{"Unknown Device Name"};
  158. static constexpr char UnknownGuid[]{"Unknown Device GUID"};
  159. std::string name, guid;
  160. ComPtr<IPropertyStore> ps;
  161. HRESULT hr = device->OpenPropertyStore(STGM_READ, ps.getPtr());
  162. if(FAILED(hr))
  163. {
  164. WARN("OpenPropertyStore failed: 0x%08lx\n", hr);
  165. return std::make_pair(UnknownName, UnknownGuid);
  166. }
  167. PropVariant pvprop;
  168. hr = ps->GetValue(reinterpret_cast<const PROPERTYKEY&>(DEVPKEY_Device_FriendlyName), pvprop.get());
  169. if(FAILED(hr))
  170. {
  171. WARN("GetValue Device_FriendlyName failed: 0x%08lx\n", hr);
  172. name += UnknownName;
  173. }
  174. else if(pvprop->vt == VT_LPWSTR)
  175. name += wstr_to_utf8(pvprop->pwszVal);
  176. else
  177. {
  178. WARN("Unexpected PROPVARIANT type: 0x%04x\n", pvprop->vt);
  179. name += UnknownName;
  180. }
  181. pvprop.clear();
  182. hr = ps->GetValue(reinterpret_cast<const PROPERTYKEY&>(PKEY_AudioEndpoint_GUID), pvprop.get());
  183. if(FAILED(hr))
  184. {
  185. WARN("GetValue AudioEndpoint_GUID failed: 0x%08lx\n", hr);
  186. guid = UnknownGuid;
  187. }
  188. else if(pvprop->vt == VT_LPWSTR)
  189. guid = wstr_to_utf8(pvprop->pwszVal);
  190. else
  191. {
  192. WARN("Unexpected PROPVARIANT type: 0x%04x\n", pvprop->vt);
  193. guid = UnknownGuid;
  194. }
  195. return std::make_pair(std::move(name), std::move(guid));
  196. }
  197. EndpointFormFactor get_device_formfactor(IMMDevice *device)
  198. {
  199. ComPtr<IPropertyStore> ps;
  200. HRESULT hr{device->OpenPropertyStore(STGM_READ, ps.getPtr())};
  201. if(FAILED(hr))
  202. {
  203. WARN("OpenPropertyStore failed: 0x%08lx\n", hr);
  204. return UnknownFormFactor;
  205. }
  206. EndpointFormFactor formfactor{UnknownFormFactor};
  207. PropVariant pvform;
  208. hr = ps->GetValue(PKEY_AudioEndpoint_FormFactor, pvform.get());
  209. if(FAILED(hr))
  210. WARN("GetValue AudioEndpoint_FormFactor failed: 0x%08lx\n", hr);
  211. else if(pvform->vt == VT_UI4)
  212. formfactor = static_cast<EndpointFormFactor>(pvform->ulVal);
  213. else if(pvform->vt != VT_EMPTY)
  214. WARN("Unexpected PROPVARIANT type: 0x%04x\n", pvform->vt);
  215. return formfactor;
  216. }
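/* Adds the given device to the list unless its device ID is already present.
 * Duplicate friendly names are disambiguated by appending " #2", " #3", etc.
 */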
  217. void add_device(IMMDevice *device, const WCHAR *devid, al::vector<DevMap> &list)
  218. {
  219. for(auto &entry : list)
  220. {
  221. if(entry.devid == devid)
  222. return;
  223. }
  224. auto name_guid = get_device_name_and_guid(device);
  225. int count{1};
  226. std::string newname{name_guid.first};
  227. while(checkName(list, newname))
  228. {
  229. newname = name_guid.first;
  230. newname += " #";
  231. newname += std::to_string(++count);
  232. }
  233. list.emplace_back(std::move(newname), std::move(name_guid.second), devid);
  234. const DevMap &newentry = list.back();
  235. TRACE("Got device \"%s\", \"%s\", \"%ls\"\n", newentry.name.c_str(),
  236. newentry.endpoint_guid.c_str(), newentry.devid.c_str());
  237. }
  238. WCHAR *get_device_id(IMMDevice *device)
  239. {
  240. WCHAR *devid;
  241. const HRESULT hr{device->GetId(&devid)};
  242. if(FAILED(hr))
  243. {
  244. ERR("Failed to get device id: %lx\n", hr);
  245. return nullptr;
  246. }
  247. return devid;
  248. }
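/* Rebuilds the given device list: the default endpoint for the flow direction
 * is added first (so it appears at the front of the list), then every active
 * endpoint in the collection. add_device skips IDs that were already added.
 */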
  249. void probe_devices(IMMDeviceEnumerator *devenum, EDataFlow flowdir, al::vector<DevMap> &list)
  250. {
  251. al::vector<DevMap>{}.swap(list);
  252. ComPtr<IMMDeviceCollection> coll;
  253. HRESULT hr{devenum->EnumAudioEndpoints(flowdir, DEVICE_STATE_ACTIVE, coll.getPtr())};
  254. if(FAILED(hr))
  255. {
  256. ERR("Failed to enumerate audio endpoints: 0x%08lx\n", hr);
  257. return;
  258. }
  259. UINT count{0};
  260. hr = coll->GetCount(&count);
  261. if(SUCCEEDED(hr) && count > 0)
  262. list.reserve(count);
  263. ComPtr<IMMDevice> device;
  264. hr = devenum->GetDefaultAudioEndpoint(flowdir, eMultimedia, device.getPtr());
  265. if(SUCCEEDED(hr))
  266. {
  267. if(WCHAR *devid{get_device_id(device.get())})
  268. {
  269. add_device(device.get(), devid, list);
  270. CoTaskMemFree(devid);
  271. }
  272. device = nullptr;
  273. }
  274. for(UINT i{0};i < count;++i)
  275. {
  276. hr = coll->Item(i, device.getPtr());
  277. if(FAILED(hr)) continue;
  278. if(WCHAR *devid{get_device_id(device.get())})
  279. {
  280. add_device(device.get(), devid, list);
  281. CoTaskMemFree(devid);
  282. }
  283. device = nullptr;
  284. }
  285. }
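/* Copies a WAVEFORMATEX into a WAVEFORMATEXTENSIBLE, deriving the channel
 * mask and KSDATAFORMAT subtype for plain PCM and IEEE float formats. Returns
 * false for format tags it doesn't handle.
 */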
  286. bool MakeExtensible(WAVEFORMATEXTENSIBLE *out, const WAVEFORMATEX *in)
  287. {
  288. *out = WAVEFORMATEXTENSIBLE{};
  289. if(in->wFormatTag == WAVE_FORMAT_EXTENSIBLE)
  290. {
  291. *out = *CONTAINING_RECORD(in, const WAVEFORMATEXTENSIBLE, Format);
  292. out->Format.cbSize = sizeof(*out) - sizeof(out->Format);
  293. }
  294. else if(in->wFormatTag == WAVE_FORMAT_PCM)
  295. {
  296. out->Format = *in;
  297. out->Format.cbSize = 0;
  298. out->Samples.wValidBitsPerSample = out->Format.wBitsPerSample;
  299. if(out->Format.nChannels == 1)
  300. out->dwChannelMask = MONO;
  301. else if(out->Format.nChannels == 2)
  302. out->dwChannelMask = STEREO;
  303. else
  304. ERR("Unhandled PCM channel count: %d\n", out->Format.nChannels);
  305. out->SubFormat = KSDATAFORMAT_SUBTYPE_PCM;
  306. }
  307. else if(in->wFormatTag == WAVE_FORMAT_IEEE_FLOAT)
  308. {
  309. out->Format = *in;
  310. out->Format.cbSize = 0;
  311. out->Samples.wValidBitsPerSample = out->Format.wBitsPerSample;
  312. if(out->Format.nChannels == 1)
  313. out->dwChannelMask = MONO;
  314. else if(out->Format.nChannels == 2)
  315. out->dwChannelMask = STEREO;
  316. else
  317. ERR("Unhandled IEEE float channel count: %d\n", out->Format.nChannels);
  318. out->SubFormat = KSDATAFORMAT_SUBTYPE_IEEE_FLOAT;
  319. }
  320. else
  321. {
  322. ERR("Unhandled format tag: 0x%04x\n", in->wFormatTag);
  323. return false;
  324. }
  325. return true;
  326. }
  327. void TraceFormat(const char *msg, const WAVEFORMATEX *format)
  328. {
  329. constexpr size_t fmtex_extra_size{sizeof(WAVEFORMATEXTENSIBLE)-sizeof(WAVEFORMATEX)};
  330. if(format->wFormatTag == WAVE_FORMAT_EXTENSIBLE && format->cbSize >= fmtex_extra_size)
  331. {
  332. const WAVEFORMATEXTENSIBLE *fmtex{
  333. CONTAINING_RECORD(format, const WAVEFORMATEXTENSIBLE, Format)};
  334. TRACE("%s:\n"
  335. " FormatTag = 0x%04x\n"
  336. " Channels = %d\n"
  337. " SamplesPerSec = %lu\n"
  338. " AvgBytesPerSec = %lu\n"
  339. " BlockAlign = %d\n"
  340. " BitsPerSample = %d\n"
  341. " Size = %d\n"
  342. " Samples = %d\n"
  343. " ChannelMask = 0x%lx\n"
  344. " SubFormat = %s\n",
  345. msg, fmtex->Format.wFormatTag, fmtex->Format.nChannels, fmtex->Format.nSamplesPerSec,
  346. fmtex->Format.nAvgBytesPerSec, fmtex->Format.nBlockAlign, fmtex->Format.wBitsPerSample,
  347. fmtex->Format.cbSize, fmtex->Samples.wReserved, fmtex->dwChannelMask,
  348. GuidPrinter{fmtex->SubFormat}.c_str());
  349. }
  350. else
  351. TRACE("%s:\n"
  352. " FormatTag = 0x%04x\n"
  353. " Channels = %d\n"
  354. " SamplesPerSec = %lu\n"
  355. " AvgBytesPerSec = %lu\n"
  356. " BlockAlign = %d\n"
  357. " BitsPerSample = %d\n"
  358. " Size = %d\n",
  359. msg, format->wFormatTag, format->nChannels, format->nSamplesPerSec,
  360. format->nAvgBytesPerSec, format->nBlockAlign, format->wBitsPerSample, format->cbSize);
  361. }
  362. enum class MsgType {
  363. OpenDevice,
  364. ReopenDevice,
  365. ResetDevice,
  366. StartDevice,
  367. StopDevice,
  368. CloseDevice,
  369. EnumeratePlayback,
  370. EnumerateCapture,
  371. Count,
  372. QuitThread = Count
  373. };
  374. constexpr char MessageStr[static_cast<size_t>(MsgType::Count)][20]{
  375. "Open Device",
  376. "Reopen Device",
  377. "Reset Device",
  378. "Start Device",
  379. "Stop Device",
  380. "Close Device",
  381. "Enumerate Playback",
  382. "Enumerate Capture"
  383. };
  384. /* Proxy interface used by the message handler. */
  385. struct WasapiProxy {
  386. virtual ~WasapiProxy() = default;
  387. virtual HRESULT openProxy(const char *name) = 0;
  388. virtual void closeProxy() = 0;
  389. virtual HRESULT resetProxy() = 0;
  390. virtual HRESULT startProxy() = 0;
  391. virtual void stopProxy() = 0;
  392. struct Msg {
  393. MsgType mType;
  394. WasapiProxy *mProxy;
  395. const char *mParam;
  396. std::promise<HRESULT> mPromise;
  397. explicit operator bool() const noexcept { return mType != MsgType::QuitThread; }
  398. };
  399. static std::deque<Msg> mMsgQueue;
  400. static std::mutex mMsgQueueLock;
  401. static std::condition_variable mMsgQueueCond;
  402. std::future<HRESULT> pushMessage(MsgType type, const char *param=nullptr)
  403. {
  404. std::promise<HRESULT> promise;
  405. std::future<HRESULT> future{promise.get_future()};
  406. {
  407. std::lock_guard<std::mutex> _{mMsgQueueLock};
  408. mMsgQueue.emplace_back(Msg{type, this, param, std::move(promise)});
  409. }
  410. mMsgQueueCond.notify_one();
  411. return future;
  412. }
  413. static std::future<HRESULT> pushMessageStatic(MsgType type)
  414. {
  415. std::promise<HRESULT> promise;
  416. std::future<HRESULT> future{promise.get_future()};
  417. {
  418. std::lock_guard<std::mutex> _{mMsgQueueLock};
  419. mMsgQueue.emplace_back(Msg{type, nullptr, nullptr, std::move(promise)});
  420. }
  421. mMsgQueueCond.notify_one();
  422. return future;
  423. }
  424. static Msg popMessage()
  425. {
  426. std::unique_lock<std::mutex> lock{mMsgQueueLock};
  427. mMsgQueueCond.wait(lock, []{return !mMsgQueue.empty();});
  428. Msg msg{std::move(mMsgQueue.front())};
  429. mMsgQueue.pop_front();
  430. return msg;
  431. }
  432. static int messageHandler(std::promise<HRESULT> *promise);
  433. };
  434. std::deque<WasapiProxy::Msg> WasapiProxy::mMsgQueue;
  435. std::mutex WasapiProxy::mMsgQueueLock;
  436. std::condition_variable WasapiProxy::mMsgQueueCond;
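/* The message thread serializes all device/COM work pushed via pushMessage.
 * It first initializes COM and creates a throwaway IMMDeviceEnumerator just
 * to report whether WASAPI is usable, then loops on the queue. deviceCount
 * ref-counts open devices and enumeration requests so COM stays initialized
 * on this thread only while it's actually needed.
 */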
  437. int WasapiProxy::messageHandler(std::promise<HRESULT> *promise)
  438. {
  439. TRACE("Starting message thread\n");
  440. HRESULT cohr{CoInitializeEx(nullptr, COINIT_MULTITHREADED)};
  441. if(FAILED(cohr))
  442. {
  443. WARN("Failed to initialize COM: 0x%08lx\n", cohr);
  444. promise->set_value(cohr);
  445. return 0;
  446. }
  447. void *ptr{};
  448. HRESULT hr{CoCreateInstance(CLSID_MMDeviceEnumerator, nullptr, CLSCTX_INPROC_SERVER,
  449. IID_IMMDeviceEnumerator, &ptr)};
  450. if(FAILED(hr))
  451. {
  452. WARN("Failed to create IMMDeviceEnumerator instance: 0x%08lx\n", hr);
  453. promise->set_value(hr);
  454. CoUninitialize();
  455. return 0;
  456. }
  457. static_cast<IMMDeviceEnumerator*>(ptr)->Release();
  458. CoUninitialize();
  459. TRACE("Message thread initialization complete\n");
  460. promise->set_value(S_OK);
  461. promise = nullptr;
  462. TRACE("Starting message loop\n");
  463. uint deviceCount{0};
  464. while(Msg msg{popMessage()})
  465. {
  466. TRACE("Got message \"%s\" (0x%04x, this=%p, param=%p)\n",
  467. MessageStr[static_cast<size_t>(msg.mType)], static_cast<uint>(msg.mType),
  468. static_cast<void*>(msg.mProxy), static_cast<const void*>(msg.mParam));
  469. switch(msg.mType)
  470. {
  471. case MsgType::OpenDevice:
  472. hr = cohr = S_OK;
  473. if(++deviceCount == 1)
  474. hr = cohr = CoInitializeEx(nullptr, COINIT_MULTITHREADED);
  475. if(SUCCEEDED(hr))
  476. hr = msg.mProxy->openProxy(msg.mParam);
  477. msg.mPromise.set_value(hr);
  478. if(FAILED(hr))
  479. {
  480. if(--deviceCount == 0 && SUCCEEDED(cohr))
  481. CoUninitialize();
  482. }
  483. continue;
  484. case MsgType::ReopenDevice:
  485. hr = msg.mProxy->openProxy(msg.mParam);
  486. msg.mPromise.set_value(hr);
  487. continue;
  488. case MsgType::ResetDevice:
  489. hr = msg.mProxy->resetProxy();
  490. msg.mPromise.set_value(hr);
  491. continue;
  492. case MsgType::StartDevice:
  493. hr = msg.mProxy->startProxy();
  494. msg.mPromise.set_value(hr);
  495. continue;
  496. case MsgType::StopDevice:
  497. msg.mProxy->stopProxy();
  498. msg.mPromise.set_value(S_OK);
  499. continue;
  500. case MsgType::CloseDevice:
  501. msg.mProxy->closeProxy();
  502. msg.mPromise.set_value(S_OK);
  503. if(--deviceCount == 0)
  504. CoUninitialize();
  505. continue;
  506. case MsgType::EnumeratePlayback:
  507. case MsgType::EnumerateCapture:
  508. hr = cohr = S_OK;
  509. if(++deviceCount == 1)
  510. hr = cohr = CoInitializeEx(nullptr, COINIT_MULTITHREADED);
  511. if(SUCCEEDED(hr))
  512. hr = CoCreateInstance(CLSID_MMDeviceEnumerator, nullptr, CLSCTX_INPROC_SERVER,
  513. IID_IMMDeviceEnumerator, &ptr);
  514. if(FAILED(hr))
  515. msg.mPromise.set_value(hr);
  516. else
  517. {
  518. ComPtr<IMMDeviceEnumerator> enumerator{static_cast<IMMDeviceEnumerator*>(ptr)};
  519. if(msg.mType == MsgType::EnumeratePlayback)
  520. probe_devices(enumerator.get(), eRender, PlaybackDevices);
  521. else if(msg.mType == MsgType::EnumerateCapture)
  522. probe_devices(enumerator.get(), eCapture, CaptureDevices);
  523. msg.mPromise.set_value(S_OK);
  524. }
  525. if(--deviceCount == 0 && SUCCEEDED(cohr))
  526. CoUninitialize();
  527. continue;
  528. case MsgType::QuitThread:
  529. break;
  530. }
  531. ERR("Unexpected message: %u\n", static_cast<uint>(msg.mType));
  532. msg.mPromise.set_value(E_FAIL);
  533. }
  534. TRACE("Message loop finished\n");
  535. return 0;
  536. }
  537. struct WasapiPlayback final : public BackendBase, WasapiProxy {
  538. WasapiPlayback(DeviceBase *device) noexcept : BackendBase{device} { }
  539. ~WasapiPlayback() override;
  540. int mixerProc();
  541. void open(const char *name) override;
  542. HRESULT openProxy(const char *name) override;
  543. void closeProxy() override;
  544. bool reset() override;
  545. HRESULT resetProxy() override;
  546. void start() override;
  547. HRESULT startProxy() override;
  548. void stop() override;
  549. void stopProxy() override;
  550. ClockLatency getClockLatency() override;
  551. HRESULT mOpenStatus{E_FAIL};
  552. ComPtr<IMMDevice> mMMDev{nullptr};
  553. ComPtr<IAudioClient> mClient{nullptr};
  554. ComPtr<IAudioRenderClient> mRender{nullptr};
  555. HANDLE mNotifyEvent{nullptr};
  556. UINT32 mFrameStep{0u};
  557. std::atomic<UINT32> mPadding{0u};
  558. std::mutex mMutex;
  559. std::atomic<bool> mKillNow{true};
  560. std::thread mThread;
  561. DEF_NEWDEL(WasapiPlayback)
  562. };
  563. WasapiPlayback::~WasapiPlayback()
  564. {
  565. if(SUCCEEDED(mOpenStatus))
  566. pushMessage(MsgType::CloseDevice).wait();
  567. mOpenStatus = E_FAIL;
  568. if(mNotifyEvent != nullptr)
  569. CloseHandle(mNotifyEvent);
  570. mNotifyEvent = nullptr;
  571. }
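/* Event-driven mixer loop: GetCurrentPadding reports how many frames are
 * still queued on the device; when at least UpdateSize frames of the shared
 * buffer are free, that much is rendered via GetBuffer/ReleaseBuffer,
 * otherwise the thread waits on the event WASAPI signals each period.
 */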
  572. FORCE_ALIGN int WasapiPlayback::mixerProc()
  573. {
  574. HRESULT hr{CoInitializeEx(nullptr, COINIT_MULTITHREADED)};
  575. if(FAILED(hr))
  576. {
  577. ERR("CoInitializeEx(nullptr, COINIT_MULTITHREADED) failed: 0x%08lx\n", hr);
  578. mDevice->handleDisconnect("COM init failed: 0x%08lx", hr);
  579. return 1;
  580. }
  581. SetRTPriority();
  582. althrd_setname(MIXER_THREAD_NAME);
  583. const uint update_size{mDevice->UpdateSize};
  584. const UINT32 buffer_len{mDevice->BufferSize};
  585. while(!mKillNow.load(std::memory_order_relaxed))
  586. {
  587. UINT32 written;
  588. hr = mClient->GetCurrentPadding(&written);
  589. if(FAILED(hr))
  590. {
  591. ERR("Failed to get padding: 0x%08lx\n", hr);
  592. mDevice->handleDisconnect("Failed to retrieve buffer padding: 0x%08lx", hr);
  593. break;
  594. }
  595. mPadding.store(written, std::memory_order_relaxed);
  596. uint len{buffer_len - written};
  597. if(len < update_size)
  598. {
  599. DWORD res{WaitForSingleObjectEx(mNotifyEvent, 2000, FALSE)};
  600. if(res != WAIT_OBJECT_0)
  601. ERR("WaitForSingleObjectEx error: 0x%lx\n", res);
  602. continue;
  603. }
  604. BYTE *buffer;
  605. hr = mRender->GetBuffer(len, &buffer);
  606. if(SUCCEEDED(hr))
  607. {
  608. {
  609. std::lock_guard<std::mutex> _{mMutex};
  610. mDevice->renderSamples(buffer, len, mFrameStep);
  611. mPadding.store(written + len, std::memory_order_relaxed);
  612. }
  613. hr = mRender->ReleaseBuffer(len, 0);
  614. }
  615. if(FAILED(hr))
  616. {
  617. ERR("Failed to buffer data: 0x%08lx\n", hr);
  618. mDevice->handleDisconnect("Failed to send playback samples: 0x%08lx", hr);
  619. break;
  620. }
  621. }
  622. mPadding.store(0u, std::memory_order_release);
  623. CoUninitialize();
  624. return 0;
  625. }
  626. void WasapiPlayback::open(const char *name)
  627. {
  628. HRESULT hr{S_OK};
  629. if(!mNotifyEvent)
  630. {
  631. mNotifyEvent = CreateEventW(nullptr, FALSE, FALSE, nullptr);
  632. if(mNotifyEvent == nullptr)
  633. {
  634. ERR("Failed to create notify events: %lu\n", GetLastError());
  635. hr = E_FAIL;
  636. }
  637. }
  638. if(SUCCEEDED(hr))
  639. {
  640. if(name)
  641. {
  642. if(PlaybackDevices.empty())
  643. pushMessage(MsgType::EnumeratePlayback);
  644. if(std::strncmp(name, DevNameHead, DevNameHeadLen) == 0)
  645. {
  646. name += DevNameHeadLen;
  647. if(*name == '\0')
  648. name = nullptr;
  649. }
  650. }
  651. if(SUCCEEDED(mOpenStatus))
  652. hr = pushMessage(MsgType::ReopenDevice, name).get();
  653. else
  654. {
  655. hr = pushMessage(MsgType::OpenDevice, name).get();
  656. mOpenStatus = hr;
  657. }
  658. }
  659. if(FAILED(hr))
  660. throw al::backend_exception{al::backend_error::DeviceError, "Device init failed: 0x%08lx",
  661. hr};
  662. }
  663. HRESULT WasapiPlayback::openProxy(const char *name)
  664. {
  665. const wchar_t *devid{nullptr};
  666. if(name)
  667. {
  668. auto iter = std::find_if(PlaybackDevices.cbegin(), PlaybackDevices.cend(),
  669. [name](const DevMap &entry) -> bool
  670. { return entry.name == name || entry.endpoint_guid == name; });
  671. if(iter == PlaybackDevices.cend())
  672. {
  673. const std::wstring wname{utf8_to_wstr(name)};
  674. iter = std::find_if(PlaybackDevices.cbegin(), PlaybackDevices.cend(),
  675. [&wname](const DevMap &entry) -> bool
  676. { return entry.devid == wname; });
  677. }
  678. if(iter == PlaybackDevices.cend())
  679. {
  680. WARN("Failed to find device name matching \"%s\"\n", name);
  681. return E_FAIL;
  682. }
  683. name = iter->name.c_str();
  684. devid = iter->devid.c_str();
  685. }
  686. void *ptr;
  687. ComPtr<IMMDevice> mmdev;
  688. HRESULT hr{CoCreateInstance(CLSID_MMDeviceEnumerator, nullptr, CLSCTX_INPROC_SERVER,
  689. IID_IMMDeviceEnumerator, &ptr)};
  690. if(SUCCEEDED(hr))
  691. {
  692. ComPtr<IMMDeviceEnumerator> enumerator{static_cast<IMMDeviceEnumerator*>(ptr)};
  693. if(!devid)
  694. hr = enumerator->GetDefaultAudioEndpoint(eRender, eMultimedia, mmdev.getPtr());
  695. else
  696. hr = enumerator->GetDevice(devid, mmdev.getPtr());
  697. }
  698. if(FAILED(hr))
  699. {
  700. WARN("Failed to open device \"%s\"\n", name?name:"(default)");
  701. return hr;
  702. }
  703. mClient = nullptr;
  704. mMMDev = std::move(mmdev);
  705. if(name) mDevice->DeviceName = std::string{DevNameHead} + name;
  706. else mDevice->DeviceName = DevNameHead + get_device_name_and_guid(mMMDev.get()).first;
  707. return hr;
  708. }
  709. void WasapiPlayback::closeProxy()
  710. {
  711. mClient = nullptr;
  712. mMMDev = nullptr;
  713. }
  714. bool WasapiPlayback::reset()
  715. {
  716. HRESULT hr{pushMessage(MsgType::ResetDevice).get()};
  717. if(FAILED(hr))
  718. throw al::backend_exception{al::backend_error::DeviceError, "0x%08lx", hr};
  719. return true;
  720. }
  721. HRESULT WasapiPlayback::resetProxy()
  722. {
  723. mClient = nullptr;
  724. void *ptr;
  725. HRESULT hr{mMMDev->Activate(IID_IAudioClient, CLSCTX_INPROC_SERVER, nullptr, &ptr)};
  726. if(FAILED(hr))
  727. {
  728. ERR("Failed to reactivate audio client: 0x%08lx\n", hr);
  729. return hr;
  730. }
  731. mClient = ComPtr<IAudioClient>{static_cast<IAudioClient*>(ptr)};
  732. WAVEFORMATEX *wfx;
  733. hr = mClient->GetMixFormat(&wfx);
  734. if(FAILED(hr))
  735. {
  736. ERR("Failed to get mix format: 0x%08lx\n", hr);
  737. return hr;
  738. }
  739. TraceFormat("Device mix format", wfx);
  740. WAVEFORMATEXTENSIBLE OutputType;
  741. if(!MakeExtensible(&OutputType, wfx))
  742. {
  743. CoTaskMemFree(wfx);
  744. return E_FAIL;
  745. }
  746. CoTaskMemFree(wfx);
  747. wfx = nullptr;
  748. const ReferenceTime per_time{ReferenceTime{seconds{mDevice->UpdateSize}} / mDevice->Frequency};
  749. const ReferenceTime buf_time{ReferenceTime{seconds{mDevice->BufferSize}} / mDevice->Frequency};
  750. bool isRear51{false};
  751. if(!mDevice->Flags.test(FrequencyRequest))
  752. mDevice->Frequency = OutputType.Format.nSamplesPerSec;
  753. if(!mDevice->Flags.test(ChannelsRequest))
  754. {
  755. /* If not requesting a channel configuration, auto-select given what
  756. * fits the mask's lsb (to ensure no gaps in the output channels). If
  757. * there's no mask, we can only assume mono or stereo.
  758. */
  759. const uint32_t chancount{OutputType.Format.nChannels};
  760. const DWORD chanmask{OutputType.dwChannelMask};
  761. if(chancount >= 8 && (chanmask&X71Mask) == X7DOT1)
  762. mDevice->FmtChans = DevFmtX71;
  763. else if(chancount >= 7 && (chanmask&X61Mask) == X6DOT1)
  764. mDevice->FmtChans = DevFmtX61;
  765. else if(chancount >= 6 && (chanmask&X51Mask) == X5DOT1)
  766. mDevice->FmtChans = DevFmtX51;
  767. else if(chancount >= 6 && (chanmask&X51RearMask) == X5DOT1REAR)
  768. {
  769. mDevice->FmtChans = DevFmtX51;
  770. isRear51 = true;
  771. }
  772. else if(chancount >= 4 && (chanmask&QuadMask) == QUAD)
  773. mDevice->FmtChans = DevFmtQuad;
  774. else if(chancount >= 2 && ((chanmask&StereoMask) == STEREO || !chanmask))
  775. mDevice->FmtChans = DevFmtStereo;
  776. else if(chancount >= 1 && ((chanmask&MonoMask) == MONO || !chanmask))
  777. mDevice->FmtChans = DevFmtMono;
  778. else
  779. ERR("Unhandled channel config: %d -- 0x%08lx\n", chancount, chanmask);
  780. }
  781. else
  782. {
  783. const uint32_t chancount{OutputType.Format.nChannels};
  784. const DWORD chanmask{OutputType.dwChannelMask};
  785. isRear51 = (chancount == 6 && (chanmask&X51RearMask) == X5DOT1REAR);
  786. }
  787. OutputType.Format.wFormatTag = WAVE_FORMAT_EXTENSIBLE;
  788. switch(mDevice->FmtChans)
  789. {
  790. case DevFmtMono:
  791. OutputType.Format.nChannels = 1;
  792. OutputType.dwChannelMask = MONO;
  793. break;
  794. case DevFmtAmbi3D:
  795. mDevice->FmtChans = DevFmtStereo;
  796. /*fall-through*/
  797. case DevFmtStereo:
  798. OutputType.Format.nChannels = 2;
  799. OutputType.dwChannelMask = STEREO;
  800. break;
  801. case DevFmtQuad:
  802. OutputType.Format.nChannels = 4;
  803. OutputType.dwChannelMask = QUAD;
  804. break;
  805. case DevFmtX51:
  806. OutputType.Format.nChannels = 6;
  807. OutputType.dwChannelMask = isRear51 ? X5DOT1REAR : X5DOT1;
  808. break;
  809. case DevFmtX61:
  810. OutputType.Format.nChannels = 7;
  811. OutputType.dwChannelMask = X6DOT1;
  812. break;
  813. case DevFmtX71:
  814. case DevFmtX3D71:
  815. OutputType.Format.nChannels = 8;
  816. OutputType.dwChannelMask = X7DOT1;
  817. break;
  818. }
  819. switch(mDevice->FmtType)
  820. {
  821. case DevFmtByte:
  822. mDevice->FmtType = DevFmtUByte;
  823. /* fall-through */
  824. case DevFmtUByte:
  825. OutputType.Format.wBitsPerSample = 8;
  826. OutputType.Samples.wValidBitsPerSample = 8;
  827. OutputType.SubFormat = KSDATAFORMAT_SUBTYPE_PCM;
  828. break;
  829. case DevFmtUShort:
  830. mDevice->FmtType = DevFmtShort;
  831. /* fall-through */
  832. case DevFmtShort:
  833. OutputType.Format.wBitsPerSample = 16;
  834. OutputType.Samples.wValidBitsPerSample = 16;
  835. OutputType.SubFormat = KSDATAFORMAT_SUBTYPE_PCM;
  836. break;
  837. case DevFmtUInt:
  838. mDevice->FmtType = DevFmtInt;
  839. /* fall-through */
  840. case DevFmtInt:
  841. OutputType.Format.wBitsPerSample = 32;
  842. OutputType.Samples.wValidBitsPerSample = 32;
  843. OutputType.SubFormat = KSDATAFORMAT_SUBTYPE_PCM;
  844. break;
  845. case DevFmtFloat:
  846. OutputType.Format.wBitsPerSample = 32;
  847. OutputType.Samples.wValidBitsPerSample = 32;
  848. OutputType.SubFormat = KSDATAFORMAT_SUBTYPE_IEEE_FLOAT;
  849. break;
  850. }
  851. OutputType.Format.nSamplesPerSec = mDevice->Frequency;
  852. OutputType.Format.nBlockAlign = static_cast<WORD>(OutputType.Format.nChannels *
  853. OutputType.Format.wBitsPerSample / 8);
  854. OutputType.Format.nAvgBytesPerSec = OutputType.Format.nSamplesPerSec *
  855. OutputType.Format.nBlockAlign;
  856. TraceFormat("Requesting playback format", &OutputType.Format);
  857. hr = mClient->IsFormatSupported(AUDCLNT_SHAREMODE_SHARED, &OutputType.Format, &wfx);
  858. if(FAILED(hr))
  859. {
  860. ERR("Failed to check format support: 0x%08lx\n", hr);
  861. hr = mClient->GetMixFormat(&wfx);
  862. }
  863. if(FAILED(hr))
  864. {
  865. ERR("Failed to find a supported format: 0x%08lx\n", hr);
  866. return hr;
  867. }
  868. if(wfx != nullptr)
  869. {
  870. TraceFormat("Got playback format", wfx);
  871. if(!MakeExtensible(&OutputType, wfx))
  872. {
  873. CoTaskMemFree(wfx);
  874. return E_FAIL;
  875. }
  876. CoTaskMemFree(wfx);
  877. wfx = nullptr;
  878. mDevice->Frequency = OutputType.Format.nSamplesPerSec;
  879. const uint32_t chancount{OutputType.Format.nChannels};
  880. const DWORD chanmask{OutputType.dwChannelMask};
  881. /* Don't update the channel format if the requested format fits what's
  882. * supported.
  883. */
  884. bool chansok{false};
  885. if(mDevice->Flags.test(ChannelsRequest))
  886. {
  887. /* When requesting a channel configuration, make sure it fits the
  888. * mask's lsb (to ensure no gaps in the output channels). If
  889. * there's no mask, assume the request fits with enough channels.
  890. */
  891. switch(mDevice->FmtChans)
  892. {
  893. case DevFmtMono:
  894. chansok = (chancount >= 1 && ((chanmask&MonoMask) == MONO || !chanmask));
  895. break;
  896. case DevFmtStereo:
  897. chansok = (chancount >= 2 && ((chanmask&StereoMask) == STEREO || !chanmask));
  898. break;
  899. case DevFmtQuad:
  900. chansok = (chancount >= 4 && ((chanmask&QuadMask) == QUAD || !chanmask));
  901. break;
  902. case DevFmtX51:
  903. chansok = (chancount >= 6 && ((chanmask&X51Mask) == X5DOT1
  904. || (chanmask&X51RearMask) == X5DOT1REAR || !chanmask));
  905. break;
  906. case DevFmtX61:
  907. chansok = (chancount >= 7 && ((chanmask&X61Mask) == X6DOT1 || !chanmask));
  908. break;
  909. case DevFmtX71:
  910. case DevFmtX3D71:
  911. chansok = (chancount >= 8 && ((chanmask&X71Mask) == X7DOT1 || !chanmask));
  912. break;
  913. case DevFmtAmbi3D:
  914. break;
  915. }
  916. }
  917. if(!chansok)
  918. {
  919. if(chancount >= 8 && (chanmask&X71Mask) == X7DOT1)
  920. mDevice->FmtChans = DevFmtX71;
  921. else if(chancount >= 7 && (chanmask&X61Mask) == X6DOT1)
  922. mDevice->FmtChans = DevFmtX61;
  923. else if(chancount >= 6 && ((chanmask&X51Mask) == X5DOT1
  924. || (chanmask&X51RearMask) == X5DOT1REAR))
  925. mDevice->FmtChans = DevFmtX51;
  926. else if(chancount >= 4 && (chanmask&QuadMask) == QUAD)
  927. mDevice->FmtChans = DevFmtQuad;
  928. else if(chancount >= 2 && ((chanmask&StereoMask) == STEREO || !chanmask))
  929. mDevice->FmtChans = DevFmtStereo;
  930. else if(chancount >= 1 && ((chanmask&MonoMask) == MONO || !chanmask))
  931. mDevice->FmtChans = DevFmtMono;
  932. else
  933. {
  934. ERR("Unhandled extensible channels: %d -- 0x%08lx\n", OutputType.Format.nChannels,
  935. OutputType.dwChannelMask);
  936. mDevice->FmtChans = DevFmtStereo;
  937. OutputType.Format.nChannels = 2;
  938. OutputType.dwChannelMask = STEREO;
  939. }
  940. }
  941. if(IsEqualGUID(OutputType.SubFormat, KSDATAFORMAT_SUBTYPE_PCM))
  942. {
  943. if(OutputType.Format.wBitsPerSample == 8)
  944. mDevice->FmtType = DevFmtUByte;
  945. else if(OutputType.Format.wBitsPerSample == 16)
  946. mDevice->FmtType = DevFmtShort;
  947. else if(OutputType.Format.wBitsPerSample == 32)
  948. mDevice->FmtType = DevFmtInt;
  949. else
  950. {
  951. mDevice->FmtType = DevFmtShort;
  952. OutputType.Format.wBitsPerSample = 16;
  953. }
  954. }
  955. else if(IsEqualGUID(OutputType.SubFormat, KSDATAFORMAT_SUBTYPE_IEEE_FLOAT))
  956. {
  957. mDevice->FmtType = DevFmtFloat;
  958. OutputType.Format.wBitsPerSample = 32;
  959. }
  960. else
  961. {
  962. ERR("Unhandled format sub-type: %s\n", GuidPrinter{OutputType.SubFormat}.c_str());
  963. mDevice->FmtType = DevFmtShort;
  964. if(OutputType.Format.wFormatTag != WAVE_FORMAT_EXTENSIBLE)
  965. OutputType.Format.wFormatTag = WAVE_FORMAT_PCM;
  966. OutputType.Format.wBitsPerSample = 16;
  967. OutputType.SubFormat = KSDATAFORMAT_SUBTYPE_PCM;
  968. }
  969. OutputType.Samples.wValidBitsPerSample = OutputType.Format.wBitsPerSample;
  970. }
  971. mFrameStep = OutputType.Format.nChannels;
  972. const EndpointFormFactor formfactor{get_device_formfactor(mMMDev.get())};
  973. mDevice->Flags.set(DirectEar, (formfactor == Headphones || formfactor == Headset));
  974. setDefaultWFXChannelOrder();
  975. hr = mClient->Initialize(AUDCLNT_SHAREMODE_SHARED, AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
  976. buf_time.count(), 0, &OutputType.Format, nullptr);
  977. if(FAILED(hr))
  978. {
  979. ERR("Failed to initialize audio client: 0x%08lx\n", hr);
  980. return hr;
  981. }
  982. UINT32 buffer_len{};
  983. ReferenceTime min_per{};
  984. hr = mClient->GetDevicePeriod(&reinterpret_cast<REFERENCE_TIME&>(min_per), nullptr);
  985. if(SUCCEEDED(hr))
  986. hr = mClient->GetBufferSize(&buffer_len);
  987. if(FAILED(hr))
  988. {
  989. ERR("Failed to get audio buffer info: 0x%08lx\n", hr);
  990. return hr;
  991. }
  992. /* Find the nearest multiple of the period size to the update size */
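/* E.g. a requested 21ms update with a 10ms device period rounds to two
 * periods (20ms), and the result is capped at half the allocated buffer.
 */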
  993. if(min_per < per_time)
  994. min_per *= maxi64((per_time + min_per/2) / min_per, 1);
  995. mDevice->UpdateSize = minu(RefTime2Samples(min_per, mDevice->Frequency), buffer_len/2);
  996. mDevice->BufferSize = buffer_len;
  997. hr = mClient->SetEventHandle(mNotifyEvent);
  998. if(FAILED(hr))
  999. {
  1000. ERR("Failed to set event handle: 0x%08lx\n", hr);
  1001. return hr;
  1002. }
  1003. return hr;
  1004. }
  1005. void WasapiPlayback::start()
  1006. {
  1007. const HRESULT hr{pushMessage(MsgType::StartDevice).get()};
  1008. if(FAILED(hr))
  1009. throw al::backend_exception{al::backend_error::DeviceError,
  1010. "Failed to start playback: 0x%lx", hr};
  1011. }
  1012. HRESULT WasapiPlayback::startProxy()
  1013. {
  1014. ResetEvent(mNotifyEvent);
  1015. HRESULT hr{mClient->Start()};
  1016. if(FAILED(hr))
  1017. {
  1018. ERR("Failed to start audio client: 0x%08lx\n", hr);
  1019. return hr;
  1020. }
  1021. void *ptr;
  1022. hr = mClient->GetService(IID_IAudioRenderClient, &ptr);
  1023. if(SUCCEEDED(hr))
  1024. {
  1025. mRender = ComPtr<IAudioRenderClient>{static_cast<IAudioRenderClient*>(ptr)};
  1026. try {
  1027. mKillNow.store(false, std::memory_order_release);
  1028. mThread = std::thread{std::mem_fn(&WasapiPlayback::mixerProc), this};
  1029. }
  1030. catch(...) {
  1031. mRender = nullptr;
  1032. ERR("Failed to start thread\n");
  1033. hr = E_FAIL;
  1034. }
  1035. }
  1036. if(FAILED(hr))
  1037. mClient->Stop();
  1038. return hr;
  1039. }
  1040. void WasapiPlayback::stop()
  1041. { pushMessage(MsgType::StopDevice).wait(); }
  1042. void WasapiPlayback::stopProxy()
  1043. {
  1044. if(!mRender || !mThread.joinable())
  1045. return;
  1046. mKillNow.store(true, std::memory_order_release);
  1047. mThread.join();
  1048. mRender = nullptr;
  1049. mClient->Stop();
  1050. }
  1051. ClockLatency WasapiPlayback::getClockLatency()
  1052. {
  1053. ClockLatency ret;
  1054. std::lock_guard<std::mutex> _{mMutex};
  1055. ret.ClockTime = GetDeviceClockTime(mDevice);
  1056. ret.Latency = std::chrono::seconds{mPadding.load(std::memory_order_relaxed)};
  1057. ret.Latency /= mDevice->Frequency;
  1058. return ret;
  1059. }
  1060. struct WasapiCapture final : public BackendBase, WasapiProxy {
  1061. WasapiCapture(DeviceBase *device) noexcept : BackendBase{device} { }
  1062. ~WasapiCapture() override;
  1063. int recordProc();
  1064. void open(const char *name) override;
  1065. HRESULT openProxy(const char *name) override;
  1066. void closeProxy() override;
  1067. HRESULT resetProxy() override;
  1068. void start() override;
  1069. HRESULT startProxy() override;
  1070. void stop() override;
  1071. void stopProxy() override;
  1072. void captureSamples(al::byte *buffer, uint samples) override;
  1073. uint availableSamples() override;
  1074. HRESULT mOpenStatus{E_FAIL};
  1075. ComPtr<IMMDevice> mMMDev{nullptr};
  1076. ComPtr<IAudioClient> mClient{nullptr};
  1077. ComPtr<IAudioCaptureClient> mCapture{nullptr};
  1078. HANDLE mNotifyEvent{nullptr};
  1079. ChannelConverter mChannelConv{};
  1080. SampleConverterPtr mSampleConv;
  1081. RingBufferPtr mRing;
  1082. std::atomic<bool> mKillNow{true};
  1083. std::thread mThread;
  1084. DEF_NEWDEL(WasapiCapture)
  1085. };
  1086. WasapiCapture::~WasapiCapture()
  1087. {
  1088. if(SUCCEEDED(mOpenStatus))
  1089. pushMessage(MsgType::CloseDevice).wait();
  1090. mOpenStatus = E_FAIL;
  1091. if(mNotifyEvent != nullptr)
  1092. CloseHandle(mNotifyEvent);
  1093. mNotifyEvent = nullptr;
  1094. }
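/* Capture loop: polls GetNextPacketSize and, when a packet is available,
 * pulls it with GetBuffer, runs it through the optional channel converter and
 * sample converter, and writes the result into the ring buffer before waiting
 * on the capture event again.
 */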
  1095. FORCE_ALIGN int WasapiCapture::recordProc()
  1096. {
  1097. HRESULT hr{CoInitializeEx(nullptr, COINIT_MULTITHREADED)};
  1098. if(FAILED(hr))
  1099. {
  1100. ERR("CoInitializeEx(nullptr, COINIT_MULTITHREADED) failed: 0x%08lx\n", hr);
  1101. mDevice->handleDisconnect("COM init failed: 0x%08lx", hr);
  1102. return 1;
  1103. }
  1104. althrd_setname(RECORD_THREAD_NAME);
  1105. al::vector<float> samples;
  1106. while(!mKillNow.load(std::memory_order_relaxed))
  1107. {
  1108. UINT32 avail;
  1109. hr = mCapture->GetNextPacketSize(&avail);
  1110. if(FAILED(hr))
  1111. ERR("Failed to get next packet size: 0x%08lx\n", hr);
  1112. else if(avail > 0)
  1113. {
  1114. UINT32 numsamples;
  1115. DWORD flags;
  1116. BYTE *rdata;
  1117. hr = mCapture->GetBuffer(&rdata, &numsamples, &flags, nullptr, nullptr);
  1118. if(FAILED(hr))
  1119. ERR("Failed to get capture buffer: 0x%08lx\n", hr);
  1120. else
  1121. {
  1122. if(mChannelConv.is_active())
  1123. {
  1124. samples.resize(numsamples*2);
  1125. mChannelConv.convert(rdata, samples.data(), numsamples);
  1126. rdata = reinterpret_cast<BYTE*>(samples.data());
  1127. }
  1128. auto data = mRing->getWriteVector();
  1129. size_t dstframes;
  1130. if(mSampleConv)
  1131. {
  1132. const void *srcdata{rdata};
  1133. uint srcframes{numsamples};
  1134. dstframes = mSampleConv->convert(&srcdata, &srcframes, data.first.buf,
  1135. static_cast<uint>(minz(data.first.len, INT_MAX)));
  1136. if(srcframes > 0 && dstframes == data.first.len && data.second.len > 0)
  1137. {
  1138. /* If some source samples remain, all of the first dest
  1139. * block was filled, and there's space in the second
  1140. * dest block, do another run for the second block.
  1141. */
  1142. dstframes += mSampleConv->convert(&srcdata, &srcframes, data.second.buf,
  1143. static_cast<uint>(minz(data.second.len, INT_MAX)));
  1144. }
  1145. }
  1146. else
  1147. {
  1148. const uint framesize{mDevice->frameSizeFromFmt()};
  1149. size_t len1{minz(data.first.len, numsamples)};
  1150. size_t len2{minz(data.second.len, numsamples-len1)};
  1151. memcpy(data.first.buf, rdata, len1*framesize);
  1152. if(len2 > 0)
  1153. memcpy(data.second.buf, rdata+len1*framesize, len2*framesize);
  1154. dstframes = len1 + len2;
  1155. }
  1156. mRing->writeAdvance(dstframes);
  1157. hr = mCapture->ReleaseBuffer(numsamples);
  1158. if(FAILED(hr)) ERR("Failed to release capture buffer: 0x%08lx\n", hr);
  1159. }
  1160. }
  1161. if(FAILED(hr))
  1162. {
  1163. mDevice->handleDisconnect("Failed to capture samples: 0x%08lx", hr);
  1164. break;
  1165. }
  1166. DWORD res{WaitForSingleObjectEx(mNotifyEvent, 2000, FALSE)};
  1167. if(res != WAIT_OBJECT_0)
  1168. ERR("WaitForSingleObjectEx error: 0x%lx\n", res);
  1169. }
  1170. CoUninitialize();
  1171. return 0;
  1172. }
  1173. void WasapiCapture::open(const char *name)
  1174. {
  1175. HRESULT hr{S_OK};
  1176. mNotifyEvent = CreateEventW(nullptr, FALSE, FALSE, nullptr);
  1177. if(mNotifyEvent == nullptr)
  1178. {
  1179. ERR("Failed to create notify event: %lu\n", GetLastError());
  1180. hr = E_FAIL;
  1181. }
  1182. if(SUCCEEDED(hr))
  1183. {
  1184. if(name)
  1185. {
  1186. if(CaptureDevices.empty())
  1187. pushMessage(MsgType::EnumerateCapture);
  1188. if(std::strncmp(name, DevNameHead, DevNameHeadLen) == 0)
  1189. {
  1190. name += DevNameHeadLen;
  1191. if(*name == '\0')
  1192. name = nullptr;
  1193. }
  1194. }
  1195. hr = pushMessage(MsgType::OpenDevice, name).get();
  1196. }
  1197. mOpenStatus = hr;
  1198. if(FAILED(hr))
  1199. throw al::backend_exception{al::backend_error::DeviceError, "Device init failed: 0x%08lx",
  1200. hr};
  1201. hr = pushMessage(MsgType::ResetDevice).get();
  1202. if(FAILED(hr))
  1203. {
  1204. if(hr == E_OUTOFMEMORY)
  1205. throw al::backend_exception{al::backend_error::OutOfMemory, "Out of memory"};
  1206. throw al::backend_exception{al::backend_error::DeviceError, "Device reset failed"};
  1207. }
  1208. }
  1209. HRESULT WasapiCapture::openProxy(const char *name)
  1210. {
  1211. const wchar_t *devid{nullptr};
  1212. if(name)
  1213. {
  1214. auto iter = std::find_if(CaptureDevices.cbegin(), CaptureDevices.cend(),
  1215. [name](const DevMap &entry) -> bool
  1216. { return entry.name == name || entry.endpoint_guid == name; });
  1217. if(iter == CaptureDevices.cend())
  1218. {
  1219. const std::wstring wname{utf8_to_wstr(name)};
  1220. iter = std::find_if(CaptureDevices.cbegin(), CaptureDevices.cend(),
  1221. [&wname](const DevMap &entry) -> bool
  1222. { return entry.devid == wname; });
  1223. }
  1224. if(iter == CaptureDevices.cend())
  1225. {
  1226. WARN("Failed to find device name matching \"%s\"\n", name);
  1227. return E_FAIL;
  1228. }
  1229. name = iter->name.c_str();
  1230. devid = iter->devid.c_str();
  1231. }
  1232. void *ptr;
  1233. HRESULT hr{CoCreateInstance(CLSID_MMDeviceEnumerator, nullptr, CLSCTX_INPROC_SERVER,
  1234. IID_IMMDeviceEnumerator, &ptr)};
  1235. if(SUCCEEDED(hr))
  1236. {
  1237. ComPtr<IMMDeviceEnumerator> enumerator{static_cast<IMMDeviceEnumerator*>(ptr)};
  1238. if(!devid)
  1239. hr = enumerator->GetDefaultAudioEndpoint(eCapture, eMultimedia, mMMDev.getPtr());
  1240. else
  1241. hr = enumerator->GetDevice(devid, mMMDev.getPtr());
  1242. }
  1243. if(FAILED(hr))
  1244. {
  1245. WARN("Failed to open device \"%s\"\n", name?name:"(default)");
  1246. return hr;
  1247. }
  1248. mClient = nullptr;
  1249. if(name) mDevice->DeviceName = std::string{DevNameHead} + name;
  1250. else mDevice->DeviceName = DevNameHead + get_device_name_and_guid(mMMDev.get()).first;
  1251. return hr;
  1252. }
  1253. void WasapiCapture::closeProxy()
  1254. {
  1255. mClient = nullptr;
  1256. mMMDev = nullptr;
  1257. }
  1258. HRESULT WasapiCapture::resetProxy()
  1259. {
  1260. mClient = nullptr;
  1261. void *ptr;
  1262. HRESULT hr{mMMDev->Activate(IID_IAudioClient, CLSCTX_INPROC_SERVER, nullptr, &ptr)};
  1263. if(FAILED(hr))
  1264. {
  1265. ERR("Failed to reactivate audio client: 0x%08lx\n", hr);
  1266. return hr;
  1267. }
  1268. mClient = ComPtr<IAudioClient>{static_cast<IAudioClient*>(ptr)};
  1269. WAVEFORMATEX *wfx;
  1270. hr = mClient->GetMixFormat(&wfx);
  1271. if(FAILED(hr))
  1272. {
  1273. ERR("Failed to get capture format: 0x%08lx\n", hr);
  1274. return hr;
  1275. }
  1276. TraceFormat("Device capture format", wfx);
  1277. WAVEFORMATEXTENSIBLE InputType{};
  1278. if(!MakeExtensible(&InputType, wfx))
  1279. {
  1280. CoTaskMemFree(wfx);
  1281. return E_FAIL;
  1282. }
  1283. CoTaskMemFree(wfx);
  1284. wfx = nullptr;
  1285. const bool isRear51{InputType.Format.nChannels == 6
  1286. && (InputType.dwChannelMask&X51RearMask) == X5DOT1REAR};
  1287. // Make sure buffer is at least 100ms in size
  1288. ReferenceTime buf_time{ReferenceTime{seconds{mDevice->BufferSize}} / mDevice->Frequency};
  1289. buf_time = std::max(buf_time, ReferenceTime{milliseconds{100}});
  1290. InputType = {};
  1291. InputType.Format.wFormatTag = WAVE_FORMAT_EXTENSIBLE;
  1292. switch(mDevice->FmtChans)
  1293. {
  1294. case DevFmtMono:
  1295. InputType.Format.nChannels = 1;
  1296. InputType.dwChannelMask = MONO;
  1297. break;
  1298. case DevFmtStereo:
  1299. InputType.Format.nChannels = 2;
  1300. InputType.dwChannelMask = STEREO;
  1301. break;
  1302. case DevFmtQuad:
  1303. InputType.Format.nChannels = 4;
  1304. InputType.dwChannelMask = QUAD;
  1305. break;
  1306. case DevFmtX51:
  1307. InputType.Format.nChannels = 6;
  1308. InputType.dwChannelMask = isRear51 ? X5DOT1REAR : X5DOT1;
  1309. break;
  1310. case DevFmtX61:
  1311. InputType.Format.nChannels = 7;
  1312. InputType.dwChannelMask = X6DOT1;
  1313. break;
  1314. case DevFmtX71:
  1315. InputType.Format.nChannels = 8;
  1316. InputType.dwChannelMask = X7DOT1;
  1317. break;
  1318. case DevFmtX3D71:
  1319. case DevFmtAmbi3D:
  1320. return E_FAIL;
  1321. }
  1322. switch(mDevice->FmtType)
  1323. {
  1324. /* NOTE: Signedness doesn't matter, the converter will handle it. */
  1325. case DevFmtByte:
  1326. case DevFmtUByte:
  1327. InputType.Format.wBitsPerSample = 8;
  1328. InputType.SubFormat = KSDATAFORMAT_SUBTYPE_PCM;
  1329. break;
  1330. case DevFmtShort:
  1331. case DevFmtUShort:
  1332. InputType.Format.wBitsPerSample = 16;
  1333. InputType.SubFormat = KSDATAFORMAT_SUBTYPE_PCM;
  1334. break;
  1335. case DevFmtInt:
  1336. case DevFmtUInt:
  1337. InputType.Format.wBitsPerSample = 32;
  1338. InputType.SubFormat = KSDATAFORMAT_SUBTYPE_PCM;
  1339. break;
  1340. case DevFmtFloat:
  1341. InputType.Format.wBitsPerSample = 32;
  1342. InputType.SubFormat = KSDATAFORMAT_SUBTYPE_IEEE_FLOAT;
  1343. break;
  1344. }
  1345. InputType.Samples.wValidBitsPerSample = InputType.Format.wBitsPerSample;
  1346. InputType.Format.nSamplesPerSec = mDevice->Frequency;
  1347. InputType.Format.nBlockAlign = static_cast<WORD>(InputType.Format.nChannels *
  1348. InputType.Format.wBitsPerSample / 8);
  1349. InputType.Format.nAvgBytesPerSec = InputType.Format.nSamplesPerSec *
  1350. InputType.Format.nBlockAlign;
  1351. InputType.Format.cbSize = sizeof(InputType) - sizeof(InputType.Format);
  1352. TraceFormat("Requesting capture format", &InputType.Format);
  1353. hr = mClient->IsFormatSupported(AUDCLNT_SHAREMODE_SHARED, &InputType.Format, &wfx);
  1354. if(FAILED(hr))
  1355. {
  1356. WARN("Failed to check format support: 0x%08lx\n", hr);
  1357. hr = mClient->GetMixFormat(&wfx);
  1358. }
  1359. if(FAILED(hr))
  1360. {
  1361. ERR("Failed to check format support: 0x%08lx\n", hr);
  1362. return hr;
  1363. }
  1364. mSampleConv = nullptr;
  1365. mChannelConv = {};
  1366. if(wfx != nullptr)
  1367. {
  1368. TraceFormat("Got capture format", wfx);
  1369. if(!MakeExtensible(&InputType, wfx))
  1370. {
  1371. CoTaskMemFree(wfx);
  1372. return E_FAIL;
  1373. }
  1374. CoTaskMemFree(wfx);
  1375. wfx = nullptr;
  1376. auto validate_fmt = [](DeviceBase *device, uint32_t chancount, DWORD chanmask) noexcept
  1377. -> bool
  1378. {
  1379. switch(device->FmtChans)
  1380. {
  1381. /* If the device wants mono, we can handle any input. */
  1382. case DevFmtMono:
  1383. return true;
  1384. /* If the device wants stereo, we can handle mono or stereo input. */
  1385. case DevFmtStereo:
  1386. return (chancount == 2 && (chanmask == 0 || (chanmask&StereoMask) == STEREO))
  1387. || (chancount == 1 && (chanmask&MonoMask) == MONO);
  1388. /* Otherwise, the device must match the input type. */
  1389. case DevFmtQuad:
  1390. return (chancount == 4 && (chanmask == 0 || (chanmask&QuadMask) == QUAD));
  1391. /* 5.1 (Side) and 5.1 (Rear) are interchangeable here. */
  1392. case DevFmtX51:
  1393. return (chancount == 6 && (chanmask == 0 || (chanmask&X51Mask) == X5DOT1
  1394. || (chanmask&X51RearMask) == X5DOT1REAR));
  1395. case DevFmtX61:
  1396. return (chancount == 7 && (chanmask == 0 || (chanmask&X61Mask) == X6DOT1));
  1397. case DevFmtX71:
  1398. case DevFmtX3D71:
  1399. return (chancount == 8 && (chanmask == 0 || (chanmask&X71Mask) == X7DOT1));
  1400. case DevFmtAmbi3D:
  1401. return (chanmask == 0 && chancount == device->channelsFromFmt());
  1402. }
  1403. return false;
  1404. };
  1405. if(!validate_fmt(mDevice, InputType.Format.nChannels, InputType.dwChannelMask))
  1406. {
  1407. ERR("Failed to match format, wanted: %s %s %uhz, got: 0x%08lx mask %d channel%s %d-bit %luhz\n",
  1408. DevFmtChannelsString(mDevice->FmtChans), DevFmtTypeString(mDevice->FmtType),
  1409. mDevice->Frequency, InputType.dwChannelMask, InputType.Format.nChannels,
  1410. (InputType.Format.nChannels==1)?"":"s", InputType.Format.wBitsPerSample,
  1411. InputType.Format.nSamplesPerSec);
  1412. return E_FAIL;
  1413. }
  1414. }
  1415. DevFmtType srcType{};
  1416. if(IsEqualGUID(InputType.SubFormat, KSDATAFORMAT_SUBTYPE_PCM))
  1417. {
  1418. if(InputType.Format.wBitsPerSample == 8)
  1419. srcType = DevFmtUByte;
  1420. else if(InputType.Format.wBitsPerSample == 16)
  1421. srcType = DevFmtShort;
  1422. else if(InputType.Format.wBitsPerSample == 32)
  1423. srcType = DevFmtInt;
  1424. else
  1425. {
  1426. ERR("Unhandled integer bit depth: %d\n", InputType.Format.wBitsPerSample);
  1427. return E_FAIL;
  1428. }
  1429. }
  1430. else if(IsEqualGUID(InputType.SubFormat, KSDATAFORMAT_SUBTYPE_IEEE_FLOAT))
  1431. {
  1432. if(InputType.Format.wBitsPerSample == 32)
  1433. srcType = DevFmtFloat;
  1434. else
  1435. {
  1436. ERR("Unhandled float bit depth: %d\n", InputType.Format.wBitsPerSample);
  1437. return E_FAIL;
  1438. }
  1439. }
  1440. else
  1441. {
  1442. ERR("Unhandled format sub-type: %s\n", GuidPrinter{InputType.SubFormat}.c_str());
  1443. return E_FAIL;
  1444. }
  1445. if(mDevice->FmtChans == DevFmtMono && InputType.Format.nChannels != 1)
  1446. {
  1447. uint chanmask{(1u<<InputType.Format.nChannels) - 1u};
  1448. /* Exclude LFE from the downmix. */
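/* lfeidx is LFE's position in the interleaved channel order: the number of
 * mask bits at or below SPEAKER_LOW_FREQUENCY, minus one. E.g. for a 5.1
 * input (FL FR FC LFE SL SR) that's index 3, so bit 3 is cleared from the
 * converter's channel mask.
 */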
  1449. if((InputType.dwChannelMask&SPEAKER_LOW_FREQUENCY))
  1450. {
  1451. constexpr auto lfemask = MaskFromTopBits(SPEAKER_LOW_FREQUENCY);
  1452. const int lfeidx{al::popcount(InputType.dwChannelMask&lfemask) - 1};
  1453. chanmask &= ~(1u << lfeidx);
  1454. }
  1455. mChannelConv = ChannelConverter{srcType, InputType.Format.nChannels, chanmask,
  1456. mDevice->FmtChans};
  1457. TRACE("Created %s multichannel-to-mono converter\n", DevFmtTypeString(srcType));
  1458. /* The channel converter always outputs float, so change the input type
  1459. * for the resampler/type-converter.
  1460. */
  1461. srcType = DevFmtFloat;
  1462. }
  1463. else if(mDevice->FmtChans == DevFmtStereo && InputType.Format.nChannels == 1)
  1464. {
  1465. mChannelConv = ChannelConverter{srcType, 1, 0x1, mDevice->FmtChans};
  1466. TRACE("Created %s mono-to-stereo converter\n", DevFmtTypeString(srcType));
  1467. srcType = DevFmtFloat;
  1468. }
  1469. if(mDevice->Frequency != InputType.Format.nSamplesPerSec || mDevice->FmtType != srcType)
  1470. {
  1471. mSampleConv = CreateSampleConverter(srcType, mDevice->FmtType, mDevice->channelsFromFmt(),
  1472. InputType.Format.nSamplesPerSec, mDevice->Frequency, Resampler::FastBSinc24);
  1473. if(!mSampleConv)
  1474. {
  1475. ERR("Failed to create converter for %s format, dst: %s %uhz, src: %s %luhz\n",
  1476. DevFmtChannelsString(mDevice->FmtChans), DevFmtTypeString(mDevice->FmtType),
  1477. mDevice->Frequency, DevFmtTypeString(srcType), InputType.Format.nSamplesPerSec);
  1478. return E_FAIL;
  1479. }
  1480. TRACE("Created converter for %s format, dst: %s %uhz, src: %s %luhz\n",
  1481. DevFmtChannelsString(mDevice->FmtChans), DevFmtTypeString(mDevice->FmtType),
  1482. mDevice->Frequency, DevFmtTypeString(srcType), InputType.Format.nSamplesPerSec);
  1483. }
  1484. hr = mClient->Initialize(AUDCLNT_SHAREMODE_SHARED, AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
  1485. buf_time.count(), 0, &InputType.Format, nullptr);
  1486. if(FAILED(hr))
  1487. {
  1488. ERR("Failed to initialize audio client: 0x%08lx\n", hr);
  1489. return hr;
  1490. }
  1491. UINT32 buffer_len{};
  1492. ReferenceTime min_per{};
  1493. hr = mClient->GetDevicePeriod(&reinterpret_cast<REFERENCE_TIME&>(min_per), nullptr);
  1494. if(SUCCEEDED(hr))
  1495. hr = mClient->GetBufferSize(&buffer_len);
  1496. if(FAILED(hr))
  1497. {
  1498. ERR("Failed to get buffer size: 0x%08lx\n", hr);
  1499. return hr;
  1500. }
  1501. mDevice->UpdateSize = RefTime2Samples(min_per, mDevice->Frequency);
  1502. mDevice->BufferSize = buffer_len;
  1503. mRing = RingBuffer::Create(buffer_len, mDevice->frameSizeFromFmt(), false);
  1504. hr = mClient->SetEventHandle(mNotifyEvent);
  1505. if(FAILED(hr))
  1506. {
  1507. ERR("Failed to set event handle: 0x%08lx\n", hr);
  1508. return hr;
  1509. }
  1510. return hr;
  1511. }
  1512. void WasapiCapture::start()
  1513. {
  1514. const HRESULT hr{pushMessage(MsgType::StartDevice).get()};
  1515. if(FAILED(hr))
  1516. throw al::backend_exception{al::backend_error::DeviceError,
  1517. "Failed to start recording: 0x%lx", hr};
  1518. }
  1519. HRESULT WasapiCapture::startProxy()
  1520. {
  1521. ResetEvent(mNotifyEvent);
  1522. HRESULT hr{mClient->Start()};
  1523. if(FAILED(hr))
  1524. {
  1525. ERR("Failed to start audio client: 0x%08lx\n", hr);
  1526. return hr;
  1527. }
  1528. void *ptr;
  1529. hr = mClient->GetService(IID_IAudioCaptureClient, &ptr);
  1530. if(SUCCEEDED(hr))
  1531. {
  1532. mCapture = ComPtr<IAudioCaptureClient>{static_cast<IAudioCaptureClient*>(ptr)};
  1533. try {
  1534. mKillNow.store(false, std::memory_order_release);
  1535. mThread = std::thread{std::mem_fn(&WasapiCapture::recordProc), this};
  1536. }
  1537. catch(...) {
  1538. mCapture = nullptr;
  1539. ERR("Failed to start thread\n");
  1540. hr = E_FAIL;
  1541. }
  1542. }
  1543. if(FAILED(hr))
  1544. {
  1545. mClient->Stop();
  1546. mClient->Reset();
  1547. }
  1548. return hr;
  1549. }
  1550. void WasapiCapture::stop()
  1551. { pushMessage(MsgType::StopDevice).wait(); }
  1552. void WasapiCapture::stopProxy()
  1553. {
  1554. if(!mCapture || !mThread.joinable())
  1555. return;
  1556. mKillNow.store(true, std::memory_order_release);
  1557. mThread.join();
  1558. mCapture = nullptr;
  1559. mClient->Stop();
  1560. mClient->Reset();
  1561. }
  1562. void WasapiCapture::captureSamples(al::byte *buffer, uint samples)
  1563. { mRing->read(buffer, samples); }
  1564. uint WasapiCapture::availableSamples()
  1565. { return static_cast<uint>(mRing->readSpace()); }
  1566. } // namespace
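/* Starts the shared message thread (detached) on first successful call; the
 * future reports whether COM and the device enumerator are available. The
 * thread then keeps servicing proxy messages for all WASAPI backends.
 */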
  1567. bool WasapiBackendFactory::init()
  1568. {
  1569. static HRESULT InitResult{E_FAIL};
  1570. if(FAILED(InitResult)) try
  1571. {
  1572. std::promise<HRESULT> promise;
  1573. auto future = promise.get_future();
  1574. std::thread{&WasapiProxy::messageHandler, &promise}.detach();
  1575. InitResult = future.get();
  1576. }
  1577. catch(...) {
  1578. }
  1579. return SUCCEEDED(InitResult);
  1580. }
  1581. bool WasapiBackendFactory::querySupport(BackendType type)
  1582. { return type == BackendType::Playback || type == BackendType::Capture; }
  1583. std::string WasapiBackendFactory::probe(BackendType type)
  1584. {
  1585. std::string outnames;
  1586. switch(type)
  1587. {
  1588. case BackendType::Playback:
  1589. WasapiProxy::pushMessageStatic(MsgType::EnumeratePlayback).wait();
  1590. for(const DevMap &entry : PlaybackDevices)
  1591. {
  1592. /* +1 to also append the null char (to ensure a null-separated,
  1593. * double-null-terminated list).
  1594. */
  1595. outnames.append(DevNameHead).append(entry.name.c_str(), entry.name.length()+1);
  1596. }
  1597. break;
  1598. case BackendType::Capture:
  1599. WasapiProxy::pushMessageStatic(MsgType::EnumerateCapture).wait();
  1600. for(const DevMap &entry : CaptureDevices)
  1601. outnames.append(DevNameHead).append(entry.name.c_str(), entry.name.length()+1);
  1602. break;
  1603. }
  1604. return outnames;
  1605. }
  1606. BackendPtr WasapiBackendFactory::createBackend(DeviceBase *device, BackendType type)
  1607. {
  1608. if(type == BackendType::Playback)
  1609. return BackendPtr{new WasapiPlayback{device}};
  1610. if(type == BackendType::Capture)
  1611. return BackendPtr{new WasapiCapture{device}};
  1612. return nullptr;
  1613. }
  1614. BackendFactory &WasapiBackendFactory::getFactory()
  1615. {
  1616. static WasapiBackendFactory factory{};
  1617. return factory;
  1618. }