
So I'm recording audio in Delphi using mciSendString. The code works fine, with one exception. I let the user set the parameters: channels, bits, frequency. On Windows 7 this works great. Now on Windows 10, I get MCIERR_WAVE_INPUTSINUSE.

If I drop the parameters to 1 channel, 8-bit, 8 kHz, it records fine. Anything above that, MCI rejects. According to my device, it should be capable of 1 channel, 16-bit, 48 kHz.

I'm guessing the microphone is being shared. Does anyone know how to give a Delphi program exclusive control so it can use the device's full capabilities?

I've done a lot of digging but haven't found anything useful.

Thanks

Here is the code I'm using:

 MRet := mciSendString(PChar('RECORD mysound'), NIL, 0, Handle);

It returns result code 322 (MCIERR_WAVE_INPUTSINUSE). I've tried it with and without the PChar cast.
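For context, before the RECORD call the device is opened and configured roughly like this (a simplified sketch; the alias and the exact parameter string are illustrative, not my production code):

 MRet := mciSendString(PChar('OPEN NEW TYPE waveaudio ALIAS mysound'), nil, 0, Handle);
 // Requesting the device's native format: 1 channel, 16-bit, 48 kHz.
 MRet := mciSendString(PChar('SET mysound CHANNELS 1 BITSPERSAMPLE 16 SAMPLESPERSEC 48000 BYTESPERSEC 96000 ALIGNMENT 2'), nil, 0, Handle);

Dropping the SET line back to CHANNELS 1 BITSPERSAMPLE 8 SAMPLESPERSEC 8000 is the only thing that makes the subsequent RECORD succeed.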

It seems to be related to Cortana. But since Cortana can't be completely disabled, getting exclusive control of the device seems like the only possible solution.


1 Answer


You can find an example of a simple Delphi application using WASAPI here:

https://web.archive.org/web/20130403023149/http://4coder.org/delphi-source-code/547/

Note that the original site is long gone, but the Wayback Machine still has all the code.

The code that does the actual recording is here:

// http://msdn.microsoft.com/en-us/library/ms678709(VS.85).aspx
procedure TInputRecordThread.Execute;
const
  REFTIMES_PER_SEC = 10000000;
  REFTIMES_PER_MILLISEC = 10000;
var
  MMDev: IMMDevice;
  MMDevEnum: IMMDeviceEnumerator;
  AudioClient: IAudioClient;
  CaptureClient: IAudioCaptureClient;
  PropVar: ^tag_inner_PROPVARIANT;
  hnsRequestedDuration, hnsActualDuration: Int64;
  pWfx, pCloseWfx: PWaveFormatEx;
  BufferFrameCount, NumFramesAvailable, Flags, StreamFlags, PacketLength, FrameSize: Cardinal;
  pData: PByte;
  uDummy: UInt64;
  Returned: HRESULT;
  Wave: TWaveImage;
  Empty: array of byte;
  pEx: PWaveFormatExtensible;
begin
  FreeOnTerminate := True;
  pCloseWfx := nil;
  uDummy := 0;
  PropVar := nil;

  CoInitializeEx(nil, COINIT_APARTMENTTHREADED);
  CoCreateInstance(CLASS_MMDeviceEnumerator,
    nil,
    CLSCTX_ALL,
    IID_IMMDeviceEnumerator,
    MMDevEnum);

  if FLoopback then
    Returned := MMDevEnum.GetDefaultAudioEndpoint(eRender, eConsole, MMDev)
  else
    Returned := MMDevEnum.GetDefaultAudioEndpoint(eCapture, eConsole, MMDev);

  if Returned <> S_OK then
  begin
    OleCheck(Returned);
    Exit;
  end;

  Returned := MMDev.Activate(IID_IAudioClient, CLSCTX_ALL, PropVar^, Pointer(AudioClient));
  if Returned <> S_OK then
  begin
    OleCheck(Returned);
    Exit;
  end;

  AudioClient.GetMixFormat(pWfx);
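  // pWfx now points at the engine's shared-mode mix format (typically
  // 32-bit IEEE float); the case statement below rewrites it as 16-bit PCM
  // so the captured data can be written straight into a WAV file.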

  // http://www.ambisonic.net/mulchaud.html
  case pWfx.wFormatTag of
    WAVE_FORMAT_IEEE_FLOAT:
      begin
        pWfx.wFormatTag := WAVE_FORMAT_PCM;
        pWfx.wBitsPerSample := 16;
        pWfx.nBlockAlign := pWfx.nChannels * pWfx.wBitsPerSample div 8;
        pWfx.nAvgBytesPerSec := pWfx.nBlockAlign * pWfx.nSamplesPerSec;
      end;
    WAVE_FORMAT_EXTENSIBLE:
      begin
        pEx := PWaveFormatExtensible(pWfx);
        if not IsEqualGUID(KSDATAFORMAT_SUBTYPE_IEEE_FLOAT, pEx.SubFormat) then
        begin
          Exit;
        end;

        pEx.SubFormat := KSDATAFORMAT_SUBTYPE_PCM;
        pEx.ValidBitsPerSample := 16;
        pWfx.wBitsPerSample := 16;
        pWfx.nBlockAlign := pWfx.nChannels * pWfx.wBitsPerSample div 8;
        pWfx.nAvgBytesPerSec := pWfx.nBlockAlign * pWfx.nSamplesPerSec;
      end;
    else Exit;
  end;

  if AudioClient.IsFormatSupported(AUDCLNT_SHAREMODE_SHARED, pWfx, pCloseWfx) <> S_OK then
  begin
    Exit;
  end;

  // Size of one frame in bytes (all channels).
  FrameSize := pWfx.wBitsPerSample * pWfx.nChannels div 8;

  hnsRequestedDuration := REFTIMES_PER_SEC;
  if FLoopback then
    StreamFlags := AUDCLNT_STREAMFLAGS_LOOPBACK
  else
    StreamFlags := 0;
  Returned := AudioClient.Initialize(AUDCLNT_SHAREMODE_SHARED,
    StreamFlags,
    hnsRequestedDuration,
    0,
    pWfx,
    nil);
  if Returned <> S_OK then
  begin
    Exit;
  end;

  AudioClient.GetBufferSize(BufferFrameCount);

  Returned := AudioClient.GetService(IID_IAudioCaptureClient, Pointer(CaptureClient));
  if Returned <> S_OK then
  begin
    Exit;
  end;

  // Calculate the actual duration of the allocated buffer.
  hnsActualDuration := REFTIMES_PER_SEC * BufferFrameCount div pWfx.nSamplesPerSec;

  // Start recording.
  AudioClient.Start();

  Wave := TWaveImage.Create(FData);
  try
    Wave.InitHeader(pWfx^);

    // Each loop fills about half of the shared buffer.
    while not Terminated do
    begin
      // Sleep for half the buffer duration.
      Sleep(hnsActualDuration div REFTIMES_PER_MILLISEC div 2);

      CaptureClient.GetNextPacketSize(PacketLength);

      while PacketLength <> 0 do
      begin
        // Get the available data in the shared buffer.
        pData := nil;
        Returned := CaptureClient.GetBuffer(pData,
          NumFramesAvailable,
          Flags,
          uDummy,
          uDummy);

        if Returned <> S_OK then
        begin
          Exit;
        end;

        if (Flags or Cardinal(AUDCLNT_BUFFERFLAGS_SILENT)) = Flags then
        begin
          pData := nil;  // Tell CopyData to write silence.
        end;

        if pData = nil then
        begin
          SetLength(Empty, NumFramesAvailable * FrameSize);
          FillChar(Empty[0], Length(Empty), 0);
          FData.Write(Empty[0], Length(Empty));
        end
        else
        begin
          // Save the data.
          FData.Write(pData^, NumFramesAvailable * FrameSize);
        end;

        CaptureClient.ReleaseBuffer(NumFramesAvailable);
        CaptureClient.GetNextPacketSize(PacketLength);
      end;
    end;

    // Stop recording.
    AudioClient.Stop();

    // Correct the WAV header.
    Wave.CorretHeader;
    FData.Position := 0;
  finally
    Wave.Free;

    if pWfx <> nil then
      CoTaskMemFree(pWfx);
  end;
end;
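Note that the sample above initializes the client in shared mode (AUDCLNT_SHAREMODE_SHARED). For the exclusive control you asked about, you would request AUDCLNT_SHAREMODE_EXCLUSIVE in the IsFormatSupported and Initialize calls instead. A rough, untested sketch of just that change (exclusive mode only works if the endpoint's "Allow applications to take exclusive control of this device" option is enabled, and it can fail with AUDCLNT_E_DEVICE_IN_USE if another client already holds the device):

  // Untested sketch: request exclusive access instead of shared mode.
  // In exclusive mode the format must be one the endpoint supports natively;
  // the mix format returned by GetMixFormat is not guaranteed to be accepted.
  if AudioClient.IsFormatSupported(AUDCLNT_SHAREMODE_EXCLUSIVE, pWfx, pCloseWfx) <> S_OK then
    Exit;

  Returned := AudioClient.Initialize(AUDCLNT_SHAREMODE_EXCLUSIVE,
    StreamFlags,
    hnsRequestedDuration,
    0,                 // device period; 0 asks for the default (see IAudioClient.GetDevicePeriod)
    pWfx,
    nil);

The rest of the capture loop via IAudioCaptureClient stays the same.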

You may also want to look at: http://torry.net/pages.php?id=167&sort=ID

Answered 2016-03-20T01:08:25.597