diff --git a/Docs/FadeInOutSampleProvider.md b/Docs/FadeInOutSampleProvider.md
index c267e608..32439977 100644
--- a/Docs/FadeInOutSampleProvider.md
+++ b/Docs/FadeInOutSampleProvider.md
@@ -14,6 +14,14 @@ var fade = new FadeInOutSampleProvider(audio, true);
fade.BeginFadeIn(2000);
```
+When the fade-in completes, the `FadeInComplete` event is raised. Subscribe to it before starting the fade:
+```c#
+fade.FadeInComplete += (sender, e) => {
+ Console.WriteLine("Fade in was done!");
+};
+fade.BeginFadeIn(2000);
+```
+
Now we can pass our `FadeInOutSampleProvider` to an output device and start playing. We'll hear the audio fading in over the first two seconds.
```c#
@@ -28,6 +36,15 @@ At some point in the future, we might want to fade out, and we can trigger that
fade.BeginFadeOut(2000);
```
+Similarly, the `FadeOutComplete` event is raised when the fade-out has completed:
+
+```c#
+fade.FadeOutComplete += (sender, e) => {
+ Console.WriteLine("Fade out was done!");
+};
+fade.BeginFadeOut(2000);
+```
+
Once the audio has faded out, the `FadeInOutSampleProvider` continues to read from its source but emits silence until it reaches its end, or until you call `BeginFadeIn` again.
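+
+If you need to know exactly when that point has been reached (for example, before starting the next track), you can wait on the `FadeOutComplete` event. Here is a minimal sketch; the `TaskCompletionSource` is our own suggestion and not part of NAudio:
+```c#
+var fadeDone = new TaskCompletionSource<bool>();
+fade.FadeOutComplete += (sender, e) => fadeDone.TrySetResult(true);
+fade.BeginFadeOut(2000);
+await fadeDone.Task; // completes once the fade-out has finished
+```
+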
### Taking it further
diff --git a/NAudio.Asio/ASIODriverExt.cs b/NAudio.Asio/ASIODriverExt.cs
index 85e0bccc..d1bb0cf7 100644
--- a/NAudio.Asio/ASIODriverExt.cs
+++ b/NAudio.Asio/ASIODriverExt.cs
@@ -31,6 +31,9 @@ public class AsioDriverExt
private int bufferSize;
private int outputChannelOffset;
private int inputChannelOffset;
+ ///
+ /// Reset Request Callback
+ ///
public Action ResetRequestCallback;
///
diff --git a/NAudio.Asio/NAudio.Asio.csproj b/NAudio.Asio/NAudio.Asio.csproj
index 90415ee9..1001b234 100644
--- a/NAudio.Asio/NAudio.Asio.csproj
+++ b/NAudio.Asio/NAudio.Asio.csproj
@@ -3,7 +3,7 @@
netstandard2.0
true
- 2.2.1
+ 2.2.1-SA
true
true
Mark Heath
diff --git a/NAudio.Core/FileFormats/Wav/WaveFileChunkReader.cs b/NAudio.Core/FileFormats/Wav/WaveFileChunkReader.cs
index e57376d0..a3a78078 100644
--- a/NAudio.Core/FileFormats/Wav/WaveFileChunkReader.cs
+++ b/NAudio.Core/FileFormats/Wav/WaveFileChunkReader.cs
@@ -1,13 +1,15 @@
using System;
using System.Collections.Generic;
-using System.Text;
+using System.Diagnostics;
using System.IO;
using NAudio.Utils;
using NAudio.Wave;
-using System.Diagnostics;
namespace NAudio.FileFormats.Wav
{
+ ///
+ /// Reader of RIFF chunks from a WAV file
+ ///
public class WaveFileChunkReader
{
private WaveFormat waveFormat;
@@ -19,12 +21,18 @@ public class WaveFileChunkReader
private readonly bool storeAllChunks;
private long riffSize;
+ ///
+ /// Creates a new WaveFileChunkReader
+ ///
public WaveFileChunkReader()
{
storeAllChunks = true;
strictMode = false;
}
+ ///
+ /// Read the WAV header
+ ///
public void ReadWaveHeader(Stream stream)
{
this.dataChunkPosition = -1;
diff --git a/NAudio.Core/MmException.cs b/NAudio.Core/MmException.cs
index 26dd5d05..d08ef836 100644
--- a/NAudio.Core/MmException.cs
+++ b/NAudio.Core/MmException.cs
@@ -22,7 +22,7 @@ public MmException(MmResult result, string function)
private static string ErrorMessage(MmResult result, string function)
{
- return $"{result} calling {function}";
+ return $"Windows MMAPI returned \"{result}\" after call to \"{function}\"";
}
///
diff --git a/NAudio.Core/NAudio.Core.csproj b/NAudio.Core/NAudio.Core.csproj
index 64c7dd44..fefd4859 100644
--- a/NAudio.Core/NAudio.Core.csproj
+++ b/NAudio.Core/NAudio.Core.csproj
@@ -3,7 +3,7 @@
netstandard2.0
Mark Heath
- 2.2.1
+ 2.2.1-SA
true
true
true
diff --git a/NAudio.Core/Wave/SampleProviders/FadeInOutSampleProvider.cs b/NAudio.Core/Wave/SampleProviders/FadeInOutSampleProvider.cs
index e589b6ca..b3522f5a 100644
--- a/NAudio.Core/Wave/SampleProviders/FadeInOutSampleProvider.cs
+++ b/NAudio.Core/Wave/SampleProviders/FadeInOutSampleProvider.cs
@@ -1,4 +1,6 @@
-namespace NAudio.Wave.SampleProviders
+using System;
+
+namespace NAudio.Wave.SampleProviders
{
///
/// Sample Provider to allow fading in and out
@@ -13,6 +15,16 @@ enum FadeState
FadingOut,
}
+ ///
+ /// Raised when a scheduled fade-in has completed
+ ///
+ public event EventHandler FadeInComplete;
+
+ ///
+ /// Raised when a scheduled fade-out has completed
+ ///
+ public event EventHandler FadeOutComplete;
+
private readonly object lockObject = new object();
private readonly ISampleProvider source;
private int fadeSamplePosition;
@@ -108,6 +120,8 @@ private void FadeOut(float[] buffer, int offset, int sourceSamplesRead)
if (fadeSamplePosition > fadeSampleCount)
{
fadeState = FadeState.Silence;
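+ // notify subscribers that the scheduled fade-out has finished (raised from within Read, on the thread reading samples)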
+ FadeOutComplete?.Invoke(this, EventArgs.Empty);
+
// clear out the end
ClearBuffer(buffer, sample + offset, sourceSamplesRead - sample);
break;
@@ -129,6 +143,8 @@ private void FadeIn(float[] buffer, int offset, int sourceSamplesRead)
if (fadeSamplePosition > fadeSampleCount)
{
fadeState = FadeState.FullVolume;
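+ // notify subscribers that the scheduled fade-in has finished (raised from within Read, on the thread reading samples)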
+ FadeInComplete?.Invoke(this, EventArgs.Empty);
+
// no need to multiply any more
break;
}
diff --git a/NAudio.Core/Wave/WaveStreams/Mp3FileReaderBase.cs b/NAudio.Core/Wave/WaveStreams/Mp3FileReaderBase.cs
index 3d6bc4a9..91d58321 100644
--- a/NAudio.Core/Wave/WaveStreams/Mp3FileReaderBase.cs
+++ b/NAudio.Core/Wave/WaveStreams/Mp3FileReaderBase.cs
@@ -73,6 +73,13 @@ public Mp3FileReaderBase(Stream inputStream, FrameDecompressorBuilder frameDecom
}
+ ///
+ /// Constructor that takes an input stream and a frame decompressor builder
+ ///
+ /// Input stream
+ /// Factory method to build a frame decompressor
+ /// Whether we own the stream and should dispose it
+ ///
protected Mp3FileReaderBase(Stream inputStream, FrameDecompressorBuilder frameDecompressorBuilder, bool ownInputStream)
{
if (inputStream == null) throw new ArgumentNullException(nameof(inputStream));
diff --git a/NAudio.Extras/AudioPlaybackEngine.cs b/NAudio.Extras/AudioPlaybackEngine.cs
index 8ce770e4..6964ae91 100644
--- a/NAudio.Extras/AudioPlaybackEngine.cs
+++ b/NAudio.Extras/AudioPlaybackEngine.cs
@@ -12,6 +12,9 @@ public class AudioPlaybackEngine : IDisposable
private readonly IWavePlayer outputDevice;
private readonly MixingSampleProvider mixer;
+ ///
+ /// Creates a new AudioPlaybackEngine with the given sample rate and channel count
+ ///
public AudioPlaybackEngine(int sampleRate = 44100, int channelCount = 2)
{
outputDevice = new WaveOutEvent();
@@ -21,6 +24,9 @@ public AudioPlaybackEngine(int sampleRate = 44100, int channelCount = 2)
outputDevice.Play();
}
+ ///
+ /// Fire-and-forget playback of an audio file
+ ///
public void PlaySound(string fileName)
{
var input = new AudioFileReader(fileName);
@@ -40,6 +46,9 @@ private ISampleProvider ConvertToRightChannelCount(ISampleProvider input)
throw new NotImplementedException("Not yet implemented this channel count conversion");
}
+ ///
+ /// Fire-and-forget playback of a cached sound
+ ///
public void PlaySound(CachedSound sound)
{
AddMixerInput(new CachedSoundSampleProvider(sound));
@@ -50,6 +59,9 @@ private void AddMixerInput(ISampleProvider input)
mixer.AddMixerInput(ConvertToRightChannelCount(input));
}
+ ///
+ /// Disposes this instance
+ ///
public void Dispose()
{
outputDevice.Dispose();
diff --git a/NAudio.Extras/AutoDisposeFileReader.cs b/NAudio.Extras/AutoDisposeFileReader.cs
index 1b5100bf..6d43961b 100644
--- a/NAudio.Extras/AutoDisposeFileReader.cs
+++ b/NAudio.Extras/AutoDisposeFileReader.cs
@@ -11,12 +11,19 @@ public class AutoDisposeFileReader : ISampleProvider
{
private readonly ISampleProvider reader;
private bool isDisposed;
+
+ ///
+ /// Creates a new file reader that disposes the source reader when it finishes
+ ///
public AutoDisposeFileReader(ISampleProvider reader)
{
this.reader = reader;
WaveFormat = reader.WaveFormat;
}
+ ///
+ /// Reads samples from this file reader
+ ///
public int Read(float[] buffer, int offset, int count)
{
if (isDisposed)
@@ -33,6 +40,9 @@ public int Read(float[] buffer, int offset, int count)
return read;
}
+ ///
+ /// The WaveFormat of this file reader
+ ///
public WaveFormat WaveFormat { get; }
}
}
\ No newline at end of file
diff --git a/NAudio.Extras/CachedSound.cs b/NAudio.Extras/CachedSound.cs
index 19beca51..f05b3c53 100644
--- a/NAudio.Extras/CachedSound.cs
+++ b/NAudio.Extras/CachedSound.cs
@@ -9,8 +9,19 @@ namespace NAudio.Extras
///
public class CachedSound
{
+ ///
+ /// Audio data
+ ///
public float[] AudioData { get; }
+
+ ///
+ /// Format of the audio
+ ///
public WaveFormat WaveFormat { get; }
+
+ ///
+ /// Creates a new CachedSound from a file
+ ///
public CachedSound(string audioFileName)
{
using (var audioFileReader = new AudioFileReader(audioFileName))
diff --git a/NAudio.Extras/Equalizer.cs b/NAudio.Extras/Equalizer.cs
index 0c73e81f..406080a1 100644
--- a/NAudio.Extras/Equalizer.cs
+++ b/NAudio.Extras/Equalizer.cs
@@ -18,6 +18,9 @@ public class Equalizer : ISampleProvider
private readonly int bandCount;
private bool updated;
+ ///
+ /// Creates a new Equalizer
+ ///
public Equalizer(ISampleProvider sourceProvider, EqualizerBand[] bands)
{
this.sourceProvider = sourceProvider;
@@ -43,14 +46,23 @@ private void CreateFilters()
}
}
+ ///
+ /// Update the equalizer settings
+ ///
public void Update()
{
updated = true;
CreateFilters();
}
+ ///
+ /// Gets the WaveFormat of this Sample Provider
+ ///
public WaveFormat WaveFormat => sourceProvider.WaveFormat;
+ ///
+ /// Reads samples from this Sample Provider
+ ///
public int Read(float[] buffer, int offset, int count)
{
int samplesRead = sourceProvider.Read(buffer, offset, count);
diff --git a/NAudio.Extras/EqualizerBand.cs b/NAudio.Extras/EqualizerBand.cs
index c22f5eb5..2ead2e4c 100644
--- a/NAudio.Extras/EqualizerBand.cs
+++ b/NAudio.Extras/EqualizerBand.cs
@@ -1,9 +1,21 @@
namespace NAudio.Extras
{
+ ///
+ /// Equalizer Band
+ ///
public class EqualizerBand
{
+ ///
+ /// Frequency
+ ///
public float Frequency { get; set; }
+ ///
+ /// Gain
+ ///
public float Gain { get; set; }
+ ///
+ /// Bandwidth
+ ///
public float Bandwidth { get; set; }
}
}
\ No newline at end of file
diff --git a/NAudio.Extras/NAudio.Extras.csproj b/NAudio.Extras/NAudio.Extras.csproj
index 578cf31a..6a0a3b59 100644
--- a/NAudio.Extras/NAudio.Extras.csproj
+++ b/NAudio.Extras/NAudio.Extras.csproj
@@ -12,7 +12,7 @@
https://github.com/naudio/NAudio
© Mark Heath 2023
MIT
- 2.2.1
+ 2.2.1-SA
naudio-icon.png
diff --git a/NAudio.Extras/SampleAggregator.cs b/NAudio.Extras/SampleAggregator.cs
index 957df0f9..52de0074 100644
--- a/NAudio.Extras/SampleAggregator.cs
+++ b/NAudio.Extras/SampleAggregator.cs
@@ -10,16 +10,28 @@ namespace NAudio.Extras
///
public class SampleAggregator : ISampleProvider
{
- // volume
+ ///
+ /// Raised to indicate the maximum volume level in this period
+ ///
public event EventHandler MaximumCalculated;
private float maxValue;
private float minValue;
+ ///
+ /// Notification count, number of samples between MaximumCalculated events
+ ///
public int NotificationCount { get; set; }
int count;
- // FFT
+ ///
+ /// Raised to indicate that a block of samples has had an FFT performed on it
+ ///
public event EventHandler FftCalculated;
+
+ ///
+ /// If true, performs an FFT on each block of samples
+ ///
public bool PerformFFT { get; set; }
+
private readonly Complex[] fftBuffer;
private readonly FftEventArgs fftArgs;
private int fftPos;
@@ -29,6 +41,11 @@ public class SampleAggregator : ISampleProvider
private readonly int channels;
+ ///
+ /// Creates a new SampleAggregator
+ ///
+ /// source sample provider
+ /// FFT length, must be a power of 2
public SampleAggregator(ISampleProvider source, int fftLength = 1024)
{
channels = source.WaveFormat.Channels;
@@ -48,7 +65,9 @@ static bool IsPowerOfTwo(int x)
return (x & (x - 1)) == 0;
}
-
+ ///
+ /// Reset the volume calculation
+ ///
public void Reset()
{
count = 0;
@@ -81,8 +100,14 @@ private void Add(float value)
}
}
+ ///
+ /// Gets the WaveFormat of this Sample Provider
+ ///
public WaveFormat WaveFormat => source.WaveFormat;
+ ///
+ /// Reads samples from this sample provider
+ ///
public int Read(float[] buffer, int offset, int count)
{
var samplesRead = source.Read(buffer, offset, count);
@@ -95,25 +120,46 @@ public int Read(float[] buffer, int offset, int count)
}
}
+ ///
+ /// Max sample event args
+ ///
public class MaxSampleEventArgs : EventArgs
{
+ ///
+ /// Creates a new MaxSampleEventArgs
+ ///
[DebuggerStepThrough]
public MaxSampleEventArgs(float minValue, float maxValue)
{
MaxSample = maxValue;
MinSample = minValue;
}
+ ///
+ /// Maximum sample value in this period
+ ///
public float MaxSample { get; private set; }
+ ///
+ /// Minimum sample value in this period
+ ///
public float MinSample { get; private set; }
}
+ ///
+ /// FFT Event Args
+ ///
public class FftEventArgs : EventArgs
{
+ ///
+ /// Creates a new FFTEventArgs
+ ///
[DebuggerStepThrough]
public FftEventArgs(Complex[] result)
{
Result = result;
}
+ ///
+ /// Result of FFT
+ ///
public Complex[] Result { get; private set; }
}
}
diff --git a/NAudio.Midi/NAudio.Midi.csproj b/NAudio.Midi/NAudio.Midi.csproj
index e5205ad6..15a2b5ca 100644
--- a/NAudio.Midi/NAudio.Midi.csproj
+++ b/NAudio.Midi/NAudio.Midi.csproj
@@ -2,7 +2,7 @@
netstandard2.0
- 2.2.1
+ 2.2.1-SA
true
true
Mark Heath
diff --git a/NAudio.Uap/NAudio.Uap.csproj b/NAudio.Uap/NAudio.Uap.csproj
index d29c49d7..a4ea63ec 100644
--- a/NAudio.Uap/NAudio.Uap.csproj
+++ b/NAudio.Uap/NAudio.Uap.csproj
@@ -3,7 +3,7 @@
uap10.0.18362
- 2.2.1
+ 2.2.1-SA
true
true
Mark Heath
diff --git a/NAudio.Wasapi/CoreAudioApi/AudioClient.cs b/NAudio.Wasapi/CoreAudioApi/AudioClient.cs
index cc11a0f9..1bcc055f 100644
--- a/NAudio.Wasapi/CoreAudioApi/AudioClient.cs
+++ b/NAudio.Wasapi/CoreAudioApi/AudioClient.cs
@@ -56,6 +56,10 @@ public static async Task ActivateAsync(string deviceInterfacePath,
return new AudioClient((IAudioClient)audioClient2);
}
+ ///
+ /// Creates a new AudioClient
+ ///
+ ///
public AudioClient(IAudioClient audioClientInterface)
{
this.audioClientInterface = audioClientInterface;
diff --git a/NAudio.Wasapi/CoreAudioApi/AudioClientStreamFlags.cs b/NAudio.Wasapi/CoreAudioApi/AudioClientStreamFlags.cs
index 4d8fd9cf..5a7d076e 100644
--- a/NAudio.Wasapi/CoreAudioApi/AudioClientStreamFlags.cs
+++ b/NAudio.Wasapi/CoreAudioApi/AudioClientStreamFlags.cs
@@ -54,6 +54,8 @@ public enum AudioClientStreamFlags : uint
}
+ /* not currently used
+
///
/// AUDIOCLIENT_ACTIVATION_PARAMS
/// https://docs.microsoft.com/en-us/windows/win32/api/audioclientactivationparams/ns-audioclientactivationparams-audioclient_activation_params
@@ -79,6 +81,8 @@ struct AudioClientProcessLoopbackParams
public ProcessLoopbackMode ProcessLoopbackMode;
}
+ */
+
///
/// PROCESS_LOOPBACK_MODE
/// https://docs.microsoft.com/en-us/windows/win32/api/audioclientactivationparams/ne-audioclientactivationparams-process_loopback_mode
diff --git a/NAudio.Wasapi/CoreAudioApi/AudioMute.cs b/NAudio.Wasapi/CoreAudioApi/AudioMute.cs
index 3663c1e0..326836ee 100644
--- a/NAudio.Wasapi/CoreAudioApi/AudioMute.cs
+++ b/NAudio.Wasapi/CoreAudioApi/AudioMute.cs
@@ -1,11 +1,11 @@
-using NAudio.CoreAudioApi.Interfaces;
-using NAudio.Wasapi.CoreAudioApi.Interfaces;
-using System;
-using System.Collections.Generic;
-using System.Text;
+using System;
+using NAudio.CoreAudioApi.Interfaces;
namespace NAudio.CoreAudioApi
{
+ ///
+ /// Audio Mute
+ ///
public class AudioMute
{
private IAudioMute audioMuteInterface;
@@ -14,6 +14,9 @@ internal AudioMute(IAudioMute audioMute)
audioMuteInterface = audioMute;
}
+ ///
+ /// Is Muted
+ ///
public bool IsMuted
{
get
diff --git a/NAudio.Wasapi/CoreAudioApi/AudioVolumeLevel.cs b/NAudio.Wasapi/CoreAudioApi/AudioVolumeLevel.cs
index 5543ee9d..4e791b27 100644
--- a/NAudio.Wasapi/CoreAudioApi/AudioVolumeLevel.cs
+++ b/NAudio.Wasapi/CoreAudioApi/AudioVolumeLevel.cs
@@ -15,6 +15,9 @@ internal AudioVolumeLevel(IAudioVolumeLevel audioVolumeLevel)
audioVolumeLevelInterface = audioVolumeLevel;
}
+ ///
+ /// Channel Count
+ ///
public uint ChannelCount
{
get
@@ -24,29 +27,55 @@ public uint ChannelCount
}
}
+ ///
+ /// Get Level Range
+ ///
+ /// Channel
+ /// Minimum Level dB
+ /// Maximum Level dB
+ /// Stepping
public void GetLevelRange(uint channel, out float minLevelDb, out float maxLevelDb, out float stepping)
{
audioVolumeLevelInterface.GetLevelRange(channel, out minLevelDb, out maxLevelDb, out stepping);
}
+ ///
+ /// Get Channel Volume Level
+ ///
+ /// Channel
+ /// Volume Level
public float GetLevel(uint channel)
{
audioVolumeLevelInterface.GetLevel(channel, out float result);
return result;
}
+ ///
+ /// Set Channel Volume Level
+ ///
+ /// Channel
+ /// Volume
public void SetLevel(uint channel, float value)
{
var guid = Guid.Empty;
audioVolumeLevelInterface.SetLevel(channel, value, ref guid);
}
+ ///
+ /// Sets all channels in the audio stream to the same uniform volume level, in decibels.
+ ///
+ /// Volume in decibels
public void SetLevelUniform(float value)
{
var guid = Guid.Empty;
audioVolumeLevelInterface.SetLevelUniform(value, ref guid);
}
+ ///
+ /// Sets the volume levels, in decibels, of all the channels in the audio stream.
+ ///
+ /// Volume levels in decibels
+ /// Channels
public void SetLevelAllChannel(float[] values, uint channels)
{
var guid = Guid.Empty;
diff --git a/NAudio.Wasapi/CoreAudioApi/Connector.cs b/NAudio.Wasapi/CoreAudioApi/Connector.cs
index c51cc3c5..861794a3 100644
--- a/NAudio.Wasapi/CoreAudioApi/Connector.cs
+++ b/NAudio.Wasapi/CoreAudioApi/Connector.cs
@@ -1,113 +1,116 @@
-using NAudio.CoreAudioApi.Interfaces;
-
-namespace NAudio.CoreAudioApi
-{
- ///
- /// Connector
- ///
- public class Connector
- {
- private readonly IConnector connectorInterface;
-
- internal Connector(IConnector connector)
- {
- connectorInterface = connector;
- }
-
- ///
- /// Connects this connector to a connector in another device-topology object
- ///
- public void ConnectTo(Connector other)
- {
- connectorInterface.ConnectTo(other.connectorInterface);
- }
-
- ///
- /// Retreives the type of this connector
- ///
- public ConnectorType Type
- {
- get
- {
- connectorInterface.GetType(out var result);
- return result;
- }
- }
-
- ///
- /// Retreives the data flow of this connector
- ///
- public DataFlow DataFlow
- {
- get
- {
- connectorInterface.GetDataFlow(out var result);
- return result;
- }
- }
-
- ///
- /// Disconnects this connector from it's connected connector (if connected)
- ///
- public void Disconnect()
- {
- connectorInterface.Disconnect();
- }
-
- ///
- /// Indicates whether this connector is connected to another connector
- ///
- public bool IsConnected
- {
- get
- {
- connectorInterface.IsConnected(out var result);
- return result;
- }
- }
-
- ///
- /// Retreives the connector this connector is connected to (if connected)
- ///
- public Connector ConnectedTo
- {
- get
- {
- connectorInterface.GetConnectedTo(out var result);
- return new Connector(result);
- }
- }
-
- ///
- /// Retreives the global ID of the connector this connector is connected to (if connected)
- ///
- public string ConnectedToConnectorId
- {
- get
- {
- connectorInterface.GetConnectorIdConnectedTo(out var result);
- return result;
- }
- }
-
- ///
- /// Retreives the device ID of the audio device this connector is connected to (if connected)
- ///
- public string ConnectedToDeviceId
- {
- get
- {
- connectorInterface.GetDeviceIdConnectedTo(out var result);
- return result;
- }
+using NAudio.CoreAudioApi.Interfaces;
+
+namespace NAudio.CoreAudioApi
+{
+ ///
+ /// Connector
+ ///
+ public class Connector
+ {
+ private readonly IConnector connectorInterface;
+
+ internal Connector(IConnector connector)
+ {
+ connectorInterface = connector;
+ }
+
+ ///
+ /// Connects this connector to a connector in another device-topology object
+ ///
+ public void ConnectTo(Connector other)
+ {
+ connectorInterface.ConnectTo(other.connectorInterface);
+ }
+
+ ///
+ /// Retrieves the type of this connector
+ ///
+ public ConnectorType Type
+ {
+ get
+ {
+ connectorInterface.GetType(out var result);
+ return result;
+ }
+ }
+
+ ///
+ /// Retrieves the data flow of this connector
+ ///
+ public DataFlow DataFlow
+ {
+ get
+ {
+ connectorInterface.GetDataFlow(out var result);
+ return result;
+ }
+ }
+
+ ///
+ /// Disconnects this connector from its connected connector (if connected)
+ ///
+ public void Disconnect()
+ {
+ connectorInterface.Disconnect();
}
- public Part Part
- {
- get
- {
- return new Part(connectorInterface as IPart);
- }
- }
- }
-}
+ ///
+ /// Indicates whether this connector is connected to another connector
+ ///
+ public bool IsConnected
+ {
+ get
+ {
+ connectorInterface.IsConnected(out var result);
+ return result;
+ }
+ }
+
+ ///
+ /// Retrieves the connector this connector is connected to (if connected)
+ ///
+ public Connector ConnectedTo
+ {
+ get
+ {
+ connectorInterface.GetConnectedTo(out var result);
+ return new Connector(result);
+ }
+ }
+
+ ///
+ /// Retrieves the global ID of the connector this connector is connected to (if connected)
+ ///
+ public string ConnectedToConnectorId
+ {
+ get
+ {
+ connectorInterface.GetConnectorIdConnectedTo(out var result);
+ return result;
+ }
+ }
+
+ ///
+ /// Retrieves the device ID of the audio device this connector is connected to (if connected)
+ ///
+ public string ConnectedToDeviceId
+ {
+ get
+ {
+ connectorInterface.GetDeviceIdConnectedTo(out var result);
+ return result;
+ }
+ }
+
+ ///
+ /// Part
+ ///
+ public Part Part
+ {
+ get
+ {
+ return new Part(connectorInterface as IPart);
+ }
+ }
+ }
+}
diff --git a/NAudio.Wasapi/CoreAudioApi/Interfaces/IActivateAudioInterfaceAsyncOperation.cs b/NAudio.Wasapi/CoreAudioApi/Interfaces/IActivateAudioInterfaceAsyncOperation.cs
index 7a5acadd..9d470d2c 100644
--- a/NAudio.Wasapi/CoreAudioApi/Interfaces/IActivateAudioInterfaceAsyncOperation.cs
+++ b/NAudio.Wasapi/CoreAudioApi/Interfaces/IActivateAudioInterfaceAsyncOperation.cs
@@ -3,11 +3,17 @@
namespace NAudio.Wasapi.CoreAudioApi.Interfaces
{
+ ///
+ /// Represents an asynchronous operation activating a WASAPI interface and provides a method to retrieve the results of the activation.
+ ///
[ComImport, InterfaceType(ComInterfaceType.InterfaceIsIUnknown), Guid("72A22D78-CDE4-431D-B8CC-843A71199B6D")]
public interface IActivateAudioInterfaceAsyncOperation
{
//virtual HRESULT STDMETHODCALLTYPE GetActivateResult(/*[out]*/ _Out_
// HRESULT *activateResult, /*[out]*/ _Outptr_result_maybenull_ IUnknown **activatedInterface) = 0;
+ ///
+ /// Gets the results of an asynchronous activation of a WASAPI interface initiated by an application calling the ActivateAudioInterfaceAsync function
+ ///
void GetActivateResult([Out] out int activateResult,
[Out, MarshalAs(UnmanagedType.IUnknown)] out object activateInterface);
}
diff --git a/NAudio.Wasapi/CoreAudioApi/Interfaces/IActivateAudioInterfaceCompletionHandler.cs b/NAudio.Wasapi/CoreAudioApi/Interfaces/IActivateAudioInterfaceCompletionHandler.cs
index fb8b6602..6bd203b5 100644
--- a/NAudio.Wasapi/CoreAudioApi/Interfaces/IActivateAudioInterfaceCompletionHandler.cs
+++ b/NAudio.Wasapi/CoreAudioApi/Interfaces/IActivateAudioInterfaceCompletionHandler.cs
@@ -3,11 +3,17 @@
namespace NAudio.Wasapi.CoreAudioApi.Interfaces
{
+ ///
+ /// Provides a callback to indicate that activation of a WASAPI interface is complete.
+ ///
[ComImport, InterfaceType(ComInterfaceType.InterfaceIsIUnknown), Guid("41D949AB-9862-444A-80F6-C261334DA5EB")]
public interface IActivateAudioInterfaceCompletionHandler
{
//virtual HRESULT STDMETHODCALLTYPE ActivateCompleted(/*[in]*/ _In_
// IActivateAudioInterfaceAsyncOperation *activateOperation) = 0;
+ ///
+ /// Indicates that activation of a WASAPI interface is complete and results are available.
+ ///
void ActivateCompleted(IActivateAudioInterfaceAsyncOperation activateOperation);
}
}
diff --git a/NAudio.Wasapi/CoreAudioApi/Interfaces/IAudioClient.cs b/NAudio.Wasapi/CoreAudioApi/Interfaces/IAudioClient.cs
index 43f63394..f5004dbe 100644
--- a/NAudio.Wasapi/CoreAudioApi/Interfaces/IAudioClient.cs
+++ b/NAudio.Wasapi/CoreAudioApi/Interfaces/IAudioClient.cs
@@ -13,6 +13,16 @@ namespace NAudio.CoreAudioApi.Interfaces
ComImport]
public interface IAudioClient
{
+ ///
+ /// initializes the audio stream.
+ ///
+ /// The sharing mode for the connection. Through this parameter, the client tells the audio engine whether it wants to share the audio endpoint device with other clients.
+ /// Flags to control creation of the stream. The client should set this parameter to 0 or to the bitwise OR of one or more of the AUDCLNT_STREAMFLAGS_XXX Constants or the AUDCLNT_SESSIONFLAGS_XXX Constants.
+ /// The buffer capacity as a time value. This parameter is of type REFERENCE_TIME and is expressed in 100-nanosecond units. This parameter contains the buffer size that the caller requests for the buffer that the audio application will share with the audio engine (in shared mode) or with the endpoint device (in exclusive mode). If the call succeeds, the method allocates a buffer that is a least this large.
+ /// The device period. This parameter can be nonzero only in exclusive mode. In shared mode, always set this parameter to 0. In exclusive mode, this parameter specifies the requested scheduling period for successive buffer accesses by the audio endpoint device. If the requested device period lies outside the range that is set by the device's minimum period and the system's maximum period, then the method clamps the period to that range. If this parameter is 0, the method sets the device period to its default value. To obtain the default device period, call the IAudioClient::GetDevicePeriod method. If the AUDCLNT_STREAMFLAGS_EVENTCALLBACK stream flag is set and AUDCLNT_SHAREMODE_EXCLUSIVE is set as the ShareMode, then hnsPeriodicity must be nonzero and equal to hnsBufferDuration.
+ /// Pointer to a format descriptor. This parameter must point to a valid format descriptor of type WAVEFORMATEX (or WAVEFORMATEXTENSIBLE).
+ /// Pointer to a session GUID. This parameter points to a GUID value that identifies the audio session that the stream belongs to. If the GUID identifies a session that has been previously opened, the method adds the stream to that session. If the GUID does not identify an existing session, the method opens a new session and adds the stream to that session. The stream remains a member of the same session for its lifetime. Setting this parameter to NULL is equivalent to passing a pointer to a GUID_NULL value.
+ ///
[PreserveSig]
int Initialize(AudioClientShareMode shareMode,
AudioClientStreamFlags streamFlags,
@@ -26,28 +36,55 @@ int Initialize(AudioClientShareMode shareMode,
///
int GetBufferSize(out uint bufferSize);
+ ///
+ /// retrieves the maximum latency for the current stream and can be called any time after the stream has been initialized.
+ ///
[return: MarshalAs(UnmanagedType.I8)]
long GetStreamLatency();
+ ///
+ /// retrieves the number of frames of padding in the endpoint buffer.
+ ///
int GetCurrentPadding(out int currentPadding);
+ ///
+ /// Indicates whether the audio endpoint device supports a particular stream format.
+ ///
[PreserveSig]
int IsFormatSupported(
AudioClientShareMode shareMode,
[In] WaveFormat pFormat,
IntPtr closestMatchFormat); // or outIntPtr??
+ ///
+ /// retrieves the stream format that the audio engine uses for its internal processing of shared-mode streams.
+ ///
int GetMixFormat(out IntPtr deviceFormatPointer);
- // REFERENCE_TIME is 64 bit int
+ // REFERENCE_TIME is 64 bit int
+ ///
+ /// retrieves the length of the periodic interval separating successive processing passes by the audio engine on the data in the endpoint buffer.
+ ///
int GetDevicePeriod(out long defaultDevicePeriod, out long minimumDevicePeriod);
+ ///
+ /// starts the audio stream.
+ ///
int Start();
+ ///
+ /// stops the audio stream.
+ ///
int Stop();
+ ///
+ /// resets the audio stream.
+ ///
int Reset();
-
+
+ ///
+ /// sets the event handle that the system signals when an audio buffer is ready to be processed by the client.
+ ///
int SetEventHandle(IntPtr eventHandle);
///
diff --git a/NAudio.Wasapi/CoreAudioApi/Interfaces/IAudioVolumeLevel.cs b/NAudio.Wasapi/CoreAudioApi/Interfaces/IAudioVolumeLevel.cs
index 81ddf026..176633ab 100644
--- a/NAudio.Wasapi/CoreAudioApi/Interfaces/IAudioVolumeLevel.cs
+++ b/NAudio.Wasapi/CoreAudioApi/Interfaces/IAudioVolumeLevel.cs
@@ -1,7 +1,5 @@
using System;
-using System.Collections.Generic;
using System.Runtime.InteropServices;
-using System.Text;
namespace NAudio.CoreAudioApi.Interfaces
{
diff --git a/NAudio.Wasapi/CoreAudioApi/Interfaces/IControlInterface.cs b/NAudio.Wasapi/CoreAudioApi/Interfaces/IControlInterface.cs
index d06dd1a8..ee1c728c 100644
--- a/NAudio.Wasapi/CoreAudioApi/Interfaces/IControlInterface.cs
+++ b/NAudio.Wasapi/CoreAudioApi/Interfaces/IControlInterface.cs
@@ -1,10 +1,11 @@
using System;
-using System.Collections.Generic;
using System.Runtime.InteropServices;
-using System.Text;
namespace NAudio.CoreAudioApi.Interfaces
{
+ ///
+ /// The IControlInterface interface represents a control interface on a part (connector or subunit) in a device topology. The client obtains a reference to a part's IControlInterface interface by calling the IPart::GetControlInterface method.
+ ///
[Guid("45d37c3f-5140-444a-ae24-400789f3cbf3"),
InterfaceType(ComInterfaceType.InterfaceIsIUnknown),
ComImport]
diff --git a/NAudio.Wasapi/CoreAudioApi/Interfaces/IPerChannelDbLevel.cs b/NAudio.Wasapi/CoreAudioApi/Interfaces/IPerChannelDbLevel.cs
index ee2f0d2a..8e1d9dc4 100644
--- a/NAudio.Wasapi/CoreAudioApi/Interfaces/IPerChannelDbLevel.cs
+++ b/NAudio.Wasapi/CoreAudioApi/Interfaces/IPerChannelDbLevel.cs
@@ -1,7 +1,5 @@
using System;
-using System.Collections.Generic;
using System.Runtime.InteropServices;
-using System.Text;
namespace NAudio.CoreAudioApi.Interfaces
{
diff --git a/NAudio.Wasapi/CoreAudioApi/Interfaces/PartType.cs b/NAudio.Wasapi/CoreAudioApi/Interfaces/PartType.cs
index 8f7159a5..d0443f15 100644
--- a/NAudio.Wasapi/CoreAudioApi/Interfaces/PartType.cs
+++ b/NAudio.Wasapi/CoreAudioApi/Interfaces/PartType.cs
@@ -1,13 +1,37 @@
namespace NAudio.CoreAudioApi.Interfaces
{
+ ///
+ /// The PartType enumeration defines constants that indicate whether a part in a device topology is a connector or subunit.
+ ///
public enum PartTypeEnum
{
+ ///
+ /// Connector
+ ///
Connector = 0,
+ ///
+ /// Subunit
+ ///
Subunit = 1,
+ ///
+ /// Hardware Periphery
+ ///
HardwarePeriphery = 2,
+ ///
+ /// Software Driver
+ ///
SoftwareDriver = 3,
+ ///
+ /// Splitter
+ ///
Splitter = 4,
+ ///
+ /// Category
+ ///
Category = 5,
+ ///
+ /// Other
+ ///
Other = 6
}
}
diff --git a/NAudio.Wasapi/CoreAudioApi/KsJackDescription.cs b/NAudio.Wasapi/CoreAudioApi/KsJackDescription.cs
index 2a67f086..a3fb05e9 100644
--- a/NAudio.Wasapi/CoreAudioApi/KsJackDescription.cs
+++ b/NAudio.Wasapi/CoreAudioApi/KsJackDescription.cs
@@ -2,6 +2,9 @@
namespace NAudio.CoreAudioApi
{
+ ///
+ /// KS Jack Description
+ ///
public class KsJackDescription
{
private readonly IKsJackDescription ksJackDescriptionInterface;
@@ -11,6 +14,9 @@ internal KsJackDescription(IKsJackDescription ksJackDescription)
ksJackDescriptionInterface = ksJackDescription;
}
+ ///
+ /// Jack count
+ ///
public uint Count
{
get
@@ -20,6 +26,9 @@ public uint Count
}
}
+ ///
+ /// Get Jack Description by index
+ ///
public string this[uint index]
{
get
diff --git a/NAudio.Wasapi/CoreAudioApi/PartsList.cs b/NAudio.Wasapi/CoreAudioApi/PartsList.cs
index 99fc9dbc..ddef21d2 100644
--- a/NAudio.Wasapi/CoreAudioApi/PartsList.cs
+++ b/NAudio.Wasapi/CoreAudioApi/PartsList.cs
@@ -1,10 +1,11 @@
-using NAudio.CoreAudioApi.Interfaces;
-using System;
-using System.Collections.Generic;
-using System.Text;
+using System;
+using NAudio.CoreAudioApi.Interfaces;
namespace NAudio.CoreAudioApi
{
+ ///
+ /// Parts List
+ ///
public class PartsList
{
private IPartsList partsListInterface;
@@ -14,6 +15,9 @@ internal PartsList(IPartsList partsList)
partsListInterface = partsList;
}
+ ///
+ /// Part count
+ ///
public uint Count
{
get
@@ -28,6 +32,9 @@ public uint Count
}
}
+ ///
+ /// Get part by index
+ ///
public Part this[uint index]
{
get
diff --git a/NAudio.Wasapi/MediaFoundationEncoder.cs b/NAudio.Wasapi/MediaFoundationEncoder.cs
index 7663df17..06d4d387 100644
--- a/NAudio.Wasapi/MediaFoundationEncoder.cs
+++ b/NAudio.Wasapi/MediaFoundationEncoder.cs
@@ -193,6 +193,9 @@ public static MediaType SelectMediaType(Guid audioSubtype, WaveFormat inputForma
.FirstOrDefault();
}
+ ///
+ /// Default read buffer size
+ ///
public int DefaultReadBufferSize { get; set; }
private readonly MediaType outputMediaType;
private bool disposed;
diff --git a/NAudio.Wasapi/NAudio.Wasapi.csproj b/NAudio.Wasapi/NAudio.Wasapi.csproj
index b36e25a4..d0b00671 100644
--- a/NAudio.Wasapi/NAudio.Wasapi.csproj
+++ b/NAudio.Wasapi/NAudio.Wasapi.csproj
@@ -2,7 +2,7 @@
netstandard2.0;uap10.0.18362
- 2.2.1
+ 2.2.1-SA
true
true
Mark Heath
diff --git a/NAudio.WinForms/NAudio.WinForms.csproj b/NAudio.WinForms/NAudio.WinForms.csproj
index a07760ad..43c5a392 100644
--- a/NAudio.WinForms/NAudio.WinForms.csproj
+++ b/NAudio.WinForms/NAudio.WinForms.csproj
@@ -3,7 +3,7 @@
net472;netcoreapp3.1
true
- 2.2.1
+ 2.2.1-SA
true
true
Mark Heath
diff --git a/NAudio.WinMM/Compression/AcmStream.cs b/NAudio.WinMM/Compression/AcmStream.cs
index db84b7fd..1d01d831 100644
--- a/NAudio.WinMM/Compression/AcmStream.cs
+++ b/NAudio.WinMM/Compression/AcmStream.cs
@@ -175,6 +175,11 @@ public int Convert(int bytesToConvert, out int sourceBytesConverted)
bytesToConvert -= (bytesToConvert % sourceFormat.BlockAlign);
}
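+ // streamHeader is no longer available once the stream has been disposed;
+ // fail fast with a clear error instead of a NullReferenceException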
+ if (streamHeader == null)
+ {
+ throw new ObjectDisposedException(nameof(AcmStream), "This AcmStream has already been disposed");
+ }
+
return streamHeader.Convert(bytesToConvert, out sourceBytesConverted);
}
diff --git a/NAudio.WinMM/MmeInterop/MmTime.cs b/NAudio.WinMM/MmeInterop/MmTime.cs
index 6c535d12..ad44342f 100644
--- a/NAudio.WinMM/MmeInterop/MmTime.cs
+++ b/NAudio.WinMM/MmeInterop/MmTime.cs
@@ -10,36 +10,87 @@ namespace NAudio.Wave
[StructLayout(LayoutKind.Explicit)]
public struct MmTime
{
+ ///
+ /// Time in milliseconds.
+ ///
public const int TIME_MS = 0x0001;
+ ///
+ /// Number of waveform-audio samples.
+ ///
public const int TIME_SAMPLES = 0x0002;
+ ///
+ /// Current byte offset from beginning of the file.
+ ///
public const int TIME_BYTES = 0x0004;
+ ///
+ /// Time format.
+ ///
[FieldOffset(0)]
public UInt32 wType;
+ ///
+ /// Number of milliseconds. Used when wType is TIME_MS.
+ ///
[FieldOffset(4)]
public UInt32 ms;
+ ///
+ /// Number of samples. Used when wType is TIME_SAMPLES.
+ ///
[FieldOffset(4)]
public UInt32 sample;
+ ///
+ /// Byte count. Used when wType is TIME_BYTES.
+ ///
[FieldOffset(4)]
public UInt32 cb;
+ ///
+ /// Ticks in MIDI stream. Used when wType is TIME_TICKS.
+ ///
[FieldOffset(4)]
public UInt32 ticks;
+ ///
+ /// SMPTE time structure - hours. Used when wType is TIME_SMPTE.
+ ///
[FieldOffset(4)]
public Byte smpteHour;
+ ///
+ /// SMPTE time structure - minutes. Used when wType is TIME_SMPTE.
+ ///
[FieldOffset(5)]
public Byte smpteMin;
+ ///
+ /// SMPTE time structure - seconds. Used when wType is TIME_SMPTE.
+ ///
[FieldOffset(6)]
public Byte smpteSec;
+ ///
+ /// SMPTE time structure - frames. Used when wType is TIME_SMPTE.
+ ///
[FieldOffset(7)]
public Byte smpteFrame;
+ ///
+ /// SMPTE time structure - frames per second. Used when wType is TIME_SMPTE.
+ ///
[FieldOffset(8)]
public Byte smpteFps;
+ ///
+ /// SMPTE time structure - dummy byte for alignment. Used when wType is TIME_SMPTE.
+ ///
[FieldOffset(9)]
public Byte smpteDummy;
+ ///
+ /// SMPTE time structure - padding. Used when wType is TIME_SMPTE.
+ ///
[FieldOffset(10)]
public Byte smptePad0;
+ ///
+ /// SMPTE time structure - padding. Used when wType is TIME_SMPTE.
+ ///
[FieldOffset(11)]
public Byte smptePad1;
+ ///
+ /// MIDI time structure. Used when wType is TIME_MIDI.
+ ///
[FieldOffset(4)]
public UInt32 midiSongPtrPos;
}
diff --git a/NAudio.WinMM/MmeInterop/WaveInCapabilities.cs b/NAudio.WinMM/MmeInterop/WaveInCapabilities.cs
index f52b5052..bf1492ef 100644
--- a/NAudio.WinMM/MmeInterop/WaveInCapabilities.cs
+++ b/NAudio.WinMM/MmeInterop/WaveInCapabilities.cs
@@ -96,10 +96,22 @@ public bool SupportsWaveFormat(SupportedWaveFormat waveFormat)
}
+ ///
+ /// Wave Capabilities Helpers
+ ///
public static class WaveCapabilitiesHelpers
{
+ ///
+ /// Microsoft default manufacturer id
+ ///
public static readonly Guid MicrosoftDefaultManufacturerId = new Guid("d5a47fa8-6d98-11d1-a21a-00a0c9223196");
+ ///
+ /// Default wave out guid
+ ///
public static readonly Guid DefaultWaveOutGuid = new Guid("E36DC310-6D9A-11D1-A21A-00A0C9223196");
+ ///
+ /// Default wave in guid
+ ///
public static readonly Guid DefaultWaveInGuid = new Guid("E36DC311-6D9A-11D1-A21A-00A0C9223196");
///
diff --git a/NAudio.WinMM/MmeInterop/WaveInterop.cs b/NAudio.WinMM/MmeInterop/WaveInterop.cs
index ca0a7063..53c303a0 100644
--- a/NAudio.WinMM/MmeInterop/WaveInterop.cs
+++ b/NAudio.WinMM/MmeInterop/WaveInterop.cs
@@ -8,6 +8,9 @@ namespace NAudio.Wave
///
public class WaveInterop
{
+ ///
+ /// WaveInOut Open Flags
+ ///
[Flags]
public enum WaveInOutOpenFlags
{
@@ -46,6 +49,9 @@ public enum WaveInOutOpenFlags
//public const int TIME_SAMPLES = 0x0002; // number of wave samples
//public const int TIME_BYTES = 0x0004; // current byte offset
+ ///
+ /// Wave Message
+ ///
public enum WaveMessage
{
///
@@ -78,89 +84,179 @@ public enum WaveMessage
// use the userdata as a reference
// WaveOutProc http://msdn.microsoft.com/en-us/library/dd743869%28VS.85%29.aspx
// WaveInProc http://msdn.microsoft.com/en-us/library/dd743849%28VS.85%29.aspx
+ ///
+ /// Wave Callback
+ ///
public delegate void WaveCallback(IntPtr hWaveOut, WaveMessage message, IntPtr dwInstance, WaveHeader wavhdr, IntPtr dwReserved);
+ ///
+ /// Convert a mmio string to FOURCC
+ ///
[DllImport("winmm.dll")]
public static extern Int32 mmioStringToFOURCC([MarshalAs(UnmanagedType.LPStr)] String s, int flags);
+ ///
+ /// Get number of WaveOut devices
+ ///
[DllImport("winmm.dll")]
public static extern Int32 waveOutGetNumDevs();
+ ///
+ /// Prepare wave out header
+ ///
[DllImport("winmm.dll")]
public static extern MmResult waveOutPrepareHeader(IntPtr hWaveOut, WaveHeader lpWaveOutHdr, int uSize);
+ ///
+ /// Unprepare WaveOut header
+ ///
[DllImport("winmm.dll")]
public static extern MmResult waveOutUnprepareHeader(IntPtr hWaveOut, WaveHeader lpWaveOutHdr, int uSize);
+ ///
+ /// Write to WaveOut device
+ ///
[DllImport("winmm.dll")]
public static extern MmResult waveOutWrite(IntPtr hWaveOut, WaveHeader lpWaveOutHdr, int uSize);
// http://msdn.microsoft.com/en-us/library/dd743866%28VS.85%29.aspx
+ ///
+ /// Open WaveOut Device
+ ///
[DllImport("winmm.dll")]
public static extern MmResult waveOutOpen(out IntPtr hWaveOut, IntPtr uDeviceID, WaveFormat lpFormat, WaveCallback dwCallback, IntPtr dwInstance, WaveInOutOpenFlags dwFlags);
+
+ ///
+ /// Open WaveOut Device with window callback
+ ///
[DllImport("winmm.dll", EntryPoint = "waveOutOpen")]
public static extern MmResult waveOutOpenWindow(out IntPtr hWaveOut, IntPtr uDeviceID, WaveFormat lpFormat, IntPtr callbackWindowHandle, IntPtr dwInstance, WaveInOutOpenFlags dwFlags);
+
+ ///
+ /// Reset WaveOut device
+ ///
[DllImport("winmm.dll")]
public static extern MmResult waveOutReset(IntPtr hWaveOut);
+
+ ///
+ /// Close WaveOut device
+ ///
[DllImport("winmm.dll")]
public static extern MmResult waveOutClose(IntPtr hWaveOut);
+
+ ///
+ /// Pause WaveOut device
+ ///
[DllImport("winmm.dll")]
public static extern MmResult waveOutPause(IntPtr hWaveOut);
+
+ ///
+ /// Restart WaveOut device
+ ///
[DllImport("winmm.dll")]
public static extern MmResult waveOutRestart(IntPtr hWaveOut);
// http://msdn.microsoft.com/en-us/library/dd743863%28VS.85%29.aspx
+ ///
+ /// Get WaveOut device position
+ ///
[DllImport("winmm.dll")]
public static extern MmResult waveOutGetPosition(IntPtr hWaveOut, ref MmTime mmTime, int uSize);
// http://msdn.microsoft.com/en-us/library/dd743874%28VS.85%29.aspx
+ ///
+ /// Set WaveOut device volume
+ ///
[DllImport("winmm.dll")]
public static extern MmResult waveOutSetVolume(IntPtr hWaveOut, int dwVolume);
+ ///
+ /// Get WaveOut device volume
+ ///
[DllImport("winmm.dll")]
public static extern MmResult waveOutGetVolume(IntPtr hWaveOut, out int dwVolume);
// http://msdn.microsoft.com/en-us/library/dd743857%28VS.85%29.aspx
+ ///
+ /// Get WaveOut device capabilities
+ ///
[DllImport("winmm.dll", CharSet = CharSet.Auto)]
public static extern MmResult waveOutGetDevCaps(IntPtr deviceID, out WaveOutCapabilities waveOutCaps, int waveOutCapsSize);
+ ///
+ /// Get number of WaveIn devices
+ ///
[DllImport("winmm.dll")]
public static extern Int32 waveInGetNumDevs();
// http://msdn.microsoft.com/en-us/library/dd743841%28VS.85%29.aspx
+ ///
+ /// Get WaveIn Device capabilities
+ ///
[DllImport("winmm.dll", CharSet = CharSet.Auto)]
public static extern MmResult waveInGetDevCaps(IntPtr deviceID, out WaveInCapabilities waveInCaps, int waveInCapsSize);
// http://msdn.microsoft.com/en-us/library/dd743838%28VS.85%29.aspx
+ ///
+ /// Add WaveIn Buffer
+ ///
[DllImport("winmm.dll")]
public static extern MmResult waveInAddBuffer(IntPtr hWaveIn, WaveHeader pwh, int cbwh);
+ ///
+ /// Close WaveIn device
+ ///
[DllImport("winmm.dll")]
public static extern MmResult waveInClose(IntPtr hWaveIn);
// http://msdn.microsoft.com/en-us/library/dd743847%28VS.85%29.aspx
+ ///
+ /// Open WaveIn Device
+ ///
[DllImport("winmm.dll")]
public static extern MmResult waveInOpen(out IntPtr hWaveIn, IntPtr uDeviceID, WaveFormat lpFormat, WaveCallback dwCallback, IntPtr dwInstance, WaveInOutOpenFlags dwFlags);
+
+ ///
+ /// Open WaveIn Device with Window callbacks
+ ///
[DllImport("winmm.dll", EntryPoint = "waveInOpen")]
public static extern MmResult waveInOpenWindow(out IntPtr hWaveIn, IntPtr uDeviceID, WaveFormat lpFormat, IntPtr callbackWindowHandle, IntPtr dwInstance, WaveInOutOpenFlags dwFlags);
// http://msdn.microsoft.com/en-us/library/dd743848%28VS.85%29.aspx
+ ///
+ /// Prepare WaveIn Header
+ ///
[DllImport("winmm.dll")]
public static extern MmResult waveInPrepareHeader(IntPtr hWaveIn, WaveHeader lpWaveInHdr, int uSize);
+ ///
+ /// Unprepare WaveIn Header
+ ///
[DllImport("winmm.dll")]
public static extern MmResult waveInUnprepareHeader(IntPtr hWaveIn, WaveHeader lpWaveInHdr, int uSize);
// http://msdn.microsoft.com/en-us/library/dd743850%28VS.85%29.aspx
+ ///
+ /// Reset WaveIn Device
+ ///
[DllImport("winmm.dll")]
public static extern MmResult waveInReset(IntPtr hWaveIn);
// http://msdn.microsoft.com/en-us/library/dd743851%28VS.85%29.aspx
+ ///
+ /// Start WaveIn device
+ ///
[DllImport("winmm.dll")]
public static extern MmResult waveInStart(IntPtr hWaveIn);
// http://msdn.microsoft.com/en-us/library/dd743852%28VS.85%29.aspx
+ ///
+ /// Stop WaveIn Device
+ ///
[DllImport("winmm.dll")]
public static extern MmResult waveInStop(IntPtr hWaveIn);
// https://msdn.microsoft.com/en-us/library/Dd743845(v=VS.85).aspx
+ ///
+ /// Get WaveIn Device Position
+ ///
[DllImport("winmm.dll")]
public static extern MmResult waveInGetPosition(IntPtr hWaveIn, out MmTime mmTime, int uSize);
diff --git a/NAudio.WinMM/NAudio.WinMM.csproj b/NAudio.WinMM/NAudio.WinMM.csproj
index 75503e42..54b30b80 100644
--- a/NAudio.WinMM/NAudio.WinMM.csproj
+++ b/NAudio.WinMM/NAudio.WinMM.csproj
@@ -2,7 +2,7 @@
netstandard2.0
- 2.2.1
+ 2.2.1-SA
true
true
true
diff --git a/NAudio.WinMM/WaveOutUtils.cs b/NAudio.WinMM/WaveOutUtils.cs
index 1d590929..ea8bc82e 100644
--- a/NAudio.WinMM/WaveOutUtils.cs
+++ b/NAudio.WinMM/WaveOutUtils.cs
@@ -4,8 +4,14 @@
// ReSharper disable once CheckNamespace
namespace NAudio.Wave
{
+ ///
+ /// WaveOutUtils
+ ///
public static class WaveOutUtils
{
+ ///
+ /// Get WaveOut Volume
+ ///
public static float GetWaveOutVolume(IntPtr hWaveOut, object lockObject)
{
int stereoVolume;
@@ -18,6 +24,9 @@ public static float GetWaveOutVolume(IntPtr hWaveOut, object lockObject)
return (stereoVolume & 0xFFFF) / (float)0xFFFF;
}
+ ///
+ /// Set WaveOut Volume
+ ///
public static void SetWaveOutVolume(float value, IntPtr hWaveOut, object lockObject)
{
if (value < 0) throw new ArgumentOutOfRangeException(nameof(value), "Volume must be between 0.0 and 1.0");
@@ -34,6 +43,9 @@ public static void SetWaveOutVolume(float value, IntPtr hWaveOut, object lockObj
MmException.Try(result, "waveOutSetVolume");
}
+ ///
+ /// Get position in bytes
+ ///
public static long GetPositionBytes(IntPtr hWaveOut, object lockObject)
{
lock (lockObject)
diff --git a/NAudio/NAudio.csproj b/NAudio/NAudio.csproj
index f204c07d..bccc6a95 100644
--- a/NAudio/NAudio.csproj
+++ b/NAudio/NAudio.csproj
@@ -1,7 +1,7 @@
net472;netcoreapp3.1;net6.0-windows;net6.0
- 2.2.1
+ 2.2.1-SA
Mark Heath & Contributors
true
NAudio, an audio library for .NET
diff --git a/NAudioTests/WaveStreams/FadeInOutSampleProviderTests.cs b/NAudioTests/WaveStreams/FadeInOutSampleProviderTests.cs
index 9192ae66..310e2c12 100644
--- a/NAudioTests/WaveStreams/FadeInOutSampleProviderTests.cs
+++ b/NAudioTests/WaveStreams/FadeInOutSampleProviderTests.cs
@@ -120,5 +120,61 @@ public void BufferIsZeroedAfterFadeOut()
Assert.AreEqual(20, read);
Assert.AreEqual(0, buffer[0]);
}
+
+ [Test]
+ public void FadeInCompleteInvoked()
+ {
+ // given
+ var source = new TestSampleProvider(10, 1); // 10 samples per second
+ source.UseConstValue = true;
+ source.ConstValue = 100;
+ var fade = new FadeInOutSampleProvider(source);
+ var fadeInsCount = 0;
+ fade.FadeInComplete += (sender, e) =>
+ {
+ fadeInsCount++;
+ };
+
+ // when
+ fade.BeginFadeIn(1000);
+
+ // then
+ float[] buffer = new float[20];
+ int read = fade.Read(buffer, 0, 20);
+ Assert.AreEqual(20, read);
+ Assert.AreEqual(0, buffer[0]); // start of fade-in
+ Assert.AreEqual(50, buffer[5]); // half-way
+ Assert.AreEqual(100, buffer[10]); // fully faded in
+ Assert.AreEqual(100, buffer[15]); // fully faded in
+ Assert.AreEqual(1, fadeInsCount); // the event should be raised exactly once per completed fade-in
+ }
+
+ [Test]
+ public void FadeOutCompleteInvoked()
+ {
+ // given
+ var source = new TestSampleProvider(10, 1); // 10 samples per second
+ source.UseConstValue = true;
+ source.ConstValue = 100;
+ var fade = new FadeInOutSampleProvider(source);
+ var fadeOutsCount = 0;
+ fade.FadeOutComplete += (sender, e) =>
+ {
+ fadeOutsCount++;
+ };
+
+ // when
+ fade.BeginFadeOut(1000);
+
+ // then
+ float[] buffer = new float[20];
+ int read = fade.Read(buffer, 0, 20);
+ Assert.AreEqual(20, read);
+ Assert.AreEqual(100, buffer[0]); // start of fade-out
+ Assert.AreEqual(50, buffer[5]); // half-way
+ Assert.AreEqual(0, buffer[10]); // fully faded out
+ Assert.AreEqual(0, buffer[15]); // fully faded out
+ Assert.AreEqual(1, fadeOutsCount); // the event should be raised exactly once per completed fade-out
+ }
}
}
diff --git a/SA_version.sh b/SA_version.sh
new file mode 100644
index 00000000..9d0c46fa
--- /dev/null
+++ b/SA_version.sh
@@ -0,0 +1,6 @@
+#!/usr/bin/bash
+# Append the "-SA" suffix to the 2.2.1 package version in every project file.
+# grep -rl lists each file that contains the version tag exactly once.
+FILELIST=$(grep -rl '2.2.1</Version>' .)
+for FILE in $FILELIST; do
+ echo ">>> $FILE"
+ sed -i 's/2.2.1<\/Version>/2.2.1-SA<\/Version>/g' "$FILE"
+done