1
0
mirror of https://github.com/cookiengineer/audacity synced 2025-04-30 15:49:41 +02:00

Update portaudio to upstream r1554, including ALSA and WASAPI fixes.

This commit is contained in:
richardash1981 2010-11-12 16:58:01 +00:00
parent aa33613211
commit e72194d8d4
12 changed files with 371 additions and 110 deletions

View File

@ -97,7 +97,7 @@ EXCLUDE_PATTERNS =
EXAMPLE_PATH =
EXAMPLE_PATTERNS =
EXAMPLE_RECURSIVE = NO
IMAGE_PATH =
IMAGE_PATH = doc/src/images
INPUT_FILTER =
FILTER_PATTERNS =
FILTER_SOURCE_FILES = NO

View File

@ -363,6 +363,8 @@ case "${host_os}" in
esac
CFLAGS="$CFLAGS $THREAD_CFLAGS"
test "$enable_shared" != "yes" && SHARED_FLAGS=""
if test "$enable_cxx" = "yes"; then
AC_CONFIG_SUBDIRS([bindings/cpp])
ENABLE_CXX_TRUE=""

View File

@ -0,0 +1,162 @@
/** @page api_overview PortAudio API Overview
This page provides a top-down overview of the entire PortAudio API. It shows how all of the PortAudio data types and functions fit together. It provides links to the documentation for each function and data type. You can find all of the detailed documentation for each API function and data type on the portaudio.h page.
@section introduction Introduction
PortAudio provides a uniform application programming interface (API) across all supported platforms. You can think of the PortAudio library as a wrapper that converts calls to the PortAudio API into calls to platform-specific native audio APIs. Operating systems often offer more than one native audio API and some APIs (such as JACK) may be available on multiple target operating systems. PortAudio supports all the major native audio APIs on each supported platform. The diagram below illustrates the relationship between your application, PortAudio, and the supported native audio APIs:
@image html portaudio-external-architecture-diagram.png
PortAudio provides a uniform interface to native audio APIs. However, it doesn't always provide totally uniform functionality. There are cases where PortAudio is limited by the capabilities of the underlying native audio API. For example, PortAudio doesn't provide sample rate conversion if you request a sample rate that is not supported by the native audio API. Another example is that the ASIO SDK only allows one device to be open at a time, so PortAudio/ASIO doesn't currently support opening multiple ASIO devices simultaneously.
@section key_abstractions Key abstractions: Host APIs, Devices and Streams
The PortAudio processing model includes three main abstractions: <i>Host APIs</i>, audio <i>Devices</i> and audio <i>Streams</i>.
<i>Host APIs</i> represent platform-specific native audio APIs. Some examples of Host APIs are Core Audio on Mac OS, WMME and DirectSound on Windows and OSS and ALSA on Linux. The diagram in the previous section shows many of the supported native APIs. Sometimes it's useful to know which Host APIs you're dealing with, but it is easy to use PortAudio without ever interacting directly with the Host API abstraction.
<i>Devices</i> represent individual hardware audio interfaces or audio ports on the host platform. Devices have names and certain capabilities such as supported sample rates and the number of supported input and output channels. PortAudio provides functions to enumerate available Devices and to query for Device capabilities.
<i>Streams</i> manage active audio input and output from and to Devices. Streams may be half duplex (input or output) or full duplex (simultaneous input and output). Streams operate at a specific sample rate with particular sample formats, buffer sizes and internal buffering latencies. You specify these parameters when you open the Stream. Audio data is communicated between a Stream and your application via a user provided asynchronous callback function or by invoking synchronous read and write functions.
PortAudio supports audio input and output in a variety of sample formats: 8, 16, 24 and 32 bit integer formats and 32 bit floating point, irrespective of the formats supported by the native audio API. PortAudio also supports multichannel buffers in both interleaved and non-interleaved (separate buffer per channel) formats and automatically performs conversion when necessary. If requested, PortAudio can clamp out-of-range samples and/or dither to a native format.
The PortAudio API offers the following functionality:
- Initialize and terminate the library
- Enumerate available Host APIs
- Enumerate available Devices either globally, or within each Host API
- Discover default or recommended Devices and Device settings
- Discover Device capabilities such as supported audio data formats and sample rates
- Create and control audio Streams to acquire audio from and output audio to Devices
- Provide Stream timing information to support synchronising audio with other parts of your application
- Retrieve version and error information.
These functions are described in more detail in the sections that follow.
@section top_level_functions Initialization, termination and utility functions
The PortAudio library must be initialized before it can be used and terminated to clean up afterwards. You initialize PortAudio by calling Pa_Initialize() and clean up by calling Pa_Terminate().
You can query PortAudio for version information using Pa_GetVersion() to get a numeric version number and Pa_GetVersionText() to get a string.
The size in bytes of the various sample formats represented by the @ref PaSampleFormat enumeration can be obtained using Pa_GetSampleSize().
Pa_Sleep() sleeps for a specified number of milliseconds. This isn't intended for use in production systems; it's provided only as a simple portable way to implement tests and examples where the main thread sleeps while audio is acquired or played by an asynchronous callback function.
@section host_apis Host APIs
A Host API acts as a top-level grouping for all of the Devices offered by a single native platform audio API. Each Host API has a unique type identifier, a name, zero or more Devices, and nominated default input and output Devices.
Host APIs are usually referenced by index: an integer of type @ref PaHostApiIndex that ranges between zero and Pa_GetHostApiCount() - 1. You can enumerate all available Host APIs by counting across this range.
You can retrieve the index of the default Host API by calling Pa_GetDefaultHostApi().
Information about a Host API, such as its name and default devices, is stored in a @ref PaHostApiInfo structure. You can retrieve a pointer to a particular Host API's @ref PaHostApiInfo structure by calling Pa_GetHostApiInfo() with the Host API's index as a parameter.
Most PortAudio functions reference Host APIs by @ref PaHostApiIndex indices. Each Host API also has a unique type identifier defined in the @ref PaHostApiTypeId enumeration.
You can call Pa_HostApiTypeIdToHostApiIndex() to retrieve the current @ref PaHostApiIndex for a particular @ref PaHostApiTypeId.
@section devices Devices
A Device represents an audio endpoint provided by a particular native audio API. This usually corresponds to a specific input or output port on a hardware audio interface, or to the interface as a whole. Each Host API operates independently, so a single physical audio port may be addressable via different Devices exposed by different Host APIs.
A Device has a name, is associated with a Host API, and has a maximum number of supported input and output channels. PortAudio provides recommended default latency values and a default sample rate for each Device. To obtain more detailed information about device capabilities you can call Pa_IsFormatSupported() to query whether it is possible to open a Stream using particular Devices, parameters and sample rate.
Although each Device conceptually belongs to a specific Host API, most PortAudio functions and data structures refer to Devices using a global, Host API-independent index of type @ref PaDeviceIndex &ndash; an integer that ranges between zero and Pa_GetDeviceCount() - 1. The reasons for this are partly historical but it also makes it easy for applications to ignore the Host API abstraction and just work with Devices and Streams.
If you want to enumerate Devices belonging to a particular Host API you can count between 0 and PaHostApiInfo::deviceCount - 1. You can convert this Host API-specific index value to a global @ref PaDeviceIndex value by calling Pa_HostApiDeviceIndexToDeviceIndex().
Information about a Device is stored in a @ref PaDeviceInfo structure. You can retrieve a pointer to a Device's @ref PaDeviceInfo structure by calling Pa_GetDeviceInfo() with the Device's index as a parameter.
You can retrieve the indices of the global default input and output devices using Pa_GetDefaultInputDevice() and Pa_GetDefaultOutputDevice(). Default Devices for each Host API are stored in the Host API's @ref PaHostApiInfo structures.
For an example of enumerating devices and printing information about their capabilities see the pa_devs.c program in the test directory of the PortAudio distribution.
@section streams Streams
A Stream represents an active flow of audio data between your application and one or more audio Devices. A Stream operates at a specific sample rate with specific sample formats and buffer sizes.
@subsection io_methods I/O Methods: callback and read/write
PortAudio offers two methods for communicating audio data between an open Stream and your Application: (1) an asynchronous callback interface, where PortAudio calls a user defined callback function when new audio data is available or required, and (2) synchronous read and write functions which can be used in a blocking or non-blocking manner. You choose between the two methods when you open a Stream. The two methods are discussed in more detail below.
@subsection opening_and_closing_streams Opening and Closing Streams
You call Pa_OpenStream() to open a Stream, specifying the Device(s) to use, the number of input and output channels, sample formats, suggested latency values and flags that control dithering, clipping and overflow handling. You specify many of these parameters in two PaStreamParameters structures, one for input and one for output. If you're using the callback I/O method you also pass a callback buffer size, callback function pointer and user data pointer.
Devices may be full duplex (supporting simultaneous input and output) or half duplex (supporting input or output) &ndash; usually this reflects the structure of the underlying native audio API. When opening a Stream you can specify one full duplex Device for both input and output, or two different Devices for input and output. Some Host APIs only support full-duplex operation with a full-duplex device (e.g. ASIO) but most are able to aggregate two half duplex devices into a full duplex Stream. PortAudio requires that all devices specified in a call to Pa_OpenStream() belong to the same Host API.
A successful call to Pa_OpenStream() creates a pointer to a @ref PaStream &ndash; an opaque handle representing the open Stream. All PortAudio API functions that operate on open Streams take a pointer to a @ref PaStream as their first parameter.
PortAudio also provides Pa_OpenDefaultStream() &ndash; a simpler alternative to Pa_OpenStream() which you can use when you want to open the default audio Device(s) with default latency parameters.
You call Pa_CloseStream() to close a Stream when you've finished using it.
@subsection starting_and_stopping_streams Starting and Stopping Streams
Newly opened Streams are initially stopped. You call Pa_StartStream() to start a Stream. You can stop a running Stream using Pa_StopStream() or Pa_AbortStream() (the Stop function plays out all internally queued audio data, while Abort tries to stop as quickly as possible). An open Stream can be started and stopped multiple times. You can call Pa_IsStreamStopped() to query whether a Stream is running or stopped.
By calling Pa_SetStreamFinishedCallback() it is possible to register a special @ref PaStreamFinishedCallback that will be called when the Stream has completed playing any internally queued buffers. This can be used in conjunction with the @ref paComplete stream callback return value (see below) to avoid blocking on a call to Pa_StopStream() while queued audio data is still playing.
@subsection callback_io_method The Callback I/O Method
So-called 'callback Streams' operate by periodically invoking a callback function you supply to Pa_OpenStream(). The callback function must implement the @ref PaStreamCallback signature. It gets called by PortAudio every time PortAudio needs your application to consume or produce audio data. The callback is passed pointers to buffers containing the audio to process. The format (interleave, sample data type) and size of these buffers is determined by the parameters passed to Pa_OpenStream() when the Stream was opened.
Stream callbacks usually return @ref paContinue to indicate that PortAudio should keep the stream running. It is possible to deactivate a Stream from the stream callback by returning either @ref paComplete or @ref paAbort. In this case the Stream enters a deactivated state after the last buffer has finished playing (@ref paComplete) or as soon as possible (@ref paAbort). You can detect the deactivated state by calling Pa_IsStreamActive() or by using Pa_SetStreamFinishedCallback() to subscribe to a stream finished notification. Note that even if the stream callback returns @ref paComplete it's still necessary to call Pa_StopStream() or Pa_AbortStream() to enter the stopped state.
Many of the tests in the /tests directory of the PortAudio distribution implement PortAudio stream callbacks. For example see: patest_sine.c (audio output), patest_record.c (audio input), patest_wire.c (audio pass-through) and pa_fuzz.c (simple audio effects processing).
<strong>IMPORTANT:</strong> The stream callback function often needs to operate with very high or real-time priority. As a result there are strict requirements placed on the type of code that can be executed in a stream callback. In general this means avoiding any code that might block, including: acquiring locks, calling OS API functions including allocating memory. With the exception of Pa_GetStreamCpuLoad() you may not call PortAudio API functions from within the stream callback.
@subsection read_write_io_method The Read/Write I/O Method
As an alternative to the callback I/O method, PortAudio provides a synchronous read/write interface for acquiring and playing audio. This can be useful for applications that don't require the lowest possible latency, or don't warrant the increased complexity of synchronising with an asynchronous callback function. This I/O method is also useful when calling PortAudio from programming languages that don't support asynchronous callbacks.
To open a Stream in read/write mode you pass a NULL stream callback function pointer to Pa_OpenStream().
To write audio data to a Stream call Pa_WriteStream() and to read data call Pa_ReadStream(). These functions will block if the internal buffers are full, making them safe to call in a tight loop. If you want to avoid blocking you can query the amount of available read or write space using Pa_GetStreamReadAvailable() or Pa_GetStreamWriteAvailable() and use the returned values to limit the amount of data you read or write.
For examples of the read/write I/O method see the following examples in the /tests directory of the PortAudio distribution: patest_read_record.c (audio input), patest_write_sine.c (audio output), patest_read_write_wire.c (audio pass-through).
@subsection stream_info Retrieving Stream Information
You can retrieve information about an open Stream by calling Pa_GetStreamInfo(). This returns a @ref PaStreamInfo structure containing the actual input and output latency and sample rate of the stream. It's possible for these values to be different from the suggested values passed to Pa_OpenStream().
When using a callback stream you can call Pa_GetStreamCpuLoad() to retrieve a rough estimate of the amount of CPU time your callback function is using.
@subsection stream_timing Stream Timing Information
When using the callback I/O method your stream callback function receives timing information via a pointer to a PaStreamCallbackTimeInfo structure. This structure contains the current time along with the estimated hardware capture and playback time of the first sample of the input and output buffers. All times are measured in seconds relative to a Stream-specific clock. The current Stream clock time can be retrieved using Pa_GetStreamTime().
You can use the stream callback @ref PaStreamCallbackTimeInfo times in conjunction with timestamps returned by Pa_GetStreamTime() to implement time synchronization schemes such as time aligning your GUI display with rendered audio, or maintaining synchronization between MIDI and audio playback.
@section error_handling Error Handling
Most PortAudio functions return error codes using values from the @ref PaError enumeration. All error codes are negative values. Some functions return values greater than or equal to zero for normal results and a negative error code in case of error.
You can convert @ref PaError error codes to human readable text by calling Pa_GetErrorText().
PortAudio usually tries to translate error conditions into portable @ref PaError error codes. However if an unexpected error is encountered the @ref paUnanticipatedHostError code may be returned. In this case a further mechanism is provided to query for Host API-specific error information. If PortAudio returns @ref paUnanticipatedHostError you can call Pa_GetLastHostErrorInfo() to retrieve a pointer to a @ref PaHostErrorInfo structure that provides more information, including the Host API that encountered the error, a native API error code and error text.
@section host_api_extensions Host API and Platform-specific Extensions
The public PortAudio API only exposes functionality that can be provided across all target platforms. In some cases individual native audio APIs offer unique functionality. Some PortAudio Host APIs expose this functionality via Host API-specific extensions. Examples include access to low-level buffering and priority parameters, opening a Stream with only a subset of a Device's channels, or accessing channel metadata such as channel names.
Host API-specific extensions are provided in the form of additional functions and data structures defined in Host API-specific header files found in the /include directory.
The @ref PaStreamParameters structure passed to Pa_IsFormatSupported() and Pa_OpenStream() has a field named @ref PaStreamParameters::hostApiSpecificStreamInfo that is sometimes used to pass low level information when opening a Stream.
See the documentation for the individual Host API-specific header files for details of the functionality they expose:
- pa_asio.h
- pa_jack.h
- pa_linux_alsa.h
- pa_mac_core.h
- pa_win_ds.h
- pa_win_wasapi.h
- pa_win_wmme.h
- pa_win_waveformat.h
*/

Binary file not shown.

After

Width:  |  Height:  |  Size: 20 KiB

View File

@ -1,38 +1,27 @@
/* doxygen index page */
/** @mainpage
PortAudio is an open-source cross-platform C library for audio input
and output. It is designed to simplify the porting of audio applications
between various platforms, and also to simplify the development of audio
software in general by hiding the complexities of device interfacing.
PortAudio is a cross-platform, open-source C language library for real-time audio input and output. The library provides functions that allow your software to acquire and output real-time audio streams from your computer's hardware audio interfaces. It is designed to simplify writing cross-platform audio applications, and also to simplify the development of audio software in general by hiding the complexities of dealing directly with each native audio API. PortAudio is used to implement sound recording, editing and mixing applications, software synthesizers, effects processors, music players, internet telephony applications, software defined radios and more. Supported platforms include MS Windows, Mac OS X and Linux. Third-party language bindings make it possible to call PortAudio from other programming languages including C++, C#, Python, PureBasic, FreePascal and Lazarus.
See the PortAudio website for further information http://www.portaudio.com
This documentation pertains to PortAudio V19, API version 2.0 which is
currently under development. API version 2.0 differs in a number of ways from
previous versions, please consult the enhancement proposals for further details:
http://www.portaudio.com/docs/proposals/index.html
Read the @ref api_overview for a top-down view of the PortAudio API, its capabilities, functions and data structures. The documentation for PortAudio's main header file portaudio.h details the individual data types and functions that make up the API.
This documentation is under construction. Things you might be interested in
include:
- The PortAudio API 2.0, as documented in portaudio.h
- Tutorials for the V19 API, currently housed on the PortAudio Wiki:
To get started writing code check out the tutorials on the PortAudio Wiki:
http://www.portaudio.com/trac/wiki/TutorialDir/TutorialStart
- Implementation status is documented here:
http://www.portaudio.com/docs/proposals/status.html
This documentation covers the current API version: PortAudio V19, API version 2.0. API 2.0 differs in a number of ways from previous versions (most often encountered in PortAudio V18), please consult the enhancement proposals for details of what was added/changed for V19:
http://www.portaudio.com/docs/proposals/index.html
You might also be interested in:
- @ref srcguide
- The @ref License
If you're interested in contributing to PortAudio, you may be interested in:
- The doxygen generated <a href="todo.html">TODO List</a>. Feel free to pick an item off the TODO list
and fix/implement it. You may want to enquire about status on the PortAudio mailing list first.
- Our mailing list for users and developers:
http://music.columbia.edu/mailman/listinfo/portaudio/
- Our issue tracking system:
http://www.portaudio.com/trac
@ -40,5 +29,6 @@ http://www.portaudio.com/trac
- Coding guidelines:
http://www.portaudio.com/docs/proposals/014-StyleGuide.html
If you're interested in helping out with PortAudio development, you may be interested in the doxygen generated <a href="todo.html">TODO List</a>. Feel free to pick an item off the TODO list and fix/implement it. You may want to enquire about status on the PortAudio mailing list first.
*/

View File

@ -1,7 +1,7 @@
#ifndef PORTAUDIO_H
#define PORTAUDIO_H
/*
* $Id: portaudio.h,v 1.10 2008-12-31 15:38:31 richardash1981 Exp $
* $Id: portaudio.h 1542 2010-10-14 03:34:34Z rossb $
* PortAudio Portable Real-Time Audio Library
* PortAudio API Header File
* Latest version available at: http://www.portaudio.com/
@ -419,8 +419,10 @@ typedef double PaTime;
paUInt8 is an unsigned 8 bit format where 128 is considered "ground"
The paNonInterleaved flag indicates that a multichannel buffer is passed
as a set of non-interleaved pointers.
The paNonInterleaved flag indicates that audio data is passed as an array
of pointers to separate buffers, one buffer for each channel. Usually,
when this flag is not used, audio data is passed as a single buffer with
all channels interleaved.
@see Pa_OpenStream, Pa_OpenDefaultStream, PaDeviceInfo
@see paFloat32, paInt16, paInt32, paInt24, paInt8
@ -435,9 +437,9 @@ typedef unsigned long PaSampleFormat;
#define paInt16 ((PaSampleFormat) 0x00000008) /**< @see PaSampleFormat */
#define paInt8 ((PaSampleFormat) 0x00000010) /**< @see PaSampleFormat */
#define paUInt8 ((PaSampleFormat) 0x00000020) /**< @see PaSampleFormat */
#define paCustomFormat ((PaSampleFormat) 0x00010000)/**< @see PaSampleFormat */
#define paCustomFormat ((PaSampleFormat) 0x00010000) /**< @see PaSampleFormat */
#define paNonInterleaved ((PaSampleFormat) 0x80000000)
#define paNonInterleaved ((PaSampleFormat) 0x80000000) /**< @see PaSampleFormat */
/** A structure providing information and capabilities of PortAudio devices.
Devices may support input, output or both input and output.
@ -704,8 +706,12 @@ typedef enum PaStreamCallbackResult
They consume, process or generate audio in response to requests from an
active PortAudio stream.
@param input and @param output are arrays of interleaved samples,
the format, packing and number of channels used by the buffers are
@param input and @param output are either arrays of interleaved samples or;
if non-interleaved samples were requested using the paNonInterleaved sample
format flag, an array of buffer pointers, one non-interleaved buffer for
each channel.
The format, packing and number of channels used by the buffers are
determined by parameters to Pa_OpenStream().
@param frameCount The number of sample frames to be processed by
@ -1040,9 +1046,9 @@ double Pa_GetStreamCpuLoad( PaStream* stream );
@param buffer A pointer to a buffer of sample frames. The buffer contains
samples in the format specified by the inputParameters->sampleFormat field
used to open the stream, and the number of channels specified by
inputParameters->numChannels. If non-interleaved samples were requested,
buffer is a pointer to the first element of an array of non-interleaved
buffer pointers, one for each channel.
inputParameters->numChannels. If non-interleaved samples were requested using
the paNonInterleaved sample format flag, buffer is a pointer to the first element
of an array of buffer pointers, one non-interleaved buffer for each channel.
@param frames The number of frames to be read into buffer. This parameter
is not constrained to a specific range, however high performance applications
@ -1066,9 +1072,9 @@ PaError Pa_ReadStream( PaStream* stream,
@param buffer A pointer to a buffer of sample frames. The buffer contains
samples in the format specified by the outputParameters->sampleFormat field
used to open the stream, and the number of channels specified by
outputParameters->numChannels. If non-interleaved samples were requested,
buffer is a pointer to the first element of an array of non-interleaved
buffer pointers, one for each channel.
outputParameters->numChannels. If non-interleaved samples were requested using
the paNonInterleaved sample format flag, buffer is a pointer to the first element
of an array of buffer pointers, one non-interleaved buffer for each channel.
@param frames The number of frames to be written from buffer. This parameter
is not constrained to a specific range, however high performance applications

View File

@ -1,5 +1,5 @@
/*
* $Id: pa_ringbuffer.c 1421 2009-11-18 16:09:05Z bjornroche $
* $Id: pa_ringbuffer.c 1549 2010-10-24 10:21:35Z rossb $
* Portable Audio I/O Library
* Ring Buffer utility.
*
@ -91,7 +91,7 @@ ring_buffer_size_t PaUtil_GetRingBufferWriteAvailable( PaUtilRingBuffer *rbuf )
}
/***************************************************************************
** Clear buffer. Should only be called when buffer is NOT being read. */
** Clear buffer. Should only be called when buffer is NOT being read or written. */
void PaUtil_FlushRingBuffer( PaUtilRingBuffer *rbuf )
{
rbuf->writeIndex = rbuf->readIndex = 0;

View File

@ -1,7 +1,7 @@
#ifndef PA_RINGBUFFER_H
#define PA_RINGBUFFER_H
/*
* $Id: pa_ringbuffer.h 1421 2009-11-18 16:09:05Z bjornroche $
* $Id: pa_ringbuffer.h 1549 2010-10-24 10:21:35Z rossb $
* Portable Audio I/O Library
* Ring Buffer utility.
*
@ -98,13 +98,13 @@ typedef struct PaUtilRingBuffer
char *buffer; /**< Pointer to the buffer containing the actual data. */
}PaUtilRingBuffer;
/** Initialize Ring Buffer.
/** Initialize Ring Buffer to empty state ready to have elements written to it.
@param rbuf The ring buffer.
@param elementSizeBytes The size of a single data element in bytes.
@param elementCount The number of elements in the buffer (must be power of 2).
@param elementCount The number of elements in the buffer (must be a power of 2).
@param dataPtr A pointer to a previously allocated area where the data
will be maintained. It must be elementCount*elementSizeBytes long.
@ -113,7 +113,7 @@ typedef struct PaUtilRingBuffer
*/
ring_buffer_size_t PaUtil_InitializeRingBuffer( PaUtilRingBuffer *rbuf, ring_buffer_size_t elementSizeBytes, ring_buffer_size_t elementCount, void *dataPtr );
/** Clear buffer. Should only be called when buffer is NOT being read.
/** Reset buffer to empty. Should only be called when buffer is NOT being read or written.
@param rbuf The ring buffer.
*/
@ -193,7 +193,7 @@ ring_buffer_size_t PaUtil_GetRingBufferWriteRegions( PaUtilRingBuffer *rbuf, rin
*/
ring_buffer_size_t PaUtil_AdvanceRingBufferWriteIndex( PaUtilRingBuffer *rbuf, ring_buffer_size_t elementCount );
/** Get address of region(s) from which we can write data.
/** Get address of region(s) from which we can read data.
@param rbuf The ring buffer.

View File

@ -1,5 +1,5 @@
/*
* $Id: pa_linux_alsa.c 1540 2010-09-20 16:23:30Z dmitrykos $
* $Id: pa_linux_alsa.c 1543 2010-10-15 20:39:23Z dmitrykos $
* PortAudio Portable Real-Time Audio Library
* Latest Version at: http://www.portaudio.com
* ALSA implementation by Joshua Haberman and Arve Knudsen
@ -239,9 +239,12 @@ static const PaAlsaDeviceInfo *GetDeviceInfo( const PaUtilHostApiRepresentation
return (const PaAlsaDeviceInfo *)hostApi->deviceInfos[device];
}
static void AlsaErrorHandler(const char *file, int line, const char *function, int err, const char *fmt, ...)
/** Uncommented because AlsaErrorHandler is unused for anything good yet. If AlsaErrorHandler is
to be used, do not forget to register this callback in PaAlsa_Initialize, and unregister in Terminate.
*/
/*static void AlsaErrorHandler(const char *file, int line, const char *function, int err, const char *fmt, ...)
{
}
}*/
PaError PaAlsa_Initialize( PaUtilHostApiRepresentation **hostApi, PaHostApiIndex hostApiIndex )
{
@ -262,7 +265,10 @@ PaError PaAlsa_Initialize( PaUtilHostApiRepresentation **hostApi, PaHostApiIndex
(*hostApi)->OpenStream = OpenStream;
(*hostApi)->IsFormatSupported = IsFormatSupported;
ENSURE_( snd_lib_error_set_handler(AlsaErrorHandler), paUnanticipatedHostError );
/** If AlsaErrorHandler is to be used, do not forget to unregister callback pointer in
Terminate function.
*/
/*ENSURE_( snd_lib_error_set_handler(AlsaErrorHandler), paUnanticipatedHostError );*/
PA_ENSURE( BuildDeviceList( alsaHostApi ) );
@ -308,6 +314,10 @@ static void Terminate( struct PaUtilHostApiRepresentation *hostApi )
PaAlsaHostApiRepresentation *alsaHostApi = (PaAlsaHostApiRepresentation*)hostApi;
assert( hostApi );
/** See AlsaErrorHandler and PaAlsa_Initialize for details.
*/
/*snd_lib_error_set_handler(NULL);*/
if( alsaHostApi->allocations )
{

View File

@ -1611,7 +1611,7 @@ static PaError OpenStream( struct PaUtilHostApiRepresentation *hostApi,
/*now, we need to allocate memory for the ring buffer*/
data = calloc( ringSize, szfl );
data = calloc( ringSize, szfl*inputParameters->channelCount );
if( !data )
{
result = paInsufficientMemory;
@ -1619,20 +1619,11 @@ static PaError OpenStream( struct PaUtilHostApiRepresentation *hostApi,
}
/* now we can initialize the ring buffer */
//FIXME: element size whould probably be szfl*inputchan
// but that will require some work all over the
// place to patch up. szfl may be sufficient and would
// be way easier to handle, but it seems clear from the
// discussion that buffer processor compatibility
// requires szfl*inputchan.
// See revision 1346 and discussion:
// http://techweb.rfa.org/pipermail/portaudio/2008-February/008295.html
PaUtil_InitializeRingBuffer( &stream->inputRingBuffer,
1, ringSize*szfl, data ) ;
PaUtil_InitializeRingBuffer( &stream->inputRingBuffer, szfl*inputParameters->channelCount, ringSize, data ) ;
/* advance the read point a little, so we are reading from the
middle of the buffer */
if( stream->outputUnit )
PaUtil_AdvanceRingBufferWriteIndex( &stream->inputRingBuffer, ringSize*szfl / RING_BUFFER_ADVANCE_DENOMINATOR );
PaUtil_AdvanceRingBufferWriteIndex( &stream->inputRingBuffer, ringSize / RING_BUFFER_ADVANCE_DENOMINATOR );
}
}
@ -1797,12 +1788,14 @@ static OSStatus ringBufferIOProc( AudioConverterRef inAudioConverter,
return RING_BUFFER_EMPTY;
}
assert(sizeof(UInt32) == sizeof(ring_buffer_size_t));
assert( ( (*ioDataSize) / rb->elementSizeBytes ) * rb->elementSizeBytes == (*ioDataSize) ) ;
(*ioDataSize) /= rb->elementSizeBytes ;
PaUtil_GetRingBufferReadRegions( rb, *ioDataSize,
outData, (ring_buffer_size_t *)ioDataSize,
&dummyData, &dummySize );
assert( *ioDataSize );
PaUtil_AdvanceRingBufferReadIndex( rb, *ioDataSize );
(*ioDataSize) *= rb->elementSizeBytes ;
return noErr;
}
@ -2056,10 +2049,10 @@ static OSStatus AudioIOProc( void *inRefCon,
void *data1, *data2;
ring_buffer_size_t size1, size2;
PaUtil_GetRingBufferReadRegions( &stream->inputRingBuffer,
inChan*frames*flsz,
frames,
&data1, &size1,
&data2, &size2 );
if( size1 / ( flsz * inChan ) == frames ) {
if( size1 == frames ) {
/* simplest case: all in first buffer */
PaUtil_SetInputFrameCount( &(stream->bufferProcessor), frames );
PaUtil_SetInterleavedInputChannels( &(stream->bufferProcessor),
@ -2070,7 +2063,7 @@ static OSStatus AudioIOProc( void *inRefCon,
PaUtil_EndBufferProcessing( &(stream->bufferProcessor),
&callbackResult );
PaUtil_AdvanceRingBufferReadIndex(&stream->inputRingBuffer, size1 );
} else if( ( size1 + size2 ) / ( flsz * inChan ) < frames ) {
} else if( size1 + size2 < frames ) {
/*we underflowed. take what data we can, zero the rest.*/
unsigned char data[frames*inChan*flsz];
if( size1 )
@ -2093,14 +2086,12 @@ static OSStatus AudioIOProc( void *inRefCon,
stream->xrunFlags |= paInputUnderflow;
} else {
/*we got all the data, but split between buffers*/
PaUtil_SetInputFrameCount( &(stream->bufferProcessor),
size1 / ( flsz * inChan ) );
PaUtil_SetInputFrameCount( &(stream->bufferProcessor), size1 );
PaUtil_SetInterleavedInputChannels( &(stream->bufferProcessor),
0,
data1,
inChan );
PaUtil_Set2ndInputFrameCount( &(stream->bufferProcessor),
size2 / ( flsz * inChan ) );
PaUtil_Set2ndInputFrameCount( &(stream->bufferProcessor), size2 );
PaUtil_Set2ndInterleavedInputChannels( &(stream->bufferProcessor),
0,
data2,
@ -2150,7 +2141,7 @@ static OSStatus AudioIOProc( void *inRefCon,
bytesIn = sizeof( float ) * inNumberFrames * chan;
bytesOut = PaUtil_WriteRingBuffer( &stream->inputRingBuffer,
stream->inputAudioBufferList.mBuffers[0].mData,
bytesIn );
inNumberFrames );
if( bytesIn != bytesOut )
stream->xrunFlags |= paInputOverflow ;
}

View File

@ -246,8 +246,8 @@ long computeRingBufferSize( const PaStreamParameters *inputParameters,
long ringSize;
int index;
int i;
double latencyTimesChannelCount ;
long framesPerBufferTimesChannelCount ;
double latency ;
long framesPerBuffer ;
VVDBUG(( "computeRingBufferSize()\n" ));
@ -255,33 +255,25 @@ long computeRingBufferSize( const PaStreamParameters *inputParameters,
if( outputParameters && inputParameters )
{
latencyTimesChannelCount = MAX(
inputParameters->suggestedLatency * inputParameters->channelCount,
outputParameters->suggestedLatency * outputParameters->channelCount );
framesPerBufferTimesChannelCount = MAX(
inputFramesPerBuffer * inputParameters->channelCount,
outputFramesPerBuffer * outputParameters->channelCount );
latency = MAX( inputParameters->suggestedLatency, outputParameters->suggestedLatency );
framesPerBuffer = MAX( inputFramesPerBuffer, outputFramesPerBuffer );
}
else if( outputParameters )
{
latencyTimesChannelCount
= outputParameters->suggestedLatency * outputParameters->channelCount;
framesPerBufferTimesChannelCount
= outputFramesPerBuffer * outputParameters->channelCount;
latency = outputParameters->suggestedLatency;
framesPerBuffer = outputFramesPerBuffer ;
}
else /* we have inputParameters */
{
latencyTimesChannelCount
= inputParameters->suggestedLatency * inputParameters->channelCount;
framesPerBufferTimesChannelCount
= inputFramesPerBuffer * inputParameters->channelCount;
latency = inputParameters->suggestedLatency;
framesPerBuffer = inputFramesPerBuffer ;
}
ringSize = (long) ( latencyTimesChannelCount * sampleRate * 2 + .5);
VDBUG( ( "suggested latency * channelCount: %d\n", (int) (latencyTimesChannelCount*sampleRate) ) );
if( ringSize < framesPerBufferTimesChannelCount * 3 )
ringSize = framesPerBufferTimesChannelCount * 3 ;
VDBUG(("framesPerBuffer*channelCount:%d\n",(int)framesPerBufferTimesChannelCount));
ringSize = (long) ( latency * sampleRate * 2 + .5);
VDBUG( ( "suggested latency : %d\n", (int) (latency*sampleRate) ) );
if( ringSize < framesPerBuffer * 3 )
ringSize = framesPerBuffer * 3 ;
VDBUG(("framesPerBuffer:%d\n",(int)framesPerBuffer));
VDBUG(("Ringbuffer size (1): %d\n", (int)ringSize ));
/* make sure it's at least 4 */

View File

@ -46,6 +46,7 @@
#include <windows.h>
#include <stdio.h>
#include <process.h>
#include <assert.h>
#include <mmsystem.h>
#include <mmreg.h> // must be before other Wasapi headers
#if defined(_MSC_VER) && (_MSC_VER >= 1400)
@ -631,13 +632,13 @@ static UINT32 ALIGN_BWD(UINT32 v, UINT32 align)
// ------------------------------------------------------------------------------------------
// Aligns 'v' forward
/*static UINT32 ALIGN_FWD(UINT32 v, UINT32 align)
static UINT32 ALIGN_FWD(UINT32 v, UINT32 align)
{
UINT32 remainder = (align ? (v % align) : 0);
if (remainder == 0)
return v;
return v + (align - remainder);
}*/
}
// ------------------------------------------------------------------------------------------
// Aligns WASAPI buffer to 128 byte packet boundary. HD Audio will fail to play if buffer
@ -653,6 +654,11 @@ static UINT32 AlignFramesPerBuffer(UINT32 nFrames, UINT32 nSamplesPerSec, UINT32
// align to packet size
frame_bytes = pAlignFunc(frame_bytes, HDA_PACKET_SIZE); // use ALIGN_FWD if bigger but safer period is more desired
// at least 1 frame must be available
if (frame_bytes < HDA_PACKET_SIZE)
frame_bytes = HDA_PACKET_SIZE;
nFrames = frame_bytes / nBlockAlign;
packets = frame_bytes / HDA_PACKET_SIZE;
@ -677,6 +683,19 @@ static UINT32 GetFramesSleepTime(UINT32 nFrames, UINT32 nSamplesPerSec)
return (UINT32)(nDuration/REFTIMES_PER_MILLISEC/2);
}
// ------------------------------------------------------------------------------------------
static UINT32 GetFramesSleepTimeMicroseconds(UINT32 nFrames, UINT32 nSamplesPerSec)
{
    REFERENCE_TIME duration100ns;
    /* Guard against division by zero for an unconfigured stream direction. */
    if (nSamplesPerSec == 0)
        return 0;
#define REFTIMES_PER_SEC 10000000
#define REFTIMES_PER_MILLISEC 10000
    /* Duration of nFrames at this sample rate, in 100-nanosecond
       (REFERENCE_TIME) units. */
    duration100ns = (REFERENCE_TIME)((double)REFTIMES_PER_SEC * nFrames / nSamplesPerSec);
    /* /10 converts 100-ns units to microseconds; /2 sleeps for half the
       buffer duration, mirroring GetFramesSleepTime() above. */
    return (UINT32)(duration100ns / 10 / 2);
}
// ------------------------------------------------------------------------------------------
static BOOL SetupAVRT()
{
@ -1371,20 +1390,20 @@ static void LogWAVEFORMATEXTENSIBLE(const WAVEFORMATEXTENSIBLE *in)
{
case WAVE_FORMAT_EXTENSIBLE: {
PRINT(("wFormatTag=WAVE_FORMAT_EXTENSIBLE\n"));
PRINT(("wFormatTag =WAVE_FORMAT_EXTENSIBLE\n"));
if (IsEqualGUID(&in->SubFormat, &pa_KSDATAFORMAT_SUBTYPE_IEEE_FLOAT))
{
PRINT(("SubFormat=KSDATAFORMAT_SUBTYPE_IEEE_FLOAT\n"));
PRINT(("SubFormat =KSDATAFORMAT_SUBTYPE_IEEE_FLOAT\n"));
}
else
if (IsEqualGUID(&in->SubFormat, &pa_KSDATAFORMAT_SUBTYPE_PCM))
{
PRINT(("SubFormat=KSDATAFORMAT_SUBTYPE_PCM\n"));
PRINT(("SubFormat =KSDATAFORMAT_SUBTYPE_PCM\n"));
}
else
{
PRINT(("SubFormat=CUSTOM GUID{%d:%d:%d:%d%d%d%d%d%d%d%d}\n",
PRINT(("SubFormat =CUSTOM GUID{%d:%d:%d:%d%d%d%d%d%d%d%d}\n",
in->SubFormat.Data1,
in->SubFormat.Data2,
in->SubFormat.Data3,
@ -1397,14 +1416,15 @@ static void LogWAVEFORMATEXTENSIBLE(const WAVEFORMATEXTENSIBLE *in)
(int)in->SubFormat.Data4[6],
(int)in->SubFormat.Data4[7]));
}
PRINT(("Samples.wValidBitsPerSample=%d\n", in->Samples.wValidBitsPerSample));
PRINT(("Samples.wValidBitsPerSample =%d\n", in->Samples.wValidBitsPerSample));
PRINT(("dwChannelMask =0x%X\n",in->dwChannelMask));
break; }
case WAVE_FORMAT_PCM: PRINT(("wFormatTag=WAVE_FORMAT_PCM\n")); break;
case WAVE_FORMAT_IEEE_FLOAT: PRINT(("wFormatTag=WAVE_FORMAT_IEEE_FLOAT\n")); break;
default : PRINT(("wFormatTag=UNKNOWN(%d)\n",old->wFormatTag)); break;
case WAVE_FORMAT_PCM: PRINT(("wFormatTag =WAVE_FORMAT_PCM\n")); break;
case WAVE_FORMAT_IEEE_FLOAT: PRINT(("wFormatTag =WAVE_FORMAT_IEEE_FLOAT\n")); break;
default:
PRINT(("wFormatTag =UNKNOWN(%d)\n",old->wFormatTag)); break;
}
PRINT(("nChannels =%d\n",old->nChannels));
@ -1487,14 +1507,14 @@ static PaError MakeWaveFormatFromParams(WAVEFORMATEXTENSIBLE *wavex, const PaStr
old->nBlockAlign = (old->nChannels * (old->wBitsPerSample/8));
old->nAvgBytesPerSec = (old->nSamplesPerSec * old->nBlockAlign);
//WAVEFORMATEX
/*if ((params->channelCount <= 2) && ((bitsPerSample == 16) || (bitsPerSample == 8)))
// WAVEFORMATEX
if ((params->channelCount <= 2) && ((bitsPerSample == 16) || (bitsPerSample == 8)))
{
old->cbSize = 0;
old->wFormatTag = WAVE_FORMAT_PCM;
}
//WAVEFORMATEXTENSIBLE
else*/
// WAVEFORMATEXTENSIBLE
else
{
old->wFormatTag = WAVE_FORMAT_EXTENSIBLE;
old->cbSize = sizeof(WAVEFORMATEXTENSIBLE) - sizeof(WAVEFORMATEX);
@ -1517,10 +1537,26 @@ static PaError MakeWaveFormatFromParams(WAVEFORMATEXTENSIBLE *wavex, const PaStr
{
case 1: wavex->dwChannelMask = KSAUDIO_SPEAKER_MONO; break;
case 2: wavex->dwChannelMask = KSAUDIO_SPEAKER_STEREO; break;
case 3: wavex->dwChannelMask = KSAUDIO_SPEAKER_STEREO|SPEAKER_LOW_FREQUENCY; break;
case 4: wavex->dwChannelMask = KSAUDIO_SPEAKER_QUAD; break;
case 5: wavex->dwChannelMask = KSAUDIO_SPEAKER_QUAD|SPEAKER_LOW_FREQUENCY; break;
#ifdef KSAUDIO_SPEAKER_5POINT1_SURROUND
case 6: wavex->dwChannelMask = KSAUDIO_SPEAKER_5POINT1_SURROUND; break;
#else
case 6: wavex->dwChannelMask = KSAUDIO_SPEAKER_5POINT1; break;
#endif
#ifdef KSAUDIO_SPEAKER_5POINT1_SURROUND
case 7: wavex->dwChannelMask = KSAUDIO_SPEAKER_5POINT1_SURROUND|SPEAKER_BACK_CENTER; break;
#else
case 7: wavex->dwChannelMask = KSAUDIO_SPEAKER_5POINT1|SPEAKER_BACK_CENTER; break;
#endif
#ifdef KSAUDIO_SPEAKER_7POINT1_SURROUND
case 8: wavex->dwChannelMask = KSAUDIO_SPEAKER_7POINT1_SURROUND; break;
#else
case 8: wavex->dwChannelMask = KSAUDIO_SPEAKER_7POINT1; break;
default: wavex->dwChannelMask = 0; break;
#endif
default: wavex->dwChannelMask = 0;
}
}
}
@ -2036,7 +2072,7 @@ static HRESULT CreateAudioClient(PaWasapiStream *pStream, PaWasapiSubStream *pSu
pSub->period = pInfo->MinimumDevicePeriod;
// Recalculate aligned period
framesPerLatency = MakeFramesFromHns(pSub->period, pSub->wavex.Format.nSamplesPerSec);
_CalculateAlignedPeriod(pSub, &framesPerLatency, ALIGN_BWD);
_CalculateAlignedPeriod(pSub, &framesPerLatency, ALIGN_FWD);
}
}
@ -3699,14 +3735,17 @@ static HRESULT ProcessInputBuffer(PaWasapiStream *stream, PaWasapiHostProcessor
for (;;)
{
// Check if blocking call must be interrupted
if (WaitForSingleObject(stream->hCloseRequest, 1) != WAIT_TIMEOUT)
if (WaitForSingleObject(stream->hCloseRequest, 0) != WAIT_TIMEOUT)
break;
// Get the available data in the shared buffer.
if ((hr = IAudioCaptureClient_GetBuffer(stream->cclient, &data, &frames, &flags, NULL, NULL)) != S_OK)
{
if (hr == AUDCLNT_S_BUFFER_EMPTY)
{
hr = S_OK;
break; // capture buffer exhausted
}
return LogHostError(hr);
break;
@ -3976,6 +4015,46 @@ static HRESULT PollGetOutputFramesAvailable(PaWasapiStream *stream, UINT32 *avai
return hr;
}
// ------------------------------------------------------------------------------------------
/*! \class ThreadIdleScheduler
Emulates thread sleeps of less than 1 millisecond under Windows. The scheduler
calculates how many polling rounds the thread must run until the next real sleep
of 1 millisecond. It does not make the thread sleep for an actual number of
microseconds; instead it controls how many imaginary microseconds of idle time
accumulate before the thread is allowed to sleep.
*/
typedef struct ThreadIdleScheduler
{
UINT32 m_idle_microseconds; //!< number of microseconds to sleep
UINT32 m_next_sleep; //!< next sleep round
UINT32 m_i; //!< current round iterator position
UINT32 m_resolution; //!< resolution in number of milliseconds
}
ThreadIdleScheduler;
//! Initialise the idle scheduler state for the given timer resolution and
//! desired idle time per resolution window.
static void ThreadIdleScheduler_Setup(ThreadIdleScheduler *sched, UINT32 resolution, UINT32 microseconds)
{
    /* Preconditions: both inputs non-zero, and the resolution window
       (in microseconds) must be able to contain the requested idle time. */
    assert(resolution != 0);
    assert(microseconds != 0);
    assert((resolution * 1000) >= microseconds);

    sched->m_idle_microseconds = microseconds;
    sched->m_resolution = resolution;
    /* Number of polling rounds between two real sleeps. */
    sched->m_next_sleep = (resolution * 1000) / microseconds;
    sched->m_i = 0;
}
//! Advance one polling round. Returns the sleep time in milliseconds (the
//! configured resolution) when this round is a sleep round, 0 otherwise.
static UINT32 ThreadIdleScheduler_NextSleep(ThreadIdleScheduler *sched)
{
    sched->m_i += 1;
    /* Not yet time to sleep: keep spinning. */
    if (sched->m_i != sched->m_next_sleep)
        return 0;

    /* Sleep round reached: restart the cycle and sleep one resolution unit. */
    sched->m_i = 0;
    return sched->m_resolution;
}
// ------------------------------------------------------------------------------------------
PA_THREAD_FUNC ProcThreadPoll(void *param)
{
@ -3984,6 +4063,7 @@ PA_THREAD_FUNC ProcThreadPoll(void *param)
PaWasapiStream *stream = (PaWasapiStream *)param;
PaWasapiHostProcessor defaultProcessor;
INT32 i;
ThreadIdleScheduler scheduler;
// Calculate the actual duration of the allocated buffer.
DWORD sleep_ms = 0;
@ -4004,9 +4084,24 @@ PA_THREAD_FUNC ProcThreadPoll(void *param)
{
sleep_ms = (sleep_ms_in ? sleep_ms_in : sleep_ms_out);
}
// Make sure not 0
// Make sure not 0, otherwise use ThreadIdleScheduler
if (sleep_ms == 0)
sleep_ms = 1;
{
sleep_ms_in = GetFramesSleepTimeMicroseconds(stream->bufferProcessor.framesPerUserBuffer, stream->in.wavex.Format.nSamplesPerSec);
sleep_ms_out = GetFramesSleepTimeMicroseconds(stream->bufferProcessor.framesPerUserBuffer, stream->out.wavex.Format.nSamplesPerSec);
// Choose smallest
if ((sleep_ms_in != 0) && (sleep_ms_out != 0))
sleep_ms = min(sleep_ms_in, sleep_ms_out);
else
{
sleep_ms = (sleep_ms_in ? sleep_ms_in : sleep_ms_out);
}
// Setup thread sleep scheduler
ThreadIdleScheduler_Setup(&scheduler, 1, sleep_ms/* microseconds here actually */);
sleep_ms = 0;
}
// Setup data processors
defaultProcessor.processor = WaspiHostProcessingLoop;
@ -4093,8 +4188,15 @@ PA_THREAD_FUNC ProcThreadPoll(void *param)
if (!PA_WASAPI__IS_FULLDUPLEX(stream))
{
// Processing Loop
while (WaitForSingleObject(stream->hCloseRequest, sleep_ms) == WAIT_TIMEOUT)
UINT32 next_sleep = sleep_ms;
while (WaitForSingleObject(stream->hCloseRequest, next_sleep) == WAIT_TIMEOUT)
{
// Get next sleep time
if (sleep_ms == 0)
{
next_sleep = ThreadIdleScheduler_NextSleep(&scheduler);
}
for (i = 0; i < S_COUNT; ++i)
{
// Process S_INPUT/S_OUTPUT
@ -4267,8 +4369,8 @@ PA_THREAD_FUNC ProcThreadPoll(void *param)
}
#else
// Processing Loop
//sleep_ms = 1;
while (WaitForSingleObject(stream->hCloseRequest, sleep_ms) == WAIT_TIMEOUT)
UINT32 next_sleep = sleep_ms;
while (WaitForSingleObject(stream->hCloseRequest, next_sleep) == WAIT_TIMEOUT)
{
UINT32 i_frames = 0, i_processed = 0;
BYTE *i_data = NULL, *o_data = NULL, *o_data_host = NULL;
@ -4280,6 +4382,12 @@ PA_THREAD_FUNC ProcThreadPoll(void *param)
//if (stream->in.shareMode == AUDCLNT_SHAREMODE_EXCLUSIVE)
// sleep_ms = !sleep_ms;
// Get next sleep time
if (sleep_ms == 0)
{
next_sleep = ThreadIdleScheduler_NextSleep(&scheduler);
}
// get available frames
if ((hr = PollGetOutputFramesAvailable(stream, &o_frames)) != S_OK)
{