Merge Lanczos changes for front end.

James Muehlner
2016-05-23 20:26:40 -07:00


@@ -124,7 +124,18 @@ Guacamole.RawAudioRecorder = function RawAudioRecorder(stream, mimetype) {
* @constant
* @type {Number}
*/
var BUFFER_SIZE = 512;
var BUFFER_SIZE = 2048;
/**
* The window size to use when applying Lanczos interpolation, commonly
* denoted by the variable "a".
* See: https://en.wikipedia.org/wiki/Lanczos_resampling
*
* @private
* @constant
* @type {Number}
*/
var LANCZOS_WINDOW_SIZE = 3;
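// Note on the choice of window size (general Lanczos background, not specific
// to this commit): a larger "a" includes more lobes of the sinc function and
// gives a better reconstruction, at the cost of proportionally more
// multiplications per interpolated sample; a = 2 and a = 3 are the values
// commonly used in practice.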
/**
* The format of audio this recorder will encode.
@@ -183,6 +194,133 @@ Guacamole.RawAudioRecorder = function RawAudioRecorder(stream, mimetype) {
*/
var maxSampleValue = (format.bytesPerSample === 1) ? 128 : 32768;
/**
* The total number of audio samples read from the local audio input device
* over the life of this audio recorder.
*
* @private
* @type {Number}
*/
var readSamples = 0;
/**
* The total number of audio samples written to the underlying Guacamole
* connection over the life of this audio recorder.
*
* @private
* @type {Number}
*/
var writtenSamples = 0;
/**
* The source node providing access to the local audio input device.
*
* @private
* @type {MediaStreamAudioSourceNode}
*/
var source = null;
/**
* The script processing node which receives audio input from the media
* stream source node as individual audio buffers.
*
* @private
* @type {ScriptProcessorNode}
*/
var processor = null;
/**
* The normalized sinc function. The normalized sinc function is defined as
* 1 for x=0 and sin(PI * x) / (PI * x) for all other values of x.
*
* See: https://en.wikipedia.org/wiki/Sinc_function
*
* @private
* @param {Number} x
* The point at which the normalized sinc function should be computed.
*
* @returns {Number}
* The value of the normalized sinc function at x.
*/
var sinc = function sinc(x) {
// The value of sinc(0) is defined as 1
if (x === 0)
return 1;
// Otherwise, normalized sinc(x) is sin(PI * x) / (PI * x)
var piX = Math.PI * x;
return Math.sin(piX) / piX;
};
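// Worked example (illustrative values only, not part of the function above):
//
//   sinc(0)   = 1                                    (by definition)
//   sinc(0.5) = sin(0.5 * PI) / (0.5 * PI) ≈ 0.6366
//   sinc(1)   = sin(PI) / PI = 0                     (0 at every nonzero integer)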
/**
* Calculates the value of the Lanczos kernel at point x for a given window
* size. See: https://en.wikipedia.org/wiki/Lanczos_resampling
*
* @private
* @param {Number} x
* The point at which the value of the Lanczos kernel should be
* computed.
*
* @param {Number} a
* The window size to use for the Lanczos kernel.
*
* @returns {Number}
* The value of the Lanczos kernel at the given point for the given
* window size.
*/
var lanczos = function lanczos(x, a) {
// Lanczos is sinc(x) * sinc(x / a) for -a < x < a ...
if (-a < x && x < a)
return sinc(x) * sinc(x / a);
// ... and 0 otherwise
return 0;
};
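// Worked example (illustrative values only), assuming a = LANCZOS_WINDOW_SIZE = 3:
//
//   lanczos(0, 3)   = 1                              (peak at the sample itself)
//   lanczos(1, 3)   = sinc(1) * sinc(1/3) = 0        (0 at every other integer offset)
//   lanczos(1.5, 3) = sinc(1.5) * sinc(0.5) ≈ -0.1351
//   lanczos(3, 3)   = 0                              (outside the -a < x < a window)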
/**
* Determines the value of the waveform represented by the audio data at
* the given location. If the location does not correspond exactly to a
* sample within the audio data, the value will be derived by
* interpolating nearby samples.
*
* @private
* @param {Float32Array} audioData
* An array of audio data, as returned by AudioBuffer.getChannelData().
*
* @param {Number} t
* The relative location within the waveform from which the value
* should be retrieved, represented as a floating point number between
* 0 and 1 inclusive, where 0 represents the earliest point in time and
* 1 represents the latest.
*
* @returns {Number}
* The value of the waveform at the given location.
*/
var interpolateSample = function getValueAt(audioData, t) {
// Convert [0, 1] range to [0, audioData.length - 1]
var index = (audioData.length - 1) * t;
// Determine the start and end points for the summation used by the
// Lanczos interpolation algorithm (see: https://en.wikipedia.org/wiki/Lanczos_resampling)
var start = Math.floor(index) - LANCZOS_WINDOW_SIZE + 1;
var end = Math.floor(index) + LANCZOS_WINDOW_SIZE;
// Calculate the value of the Lanczos interpolation function for the
// required range
var sum = 0;
for (var i = start; i <= end; i++) {
sum += (audioData[i] || 0) * lanczos(index - i, LANCZOS_WINDOW_SIZE);
}
return sum;
};
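// Usage sketch with hypothetical data: for a four-sample buffer, t = 0.5 maps
// to index 1.5, so the sum above runs over i = -1 .. 4, with out-of-range
// samples treated as 0 by the (audioData[i] || 0) term:
//
//   interpolateSample(new Float32Array([0, 0.5, 1, 0.5]), 0.5);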
/**
* Converts the given AudioBuffer into an audio packet, ready for streaming
* along the underlying output stream. Unlike the raw audio packets used by
@@ -200,9 +338,18 @@ Guacamole.RawAudioRecorder = function RawAudioRecorder(stream, mimetype) {
*/
var toSampleArray = function toSampleArray(audioBuffer) {
// Calculate the number of samples in both input and output
// Track overall amount of data read
var inSamples = audioBuffer.length;
var outSamples = Math.floor(audioBuffer.duration * format.rate);
readSamples += inSamples;
// Calculate the total number of samples that should be written as of
// the audio data just received and adjust the size of the output
// packet accordingly
var expectedWrittenSamples = Math.round(readSamples * format.rate / audioBuffer.sampleRate);
var outSamples = expectedWrittenSamples - writtenSamples;
// Update number of samples written
writtenSamples += outSamples;
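// Example of the accounting above (illustrative numbers only): with 48000 Hz
// input and format.rate = 44100, the first 2048-sample buffer yields
// readSamples = 2048, expectedWrittenSamples = round(2048 * 44100 / 48000) = 1882,
// and thus outSamples = 1882. Sizing each packet from the running totals rather
// than from the individual buffer keeps rounding error from accumulating.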
// Get array for raw PCM storage
var data = new SampleArray(outSamples * format.channels);
@@ -215,13 +362,8 @@ Guacamole.RawAudioRecorder = function RawAudioRecorder(stream, mimetype) {
// Fill array with data from audio buffer channel
var offset = channel;
for (var i = 0; i < outSamples; i++) {
// Apply naiive resampling
var inOffset = Math.floor(i / outSamples * inSamples);
data[offset] = Math.floor(audioData[inOffset] * maxSampleValue);
data[offset] = interpolateSample(audioData, i / (outSamples - 1)) * maxSampleValue;
offset += format.channels;
}
}
@@ -235,15 +377,29 @@ Guacamole.RawAudioRecorder = function RawAudioRecorder(stream, mimetype) {
// Abort stream if rejected
if (status.code !== Guacamole.Status.Code.SUCCESS) {
// Disconnect media source node from script processor
if (source)
source.disconnect();
// Disconnect associated script processor node
if (processor)
processor.disconnect();
// Remove references to now-unneeded components
processor = null;
source = null;
writer.sendEnd();
return;
}
// Attempt to retrieve an audio input stream from the browser
getUserMedia({ 'audio' : true }, function streamReceived(mediaStream) {
// Create processing node which receives appropriately-sized audio buffers
var processor = context.createScriptProcessor(BUFFER_SIZE, format.channels, format.channels);
processor = context.createScriptProcessor(BUFFER_SIZE, format.channels, format.channels);
processor.connect(context.destination);
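// Note: in some browsers (notably Chrome), a ScriptProcessorNode must be
// connected to a destination before its onaudioprocess events will fire;
// connecting it here keeps the node active rather than producing audible
// playback, since nothing is written to its output buffer.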
// Send blobs when audio buffers are received
@@ -252,7 +408,7 @@ Guacamole.RawAudioRecorder = function RawAudioRecorder(stream, mimetype) {
};
// Connect processing node to user's audio input source
var source = context.createMediaStreamSource(mediaStream);
source = context.createMediaStreamSource(mediaStream);
source.connect(processor);
}, function streamDenied() {