Timeline-based packet scheduling.

Michael Jumper
2012-10-30 17:05:15 -07:00
parent 01b4c41650
commit 099b4c0126
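The change replaces the packet queue and the advance() timer chain with a running timeline: each call to play() computes how far in the future the packet's reserved slot lies and schedules playback for exactly that moment, so timer and decoding jitter no longer accumulate into gaps or drift. A minimal sketch of the arithmetic, reusing the variable names from the diff (the schedule() helper itself is illustrative and not part of the commit):

    // Absolute time (in ms) at which the next packet should begin.
    var next_packet_time = 0;

    // Returns the delay (in ms) after which a packet of the given duration
    // should start, and reserves the slot that follows it.
    function schedule(duration) {

        var now = new Date().getTime();

        // First packet: start the timeline at the current time
        if (next_packet_time === 0)
            next_packet_time = now;

        // How long until this packet's reserved slot
        var time_until_play = next_packet_time - now;

        // The next packet starts when this one ends
        next_packet_time += duration;

        return time_until_play;
    }

    // Three 20 ms packets queued back to back play at delays of roughly
    // 0, 20 and 40 ms, regardless of exactly when each one was queued.
    console.log(schedule(20), schedule(20), schedule(20));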


@@ -54,32 +54,9 @@ Guacamole.AudioChannel = function() {
     var channel = this;

     /**
-     * Packet queue.
+     * When the next packet should play.
      */
-    var packets = [];
-
-    /**
-     * Whether this channel is currently playing sound.
-     */
-    var playing = false;
-
-    /**
-     * Advances to the next audio packet, if any, and plays it.
-     */
-    function advance() {
-
-        // If packets remain, play next
-        if (packets.length != 0) {
-            var packet = packets.shift();
-            packet.play();
-            window.setTimeout(advance, packet.duration);
-        }
-
-        // Otherwise, no longer playing
-        else
-            playing = false;
-
-    }
+    var next_packet_time = 0;

     /**
      * Queues up the given data for playing by this channel once all previously
@@ -94,18 +71,20 @@ Guacamole.AudioChannel = function() {
     this.play = function(mimetype, duration, data) {

         var packet =
-            new Guacamole.AudioChannel.Packet(mimetype, duration, data);
+            new Guacamole.AudioChannel.Packet(mimetype, data);

-        // If currently playing sound, add packet to queue
-        if (playing)
-            packets.push(packet);
-
-        // Otherwise, play now, flag channel as playing
-        else {
-            playing = true;
-            packet.play();
-            window.setTimeout(advance, packet.duration);
-        }
+        var now = new Date().getTime();
+
+        // If time not initialized, initialize now
+        if (next_packet_time == 0)
+            next_packet_time = now;
+
+        var time_until_play = next_packet_time - now;
+
+        // Schedule next packet
+        next_packet_time += duration;
+
+        packet.play(time_until_play);

     };

@@ -125,12 +104,7 @@ if (window.webkitAudioContext) {
  * @param {Number} duration The duration of the data contained by this packet.
  * @param {String} data The base64-encoded sound data contained by this packet.
  */
-Guacamole.AudioChannel.Packet = function(mimetype, duration, data) {
-
-    /**
-     * The duration of this packet, in milliseconds.
-     */
-    this.duration = duration;
+Guacamole.AudioChannel.Packet = function(mimetype, data) {

     // If audio API available, use it.
     if (Guacamole.AudioChannel.context) {
@@ -162,20 +136,34 @@ Guacamole.AudioChannel.Packet = function(mimetype, duration, data) {
         var source = Guacamole.AudioChannel.context.createBufferSource();
         source.connect(Guacamole.AudioChannel.context.destination);

-        function playImmediately(buffer) {
+        var play_call;
+        var play_delay;
+
+        function playDelayed(buffer) {
+
+            // Calculate time since call to play()
+            var offset = new Date().getTime() - play_call;
+
             source.buffer = buffer;
-            source.noteOn(0);
+            source.noteOn(Math.max(play_delay - offset, 0) / 1000);
+
+            if (offset > play_delay)
+                console.log("processing lag", offset - play_delay);
+
         }

-        this.play = function() {
+        this.play = function(delay) {
+
+            play_call = new Date().getTime();
+            play_delay = delay;

             // If buffer available, play it NOW
             if (readyBuffer)
-                playImmediately(readyBuffer);
+                playDelayed(readyBuffer);

             // Otherwise, play when decoded
             else
-                handleReady = playImmediately;
+                handleReady = playDelayed;

         };

@@ -193,8 +181,10 @@ Guacamole.AudioChannel.Packet = function(mimetype, duration, data) {
         /**
         * Plays the sound data contained by this packet immediately.
         */
-        this.play = function() {
-            audio.play();
+        this.play = function(delay) {
+            window.setTimeout(function() {
+                audio.play();
+            }, delay);
         };

    }
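Callers of Guacamole.AudioChannel.play() are unaffected: they still pass the packet duration, which now only advances the timeline, while Packet.play() receives the computed delay in milliseconds. The Web Audio path subtracts the time already spent since play() was called before scheduling noteOn(), and the fallback path simply defers audio.play() via window.setTimeout(). An illustrative usage sketch (mimetype, duration, and the truncated base64 data are placeholders, not values from the commit):

    var channel = new Guacamole.AudioChannel();

    // Two consecutive 250 ms packets: the second is scheduled to begin
    // exactly when the first ends, even if it arrives a little early or late.
    channel.play("audio/wav", 250, "UklGRi...");  // placeholder base64 payload
    channel.play("audio/wav", 250, "UklGRi...");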