1
0
mirror of https://github.com/KubaPro010/fm-dx-webserver.git synced 2026-02-26 22:13:53 +01:00

3LAS implementation

This commit is contained in:
NoobishSVK
2024-01-27 11:46:52 +01:00
parent 5ab4a39bd5
commit 8ce697a960
42 changed files with 7950 additions and 40 deletions

135
web/js/3las/3las.js Normal file
View File

@@ -0,0 +1,135 @@
var _3LAS_Settings = /** @class */ (function () {
    // Connection settings for 3LAS: host/port/path of the audio websocket plus
    // the per-transport (WebRTC / websocket-fallback) sub-settings.
    function _3LAS_Settings() {
        this.SocketHost = document.location.hostname ? document.location.hostname : "127.0.0.1";
        // Read the stored port once instead of calling getItem twice.
        // Note: localStorage yields a string while the default is the number 8081;
        // both work because the port is only ever used via toString() in Start().
        var storedPort = localStorage.getItem('audioPort');
        this.SocketPort = storedPort ? storedPort : 8081;
        this.SocketPath = "/audio";
        this.WebRTC = new WebRTC_Settings();
        this.Fallback = new Fallback_Settings();
    }
    return _3LAS_Settings;
}());
var _3LAS = /** @class */ (function () {
    // Facade for 3LAS playback: prefers the WebRTC transport when the browser
    // supports it and keeps the websocket fallback available; also owns the
    // signalling websocket and exposes volume plus activity/connectivity hooks.
    function _3LAS(logger, settings) {
        this.Logger = logger;
        if (!this.Logger) {
            this.Logger = new Logging(null, null);
        }
        this.Settings = settings;
        try {
            this.WebRTC = new WebRTC(this.Logger, this.Settings.WebRTC);
            this.WebRTC.ActivityCallback = this.OnActivity.bind(this);
            this.WebRTC.DisconnectCallback = this.OnSocketDisconnect.bind(this);
        }
        catch (_a) {
            this.WebRTC = null;
        }
        // Always construct the fallback as well. (The previous guard,
        // `this.WebRTC == null || this.WebRTC !== null`, was a tautology, so
        // this block already ran unconditionally.)
        try {
            this.Fallback = new Fallback(this.Logger, this.Settings.Fallback);
            this.Fallback.ActivityCallback = this.OnActivity.bind(this);
        }
        catch (_b) {
            this.Fallback = null;
        }
        if (this.WebRTC == null && this.Fallback == null) {
            this.Logger.Log('3LAS: Browser does not support either media handling methods.');
            throw new Error();
        }
        if (isAndroid) {
            this.WakeLock = new WakeLock(this.Logger);
        }
    }
    Object.defineProperty(_3LAS.prototype, "Volume", {
        // Volume is delegated to whichever transport is active (WebRTC wins).
        get: function () {
            if (this.WebRTC)
                return this.WebRTC.Volume;
            else
                return this.Fallback.Volume;
        },
        set: function (value) {
            if (this.WebRTC)
                this.WebRTC.Volume = value;
            else
                this.Fallback.Volume = value;
        },
        enumerable: false,
        configurable: true
    });
    // The fallback can always change volume; WebRTC may not (e.g. on iOS).
    _3LAS.prototype.CanChangeVolume = function () {
        if (this.WebRTC)
            return this.WebRTC.CanChangeVolume();
        else
            return true;
    };
    // Opens the signalling websocket and performs mobile playback workarounds.
    _3LAS.prototype.Start = function () {
        this.ConnectivityFlag = false;
        // This is stupid, but required for iOS/iPadOS... thanks Apple :(
        if (this.Settings && this.Settings.WebRTC && this.Settings.WebRTC.AudioTag)
            this.Settings.WebRTC.AudioTag.play();
        // This is stupid, but required for Android.... thanks Google :(
        if (this.WakeLock)
            this.WakeLock.Begin();
        try {
            this.WebSocket = new WebSocketClient(this.Logger, 'ws://' + this.Settings.SocketHost + ':' + this.Settings.SocketPort.toString() + this.Settings.SocketPath, this.OnSocketError.bind(this), this.OnSocketConnect.bind(this), this.OnSocketDataReady.bind(this), this.OnSocketDisconnect.bind(this));
            this.Logger.Log("Init of WebSocketClient succeeded");
            this.Logger.Log("Trying to connect to server.");
        }
        catch (e) {
            this.Logger.Log("Init of WebSocketClient failed: " + e);
            throw new Error();
        }
    };
    // Called by the active transport whenever audio data flows; raises the
    // connectivity flag on the first activity after a (re)connect.
    _3LAS.prototype.OnActivity = function () {
        if (this.ActivityCallback)
            this.ActivityCallback();
        if (!this.ConnectivityFlag) {
            this.ConnectivityFlag = true;
            if (this.ConnectivityCallback)
                this.ConnectivityCallback(true);
        }
    };
    // Callback functions from socket connection
    _3LAS.prototype.OnSocketError = function (message) {
        this.Logger.Log("Network error: " + message);
        if (this.WebRTC)
            this.WebRTC.OnSocketError(message);
        else
            this.Fallback.OnSocketError(message);
    };
    // Forwards the connect event and initializes the active transport with the socket.
    _3LAS.prototype.OnSocketConnect = function () {
        this.Logger.Log("Established connection with server.");
        if (this.WebRTC)
            this.WebRTC.OnSocketConnect();
        else
            this.Fallback.OnSocketConnect();
        if (this.WebRTC)
            this.WebRTC.Init(this.WebSocket);
        else
            this.Fallback.Init(this.WebSocket);
    };
    // Tears down the active transport, drops the connectivity flag, then reconnects.
    _3LAS.prototype.OnSocketDisconnect = function () {
        this.Logger.Log("Lost connection to server.");
        if (this.WebRTC)
            this.WebRTC.OnSocketDisconnect();
        else
            this.Fallback.OnSocketDisconnect();
        if (this.WebRTC)
            this.WebRTC.Reset();
        else
            this.Fallback.Reset();
        if (this.ConnectivityFlag) {
            this.ConnectivityFlag = false;
            if (this.ConnectivityCallback)
                this.ConnectivityCallback(false);
        }
        // NOTE(review): reconnects immediately with no backoff; if the server
        // stays down this retries in a tight loop -- consider adding a delay.
        this.Start();
    };
    // Routes raw socket payloads to the active transport.
    _3LAS.prototype.OnSocketDataReady = function (data) {
        if (this.WebRTC)
            this.WebRTC.OnSocketDataReady(data);
        else
            this.Fallback.OnSocketDataReady(data);
    };
    return _3LAS;
}());

177
web/js/3las/3las.webrtc.js Normal file
View File

@@ -0,0 +1,177 @@
/*
RTC live audio is part of 3LAS (Low Latency Live Audio Streaming)
https://github.com/JoJoBond/3LAS
*/
// TypeScript-emitted __awaiter helper: drives an async function implemented as
// a generator, resolving each yielded value through Promise `P` (or the global
// Promise when P is not supplied).
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap a plain value in a resolved Promise; pass Promise instances through untouched.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        // Step the generator forward with the awaited value; reject on synchronous throw.
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        // Propagate a rejected await back into the generator as an exception.
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Resolve when the generator finishes; otherwise await the yielded value and continue.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// TypeScript-emitted __generator helper: the generator state machine that
// compiled async bodies (e.g. WebRTC.OnSocketDataReady below) run on. `_`
// holds the current label and try/finally bookkeeping; `body` is the compiled
// switch over labels. Opcodes in `op[0]`: 0 next, 1 throw-in, 2 return,
// 3 break-to-label, 4 yield, 5 yield*, 6 throw-out, 7 endfinally.
var __generator = (this && this.__generator) || function (thisArg, body) {
    var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
    return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
    function verb(n) { return function (v) { return step([n, v]); }; }
    // Executes one transition of the state machine; `f` guards against re-entry.
    function step(op) {
        if (f) throw new TypeError("Generator is already executing.");
        while (_) try {
            if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
            if (y = 0, t) op = [op[0] & 2, t.value];
            switch (op[0]) {
                case 0: case 1: t = op; break;
                case 4: _.label++; return { value: op[1], done: false };
                case 5: _.label++; y = op[1]; op = [0]; continue;
                case 7: op = _.ops.pop(); _.trys.pop(); continue;
                default:
                    if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
                    if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
                    if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
                    if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
                    if (t[2]) _.ops.pop();
                    _.trys.pop(); continue;
            }
            op = body.call(thisArg, _);
        } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
        if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
    }
};
// Settings bag for the WebRTC transport. It declares no fields itself; the
// caller attaches what the WebRTC class reads (AudioTag, RtcConfig).
var WebRTC_Settings = function WebRTC_Settings() {
    // Intentionally empty.
};
var WebRTC = /** @class */ (function () {
    // WebRTC transport for 3LAS: receives the live audio stream over an
    // RTCPeerConnection and plays it through the supplied <audio> element.
    // Signalling (offer/answer/ICE candidates) travels over the shared websocket.
    function WebRTC(logger, settings) {
        this.Logger = logger;
        if (!this.Logger) {
            this.Logger = new Logging(null, null);
        }
        this.AudioTag = settings.AudioTag;
        // Create RTC peer connection (standard constructor first, then vendor-prefixed ones)
        if (typeof RTCPeerConnection !== "undefined")
            this.RtcPeer = new RTCPeerConnection(settings.RtcConfig);
        else if (typeof webkitRTCPeerConnection !== "undefined")
            this.RtcPeer = new webkitRTCPeerConnection(settings.RtcConfig);
        else if (typeof mozRTCPeerConnection !== "undefined")
            this.RtcPeer = new mozRTCPeerConnection(settings.RtcConfig);
        else {
            this.Logger.Log('3LAS: Browser does not support "WebRTC".');
            throw new Error();
        }
        this.Logger.Log("Using WebRTC");
        // Declare an audio transceiver; the actual track arrives from the server (OnTrack).
        this.RtcPeer.addTransceiver('audio');
        this.RtcPeer.ontrack = this.OnTrack.bind(this);
        this.RtcPeer.oniceconnectionstatechange = this.OnConnectionStateChange.bind(this);
    }
    Object.defineProperty(WebRTC.prototype, "Volume", {
        // Volume in [0.0, 1.0]. Where the tag's volume cannot be changed
        // (see CanChangeVolume), volume degrades to a muted/unmuted toggle.
        get: function () {
            if (!this.CanChangeVolume()) {
                if (this.AudioTag.muted == true)
                    return 0.0;
                else
                    return 1.0;
            }
            return this.AudioTag.volume;
        },
        set: function (value) {
            if (!this.CanChangeVolume()) {
                if (value <= 0.0)
                    this.AudioTag.muted = true;
                else
                    this.AudioTag.muted = false;
                return;
            }
            this.AudioTag.volume = value;
        },
        enumerable: false,
        configurable: true
    });
    // True unless running on iOS/iPadOS (isIOS/isIPadOS are globals set elsewhere).
    WebRTC.prototype.CanChangeVolume = function () {
        return !(isIOS || isIPadOS);
    };
    // Announces the webrtc transport to the server over the signalling socket
    // and starts a 1 s activity poll.
    WebRTC.prototype.Init = function (webSocket) {
        this.WebSocket = webSocket;
        this.WebSocket.Send(JSON.stringify({
            "type": "webrtc",
            "data": null
        }));
        this.ActivityTimer = setInterval(this.OnActivityTimerTick.bind(this), 1000);
    };
    // Reports activity once per second while the ICE connection is up.
    WebRTC.prototype.OnActivityTimerTick = function () {
        if ((this.RtcPeer.iceConnectionState == "connected" || this.RtcPeer.iceConnectionState == "completed") && this.ActivityCallback)
            this.ActivityCallback();
    };
    // Fires DisconnectCallback when the ICE connection ends for any reason.
    WebRTC.prototype.OnConnectionStateChange = function () {
        if ((this.RtcPeer.iceConnectionState == "closed" ||
            this.RtcPeer.iceConnectionState == "disconnected" ||
            this.RtcPeer.iceConnectionState == "failed") && this.DisconnectCallback)
            this.DisconnectCallback();
    };
    // Routes the incoming remote stream/track into the audio tag and starts playback.
    WebRTC.prototype.OnTrack = function (event) {
        if (event.streams != null && event.streams.length > 0)
            this.AudioTag.srcObject = event.streams[0];
        else if (event.track != null)
            this.AudioTag.srcObject = new MediaStream([event.track]);
        this.AudioTag.play();
    };
    // Socket lifecycle hooks: intentionally no-ops for this transport, which
    // only reacts to ICE state changes and signalling messages.
    WebRTC.prototype.OnSocketError = function (message) {
    };
    WebRTC.prototype.OnSocketConnect = function () {
    };
    WebRTC.prototype.OnSocketDisconnect = function () {
    };
    // Stops the activity timer and tears down the peer connection so a fresh
    // connection attempt can be made.
    WebRTC.prototype.Reset = function () {
        if (this.ActivityTimer) {
            clearInterval(this.ActivityTimer);
            this.ActivityTimer = 0;
        }
        if (this.RtcPeer) {
            this.RtcPeer.close();
            delete this.RtcPeer;
            this.RtcPeer = null;
        }
        this.WebSocket = null;
    };
    // Handles signalling messages: applies the server's offer, sends back an
    // answer, and feeds ICE candidates into the peer connection.
    // (Compiled from async/await via __awaiter/__generator.)
    WebRTC.prototype.OnSocketDataReady = function (data) {
        return __awaiter(this, void 0, void 0, function () {
            var message, answer;
            var _this = this;
            return __generator(this, function (_a) {
                switch (_a.label) {
                    case 0:
                        message = JSON.parse(data.toString());
                        if (!(message.type == "offer")) return [3 /*break*/, 4];
                        return [4 /*yield*/, this.RtcPeer.setRemoteDescription(new RTCSessionDescription(message.data))];
                    case 1:
                        _a.sent();
                        return [4 /*yield*/, this.RtcPeer.createAnswer()];
                    case 2:
                        answer = _a.sent();
                        return [4 /*yield*/, this.RtcPeer.setLocalDescription(new RTCSessionDescription(answer))];
                    case 3:
                        _a.sent();
                        this.WebSocket.Send(JSON.stringify({
                            "type": "answer",
                            "data": answer
                        }));
                        return [3 /*break*/, 5];
                    case 4:
                        if (message.type == "candidate") {
                            // NOTE(review): this async IIFE's promise is never awaited by the
                            // outer function, so addIceCandidate failures are silently dropped.
                            (function () { return __awaiter(_this, void 0, void 0, function () { return __generator(this, function (_a) {
                                switch (_a.label) {
                                    case 0: return [4 /*yield*/, this.RtcPeer.addIceCandidate(message.data)];
                                    case 1: return [2 /*return*/, _a.sent()];
                                }
                            }); }); })();
                        }
                        _a.label = 5;
                    case 5: return [2 /*return*/];
                }
            });
        });
    };
    return WebRTC;
}());

View File

@@ -0,0 +1,167 @@
/*
Socket fallback is part of 3LAS (Low Latency Live Audio Streaming)
https://github.com/JoJoBond/3LAS
*/
// Default configuration for the websocket audio fallback: the candidate
// stream formats (probed in order; the first one the browser can decode
// wins), the maximum gain, and the initial buffering behaviour.
var Fallback_Settings = function Fallback_Settings() {
    this.Formats = [
        { "Mime": "audio/mpeg", "Name": "mp3" },
        { "Mime": "audio/wave", "Name": "wav" }
    ];
    this.MaxVolume = 1.0;
    this.AutoCorrectSpeed = false;
    this.InitialBufferLength = 1.0 / 3.0;
};
var Fallback = /** @class */ (function () {
    // Websocket fallback transport for 3LAS: decodes the raw stream with an
    // AudioFormatReader and schedules the decoded samples through a
    // LiveAudioPlayer on a Web Audio context.
    function Fallback(logger, settings) {
        this.Logger = logger;
        if (!this.Logger) {
            this.Logger = new Logging(null, null);
        }
        // Create audio context (standard constructor first, then vendor-prefixed ones)
        if (typeof AudioContext !== "undefined")
            this.Audio = new AudioContext();
        else if (typeof webkitAudioContext !== "undefined")
            this.Audio = new webkitAudioContext();
        else if (typeof mozAudioContext !== "undefined")
            this.Audio = new mozAudioContext();
        else {
            this.Logger.Log('3LAS: Browser does not support "AudioContext".');
            throw new Error();
        }
        this.Settings = settings;
        // OSName/BrowserName are globals provided elsewhere in the bundle.
        this.Logger.Log("Detected: " +
            (OSName == "MacOSX" ? "Mac OSX" : (OSName == "Unknown" ? "Unknown OS" : OSName)) + ", " +
            (BrowserName == "IE" ? "Internet Explorer" : (BrowserName == "NativeChrome" ? "Chrome legacy" : (BrowserName == "Unknown" ? "Unknown Browser" : BrowserName))));
        // Pick the first configured format the browser reports it can decode.
        this.SelectedFormatMime = "";
        this.SelectedFormatName = "";
        for (var i = 0; i < this.Settings.Formats.length; i++) {
            if (!AudioFormatReader.CanDecodeTypes([this.Settings.Formats[i].Mime]))
                continue;
            this.SelectedFormatMime = this.Settings.Formats[i].Mime;
            this.SelectedFormatName = this.Settings.Formats[i].Name;
            break;
        }
        if (this.SelectedFormatMime == "" || this.SelectedFormatName == "") {
            this.Logger.Log("None of the available MIME types are supported.");
            throw new Error();
        }
        this.Logger.Log("Using websocket fallback with MIME: " + this.SelectedFormatMime);
        try {
            this.Player = new LiveAudioPlayer(this.Audio, this.Logger, this.Settings.MaxVolume, this.Settings.InitialBufferLength, this.Settings.AutoCorrectSpeed);
            this.Logger.Log("Init of LiveAudioPlayer succeeded");
        }
        catch (e) {
            this.Logger.Log("Init of LiveAudioPlayer failed: " + e);
            throw new Error();
        }
        try {
            // NOTE(review): CheckBeforeDecode is passed unbound (no .bind(this.Player));
            // it reads `this.NextScheduleTime`/`this.Audio` internally -- confirm the
            // receiver before relying on it (currently latent: see OnBeforeDecode TODO).
            this.FormatReader = AudioFormatReader.Create(this.SelectedFormatMime, this.Audio, this.Logger, this.OnReaderError.bind(this), this.Player.CheckBeforeDecode, this.OnReaderDataReady.bind(this), AudioFormatReader.DefaultSettings());
            this.Logger.Log("Init of AudioFormatReader succeeded");
        }
        catch (e) {
            this.Logger.Log("Init of AudioFormatReader failed: " + e);
            throw new Error();
        }
        // PacketModCounter drives an activity ping roughly every 100 packets.
        this.PacketModCounter = 0;
        this.LastCheckTime = 0;
        this.FocusChecker = 0;
    }
    // Tells the server which fallback format to stream and starts the focus watchdog.
    Fallback.prototype.Init = function (webSocket) {
        this.MobileUnmute();
        this.WebSocket = webSocket;
        this.WebSocket.Send(JSON.stringify({
            "type": "fallback",
            "data": this.SelectedFormatName
        }));
        this.StartFocusChecker();
    };
    // Plays one second of silence through a fresh gain node; mobile browsers
    // require playback to begin from a user-triggered code path.
    Fallback.prototype.MobileUnmute = function () {
        var amplification = this.Audio.createGain();
        // Set volume to max
        amplification.gain.value = 1.0;
        // Connect gain node to context
        amplification.connect(this.Audio.destination);
        // Create one second buffer with silence
        var audioBuffer = this.Audio.createBuffer(2, this.Audio.sampleRate, this.Audio.sampleRate);
        // Create new audio source for the buffer
        var sourceNode = this.Audio.createBufferSource();
        // Make sure the node deletes itself after playback
        sourceNode.onended = function (_ev) {
            sourceNode.disconnect();
            amplification.disconnect();
        };
        // Pass audio data to source
        sourceNode.buffer = audioBuffer;
        // Connect the source to the gain node
        sourceNode.connect(amplification);
        // Play source
        sourceNode.start();
    };
    Object.defineProperty(Fallback.prototype, "Volume", {
        // Normalized volume in [0, 1]; scaled by MaxVolume inside the player.
        get: function () {
            return this.Player.Volume / this.Settings.MaxVolume;
        },
        set: function (value) {
            this.Player.Volume = value * this.Settings.MaxVolume;
        },
        enumerable: false,
        configurable: true
    });
    // Callback functions from format reader
    Fallback.prototype.OnReaderError = function () {
        this.Logger.Log("Reader error: Decoding failed.");
    };
    // Drains every decoded sample batch from the reader into the player.
    Fallback.prototype.OnReaderDataReady = function () {
        while (this.FormatReader.SamplesAvailable()) {
            this.Player.PushBuffer(this.FormatReader.PopSamples());
        }
    };
    // Callback function from socket connection (no-ops for this transport)
    Fallback.prototype.OnSocketError = function (message) {
    };
    Fallback.prototype.OnSocketConnect = function () {
    };
    Fallback.prototype.OnSocketDisconnect = function () {
    };
    // Feeds received packets to the reader; pings ActivityCallback every ~100 packets.
    Fallback.prototype.OnSocketDataReady = function (data) {
        this.PacketModCounter++;
        if (this.PacketModCounter > 100) {
            if (this.ActivityCallback)
                this.ActivityCallback();
            this.PacketModCounter = 0;
        }
        this.FormatReader.PushData(new Uint8Array(data));
    };
    // Starts the 2 s watchdog that detects a suspended/backgrounded page.
    Fallback.prototype.StartFocusChecker = function () {
        if (!this.FocusChecker) {
            this.LastCheckTime = Date.now();
            this.FocusChecker = window.setInterval(this.CheckFocus.bind(this), 2000);
        }
    };
    Fallback.prototype.StopFocusChecker = function () {
        if (this.FocusChecker) {
            window.clearInterval(this.FocusChecker);
            this.FocusChecker = 0;
        }
    };
    // If more than 10 s elapsed between ticks the page was suspended; drop the
    // now-stale buffered data instead of playing it late.
    Fallback.prototype.CheckFocus = function () {
        var checkTime = Date.now();
        // Check if focus was lost
        if (checkTime - this.LastCheckTime > 10000) {
            // If so, drop all samples in the buffer
            this.Logger.Log("Focus lost, purging format reader.");
            this.FormatReader.PurgeData();
        }
        this.LastCheckTime = checkTime;
    };
    // Stops the watchdog and clears reader/player state for a reconnect.
    Fallback.prototype.Reset = function () {
        this.StopFocusChecker();
        this.FormatReader.Reset();
        this.Player.Reset();
        this.WebSocket = null;
    };
    return Fallback;
}());

View File

@@ -0,0 +1,193 @@
/*
Audio format reader is part of 3LAS (Low Latency Live Audio Streaming)
https://github.com/JoJoBond/3LAS
*/
var AudioFormatReader = /** @class */ (function () {
    // Base class for format-specific stream readers: raw network bytes go in
    // through PushData, subclasses extract and decode frames (ExtractAll), and
    // decoded sample batches come out in order through PopSamples.
    function AudioFormatReader(audio, logger, errorCallback, beforeDecodeCheck, dataReadyCallback) {
        if (!audio)
            throw new Error('AudioFormatReader: audio must be specified');
        // Check callback arguments
        if (typeof errorCallback !== 'function')
            throw new Error('AudioFormatReader: errorCallback must be specified');
        if (typeof beforeDecodeCheck !== 'function')
            throw new Error('AudioFormatReader: beforeDecodeCheck must be specified');
        if (typeof dataReadyCallback !== 'function')
            throw new Error('AudioFormatReader: dataReadyCallback must be specified');
        this.Audio = audio;
        this.Logger = logger;
        this.ErrorCallback = errorCallback;
        this.BeforeDecodeCheck = beforeDecodeCheck;
        this.DataReadyCallback = dataReadyCallback;
        // Ids order async decode results; BufferStore parks batches that finish
        // decoding out of order until their predecessors arrive (see OnDataReady).
        this.Id = 0;
        this.LastPushedId = -1;
        this.Samples = new Array();
        this.BufferStore = {};
        this.DataBuffer = new Uint8Array(0);
    }
    // Pushes frame data into the buffer
    AudioFormatReader.prototype.PushData = function (data) {
        // Append data to framedata buffer
        this.DataBuffer = this.ConcatUint8Array(this.DataBuffer, data);
        // Try to extract frames
        this.ExtractAll();
    };
    // Check if samples are available
    AudioFormatReader.prototype.SamplesAvailable = function () {
        return (this.Samples.length > 0);
    };
    // Get a single batch of samples from the reader (null when none are queued)
    AudioFormatReader.prototype.PopSamples = function () {
        if (this.Samples.length > 0) {
            // Get first batch of samples, remove it from the array and hand it to the caller
            return this.Samples.shift();
        }
        else
            return null;
    };
    // Deletes all encoded and decoded data from the reader (does not affect headers, etc.)
    AudioFormatReader.prototype.PurgeData = function () {
        this.Id = 0;
        this.LastPushedId = -1;
        this.Samples = new Array();
        this.BufferStore = {};
        this.DataBuffer = new Uint8Array(0);
    };
    // Used to force frame extraction externally
    AudioFormatReader.prototype.Poke = function () {
        this.ExtractAll();
    };
    // Deletes all data from the reader (does affect headers, etc.)
    AudioFormatReader.prototype.Reset = function () {
        this.PurgeData();
    };
    // Extracts and converts the raw data; no-op here, overridden by subclasses.
    AudioFormatReader.prototype.ExtractAll = function () {
    };
    // Checks if a decode makes sense (currently always true; see TODO below)
    AudioFormatReader.prototype.OnBeforeDecode = function (id, duration) {
        return true;
        //TODO Fix this
        /*
        if(this.BeforeDecodeCheck(duration)) {
            return true;
        }
        else {
            this.OnDataReady(id, this.Audio.createBuffer(1, Math.ceil(duration * this.Audio.sampleRate), this.Audio.sampleRate));
            return false;
        }
        */
    };
    // Stores the converted batches of samples in the right order
    AudioFormatReader.prototype.OnDataReady = function (id, audioBuffer) {
        if (this.LastPushedId + 1 == id) {
            // Push samples into array
            this.Samples.push(audioBuffer);
            this.LastPushedId++;
            while (this.BufferStore[this.LastPushedId + 1]) {
                // Push samples we decoded earlier in correct order
                this.Samples.push(this.BufferStore[this.LastPushedId + 1]);
                delete this.BufferStore[this.LastPushedId + 1];
                this.LastPushedId++;
            }
            // Callback to tell that data is ready
            this.DataReadyCallback();
        }
        else {
            // Is out of order, will be pushed later
            this.BufferStore[id] = audioBuffer;
        }
    };
    // Used to concatenate two Uint8Array (b comes BEHIND a)
    AudioFormatReader.prototype.ConcatUint8Array = function (a, b) {
        var tmp = new Uint8Array(a.length + b.length);
        tmp.set(a, 0);
        tmp.set(b, a.length);
        return tmp;
    };
    // Probes an <audio> element for MIME types the browser claims it can play.
    AudioFormatReader.CanDecodeTypes = function (mimeTypes) {
        var audioTag = new Audio();
        var result = false;
        for (var i = 0; i < mimeTypes.length; i++) {
            var mimeType = mimeTypes[i];
            var answer = audioTag.canPlayType(mimeType);
            if (answer != "probably" && answer != "maybe")
                continue;
            result = true;
            break;
        }
        audioTag = null;
        return result;
    };
    // Default per-format tuning; the commented-out values document earlier
    // platform-specific experiments.
    AudioFormatReader.DefaultSettings = function () {
        var settings = {};
        // WAV
        settings["wav"] = {};
        // Duration of wave samples to decode together
        settings["wav"]["BatchDuration"] = 1 / 10; // 0.1 seconds
        /*
        if (isAndroid && isNativeChrome)
            settings["wav"]["BatchDuration"] = 96 / 375;
        else if (isAndroid && isFirefox)
            settings["wav"]["BatchDuration"] = 96 / 375;
        else
            settings["wav"]["BatchDuration"] = 16 / 375;
        */
        // Duration of additional samples to decode to account for edge effects
        settings["wav"]["ExtraEdgeDuration"] = 1 / 300; // 0.00333... seconds
        /*
        if (isAndroid && isNativeChrome)
            settings["wav"]["ExtraEdgeDuration"] = 1 / 1000;
        else if (isAndroid && isFirefox)
            settings["wav"]["ExtraEdgeDuration"] = 1 / 1000;
        else
            settings["wav"]["ExtraEdgeDuration"] = 1 / 1000;
        */
        // MPEG
        settings["mpeg"] = {};
        // Adds a minimal ID3v2 tag before decoding frames.
        settings["mpeg"]["AddID3Tag"] = false;
        // Minimum number of frames to decode together
        // Theoretical minimum is 2.
        // Recommended value is 3 or higher.
        if (isAndroid)
            settings["mpeg"]["MinDecodeFrames"] = 17;
        else
            settings["mpeg"]["MinDecodeFrames"] = 3;
        return settings;
    };
    // Factory: picks and constructs the reader subclass matching the MIME type.
    // (Unreachable `break` statements after return/throw and the unused local
    // `fullMime` were removed from the original.)
    AudioFormatReader.Create = function (mime, audio, logger, errorCallback, beforeDecodeCheck, dataReadyCallback, settings) {
        if (settings === void 0) { settings = null; }
        if (typeof mime !== "string")
            throw new Error('CreateAudioFormatReader: Invalid MIME-Type, must be string');
        if (!settings)
            settings = this.DefaultSettings();
        // Collapse parameterized PCM MIME types (e.g. "audio/pcm;rate=...") to the bare type
        if (mime.indexOf("audio/pcm") == 0)
            mime = "audio/pcm";
        // Load format handler according to MIME-Type
        switch (mime.replace(/\s/g, "")) {
            // MPEG Audio (mp3)
            case "audio/mpeg":
            case "audio/MPA":
            case "audio/mpa-robust":
                if (!AudioFormatReader.CanDecodeTypes(new Array("audio/mpeg", "audio/MPA", "audio/mpa-robust")))
                    throw new Error('CreateAudioFormatReader: Browser can not decode specified MIME-Type (' + mime + ')');
                return new AudioFormatReader_MPEG(audio, logger, errorCallback, beforeDecodeCheck, dataReadyCallback, settings["mpeg"]["AddID3Tag"], settings["mpeg"]["MinDecodeFrames"]);
            // Waveform Audio File Format
            case "audio/vnd.wave":
            case "audio/wav":
            case "audio/wave":
            case "audio/x-wav":
                if (!AudioFormatReader.CanDecodeTypes(new Array("audio/wav", "audio/wave")))
                    throw new Error('CreateAudioFormatReader: Browser can not decode specified MIME-Type (' + mime + ')');
                return new AudioFormatReader_WAV(audio, logger, errorCallback, beforeDecodeCheck, dataReadyCallback, settings["wav"]["BatchDuration"], settings["wav"]["ExtraEdgeDuration"]);
            // Unknown codec
            default:
                throw new Error('CreateAudioFormatReader: Specified MIME-Type (' + mime + ') not supported');
        }
    };
    return AudioFormatReader;
}());

View File

@@ -0,0 +1,147 @@
/*
Live audio player is part of 3LAS (Low Latency Live Audio Streaming)
https://github.com/JoJoBond/3LAS
*/
var LiveAudioPlayer = /** @class */ (function () {
    // Schedules decoded AudioBuffers back-to-back on the Web Audio context for
    // gapless live playback. With variableSpeed enabled it nudges the playback
    // rate to keep the amount of buffered audio near startOffset seconds.
    function LiveAudioPlayer(audio, logger, maxVolume, startOffset, variableSpeed) {
        if (maxVolume === void 0) { maxVolume = 1.0; }
        if (startOffset === void 0) { startOffset = 0.33; }
        if (variableSpeed === void 0) { variableSpeed = false; }
        this.Audio = audio;
        this.Logger = logger;
        this.MaxVolume = maxVolume;
        this.StartOffset = startOffset;
        this.VariableSpeed = variableSpeed;
        // Hysteresis band around StartOffset used by the speed controller.
        this.OffsetMin = this.StartOffset - LiveAudioPlayer.OffsetVariance;
        this.OffsetMax = this.StartOffset + LiveAudioPlayer.OffsetVariance;
        // Set speed to default
        this.PlaybackSpeed = 1.0;
        // Reset variable for scheduling times; 0.0 doubles as "nothing scheduled yet"
        this.NextScheduleTime = 0.0;
        // Create gain node for volume control
        this.Amplification = this.Audio.createGain();
        // Set volume to max
        this.Amplification.gain.value = 1.0;
        // Connect gain node to context
        this.Amplification.connect(this.Audio.destination);
    }
    Object.defineProperty(LiveAudioPlayer.prototype, "Volume", {
        get: function () {
            // Get volume from gain node
            return this.Amplification.gain.value;
        },
        set: function (value) {
            // Clamp value to [1e-20 ; MaxVolume]; the lower bound stays above
            // zero because exponential ramps cannot target 0.
            // NOTE(review): the upper clamp tests `value > 1.0` but assigns
            // MaxVolume; for MaxVolume != 1.0 this looks like it was meant to
            // be `value > this.MaxVolume` -- confirm before changing.
            if (value > 1.0)
                value = this.MaxVolume;
            else if (value <= 0.0)
                value = 1e-20;
            // Cancel any scheduled ramps
            this.Amplification.gain.cancelScheduledValues(this.Audio.currentTime);
            // Change volume following a ramp (more userfriendly)
            this.Amplification.gain.exponentialRampToValueAtTime(value, this.Audio.currentTime + 0.5);
        },
        enumerable: false,
        configurable: true
    });
    // Receives an audiobuffer and schedules it for seamless playback
    LiveAudioPlayer.prototype.PushBuffer = function (buffer) {
        // Check if this is the first buffer we received
        if (this.NextScheduleTime == 0.0) {
            // Start playing [StartOffset] s from now
            this.NextScheduleTime = this.Audio.currentTime + this.StartOffset;
        }
        var duration;
        if (this.VariableSpeed)
            duration = buffer.duration / this.PlaybackSpeed; // Wall-clock duration at the current playback rate
        else
            duration = buffer.duration; // Use the buffer's natural duration
        // Before creating a buffer and scheduling playback, check if playing this buffer makes sense at all
        // If a buffer should have been started so far in the past that it would have finished playing by now, we are better of skipping it.
        // But we still need to move the time forward to keep future timings right.
        if (this.NextScheduleTime + duration > this.Audio.currentTime) {
            var skipDurationTime = void 0;
            // If the playback start time is in the past but the playback end time is in the future, we need to partially play the buffer.
            if (this.Audio.currentTime >= this.NextScheduleTime) {
                // Calculate the time we need to skip (plus 0.05 s of scheduling headroom)
                skipDurationTime = this.Audio.currentTime - this.NextScheduleTime + 0.05;
            }
            else {
                // No skipping needed
                skipDurationTime = 0.0;
            }
            // Check if we'd skip the whole buffer anyway
            if (skipDurationTime < duration) {
                // Create new audio source for the buffer
                var sourceNode_1 = this.Audio.createBufferSource();
                // Make sure the node deletes itself after playback
                sourceNode_1.onended = function (_ev) {
                    sourceNode_1.disconnect();
                };
                // Prevent looping (the standard says that it should be off by default)
                sourceNode_1.loop = false;
                // Pass audio data to source
                sourceNode_1.buffer = buffer;
                //Connect the source to the gain node
                sourceNode_1.connect(this.Amplification);
                if (this.VariableSpeed) {
                    // NOTE(review): scheduleOffset is computed but never read.
                    var scheduleOffset = this.NextScheduleTime - this.Audio.currentTime;
                    // Check if we are too far from or too close to the target schedule time
                    if (this.NextScheduleTime - this.Audio.currentTime > this.OffsetMax) {
                        if (this.PlaybackSpeed < 1.0 + LiveAudioPlayer.SpeedCorrectionFactor) {
                            // We are too slow, speed up playback (somewhat noticeable)
                            this.Logger.Log("Buffer size too large, speeding up playback.");
                            this.PlaybackSpeed = 1.0 + LiveAudioPlayer.SpeedCorrectionFactor;
                            duration = buffer.duration / this.PlaybackSpeed;
                        }
                    }
                    else if (this.NextScheduleTime - this.Audio.currentTime < this.OffsetMin) {
                        if (this.PlaybackSpeed > 1.0 - LiveAudioPlayer.SpeedCorrectionFactor) {
                            // We are too fast, slow down playback (somewhat noticeable)
                            this.Logger.Log("Buffer size too small, slowing down playback.");
                            this.PlaybackSpeed = 1.0 - LiveAudioPlayer.SpeedCorrectionFactor;
                            duration = buffer.duration / this.PlaybackSpeed;
                        }
                    }
                    else {
                        // Check if we are in time
                        if ((this.PlaybackSpeed > 1.0 && (this.NextScheduleTime - this.Audio.currentTime < this.StartOffset)) ||
                            (this.PlaybackSpeed < 1.0 && (this.NextScheduleTime - this.Audio.currentTime > this.StartOffset))) {
                            // We are within our min/max offset, set playback back to default speed
                            this.Logger.Log("Buffer size within limits, using normal playback speed.");
                            this.PlaybackSpeed = 1.0;
                            duration = buffer.duration;
                        }
                    }
                    // Set playback speed
                    sourceNode_1.playbackRate.value = this.PlaybackSpeed;
                }
                // Schedule playback
                sourceNode_1.start(this.NextScheduleTime + skipDurationTime, skipDurationTime);
            }
            else {
                this.Logger.Log("Skipped buffer because it became too old.");
            }
        }
        else {
            this.Logger.Log("Skipped buffer because it was too old.");
        }
        // Move time forward
        this.NextScheduleTime += duration;
    };
    // Forgets the schedule so the next buffer re-primes with the StartOffset delay.
    LiveAudioPlayer.prototype.Reset = function () {
        this.NextScheduleTime = 0.0;
    };
    // True if a buffer of the given play length would still end in the future,
    // i.e. decoding it is worthwhile.
    LiveAudioPlayer.prototype.CheckBeforeDecode = function (playbackLength) {
        if (this.NextScheduleTime == 0)
            return true;
        return this.NextScheduleTime + playbackLength > this.Audio.currentTime;
    };
    // Crystal oscillators have a variance of about +/- 20ppm
    // So worst case would be a difference of 40ppm between two oscillators.
    LiveAudioPlayer.SpeedCorrectionFactor = 40 / 1.0e6;
    // Hysteresis value for the speed up/down trigger
    LiveAudioPlayer.OffsetVariance = 0.2;
    return LiveAudioPlayer;
}());
//# sourceMappingURL=3las.liveaudioplayer.js.map

View File

@@ -0,0 +1,284 @@
/*
MPEG audio format reader is part of 3LAS (Low Latency Live Audio Streaming)
https://github.com/JoJoBond/3LAS
*/
// TypeScript-emitted __extends helper: wires up prototypal inheritance for
// compiled classes (used by the AudioFormatReader subclasses in this bundle).
var __extends = (this && this.__extends) || (function () {
    // Copy static members and link prototypes via the best available mechanism.
    var extendStatics = function (d, b) {
        extendStatics = Object.setPrototypeOf ||
            ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
            function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
        return extendStatics(d, b);
    };
    return function (d, b) {
        if (typeof b !== "function" && b !== null)
            throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
        extendStatics(d, b);
        // Bridge constructor puts b.prototype on d's prototype chain without invoking b.
        function __() { this.constructor = d; }
        d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
    };
})();
// Record describing one parsed MPEG audio frame: its raw bytes together with
// the sample count and sample rate taken from the frame header.
var MPEGFrameInfo = function MPEGFrameInfo(data, sampleCount, sampleRate) {
    this.Data = data;
    this.SampleCount = sampleCount;
    this.SampleRate = sampleRate;
};
var AudioFormatReader_MPEG = /** @class */ (function (_super) {
__extends(AudioFormatReader_MPEG, _super);
function AudioFormatReader_MPEG(audio, logger, errorCallback, beforeDecodeCheck, dataReadyCallback, addId3Tag, minDecodeFrames) {
var _this = _super.call(this, audio, logger, errorCallback, beforeDecodeCheck, dataReadyCallback) || this;
_this._OnDecodeSuccess = _this.OnDecodeSuccess.bind(_this);
_this._OnDecodeError = _this.OnDecodeError.bind(_this);
_this.AddId3Tag = addId3Tag;
_this.MinDecodeFrames = minDecodeFrames;
_this.Frames = new Array();
_this.FrameStartIdx = -1;
_this.FrameEndIdx = -1;
_this.FrameSamples = 0;
_this.FrameSampleRate = 0;
_this.TimeBudget = 0;
return _this;
}
// Deletes all frames from the databuffer and framearray and all samples from the samplearray
AudioFormatReader_MPEG.prototype.PurgeData = function () {
_super.prototype.PurgeData.call(this);
this.Frames = new Array();
this.FrameStartIdx = -1;
this.FrameEndIdx = -1;
this.FrameSamples = 0;
this.FrameSampleRate = 0;
this.TimeBudget = 0;
};
// Extracts all currently available frames from the raw buffer and, once enough
// frames have accumulated, submits them as one window to the Web Audio decoder.
AudioFormatReader_MPEG.prototype.ExtractAll = function () {
    // Locate the next frame boundary in the data buffer
    this.FindFrame();
    // Pull out complete frames as long as the buffer holds them
    while (this.CanExtractFrame()) {
        // Extract frame and push into array
        this.Frames.push(this.ExtractFrame());
        // Look for the next frame
        this.FindFrame();
    }
    // Only decode once we have gathered enough frames for one window
    if (this.Frames.length >= this.MinDecodeFrames) {
        // Note:
        // =====
        // mp3 frames have an overlap of [granule size] so we can't use the first or last [granule size] samples.
        // [granule size] is equal to half of a [frame size] in samples (using the mp3's sample rate).
        // Sum up the playback time of each decoded frame and data buffer lengths.
        // Note: Since mp3-Frames overlap by half of their sample-length we expect the
        // first and last frame to be only half as long. Some decoders will still output
        // the full frame length by adding zeros.
        var bufferLength = 0;
        var expectedTotalPlayTime_1 = 0;
        expectedTotalPlayTime_1 += this.Frames[0].SampleCount / this.Frames[0].SampleRate / 2.0; // Only half of data is usable due to overlap
        bufferLength += this.Frames[0].Data.length;
        for (var i = 1; i < this.Frames.length - 1; i++) {
            expectedTotalPlayTime_1 += this.Frames[i].SampleCount / this.Frames[i].SampleRate;
            bufferLength += this.Frames[i].Data.length;
        }
        expectedTotalPlayTime_1 += this.Frames[this.Frames.length - 1].SampleCount / this.Frames[this.Frames.length - 1].SampleRate / 2.0; // Only half of data is usable due to overlap
        bufferLength += this.Frames[this.Frames.length - 1].Data.length;
        // If needed, add some space for the ID3v2 tag
        if (this.AddId3Tag) {
            bufferLength += AudioFormatReader_MPEG.Id3v2Tag.length;
        }
        // Create a buffer long enough to hold everything
        var decodeBuffer = new Uint8Array(bufferLength);
        var offset = 0;
        // If needed, prepend the static ID3v2 tag to the decode window
        if (this.AddId3Tag) {
            decodeBuffer.set(AudioFormatReader_MPEG.Id3v2Tag, offset);
            offset += AudioFormatReader_MPEG.Id3v2Tag.length;
        }
        // Concatenate all frames into the decode window
        for (var i = 0; i < this.Frames.length; i++) {
            decodeBuffer.set(this.Frames[i].Data, offset);
            offset += this.Frames[i].Data.length;
        }
        // Remove the used frames from the array, but keep the LAST one: it seeds
        // the next window so consecutive windows overlap by one frame (matching
        // the half-duration accounting above).
        this.Frames.splice(0, this.Frames.length - 1);
        // Assign a sequence id to this window
        var id_1 = this.Id++;
        // Give the caller a chance to veto decoding (e.g. data too far in the past)
        if (!this.OnBeforeDecode(id_1, expectedTotalPlayTime_1))
            return;
        // Push window to the asynchronous decoder; the closure captures the id
        // and expected play time for when the decode completes.
        this.Audio.decodeAudioData(decodeBuffer.buffer, (function (decodedData) {
            var _id = id_1;
            var _expectedTotalPlayTime = expectedTotalPlayTime_1;
            this._OnDecodeSuccess(decodedData, _id, _expectedTotalPlayTime);
        }).bind(this), this._OnDecodeError.bind(this));
    }
};
// Finds frame boundaries within the data buffer: first the 11-bit sync word
// that marks a frame start, then the frame end computed from the header fields.
AudioFormatReader_MPEG.prototype.FindFrame = function () {
    // Find frame start
    if (this.FrameStartIdx < 0) {
        var i = 0;
        // Make sure we don't exceed array bounds (sync word needs 2 bytes)
        while ((i + 1) < this.DataBuffer.length) {
            // Look for the MPEG sync word: 0xFF followed by the top 3 bits set
            if (this.DataBuffer[i] == 0xFF && (this.DataBuffer[i + 1] & 0xE0) == 0xE0) {
                // Sync found, set frame start
                this.FrameStartIdx = i;
                break;
            }
            i++;
        }
    }
    // Find frame end
    if (this.FrameStartIdx >= 0 && this.FrameEndIdx < 0) {
        // Check if we have enough data to process the header
        if ((this.FrameStartIdx + 2) < this.DataBuffer.length) {
            // Get header data
            // Version index (bits 4-3 of the second header byte)
            var ver = (this.DataBuffer[this.FrameStartIdx + 1] & 0x18) >>> 3;
            // Layer index (bits 2-1 of the second header byte)
            var lyr = (this.DataBuffer[this.FrameStartIdx + 1] & 0x06) >>> 1;
            // Padding flag: 0/1 (bit 1 of the third header byte)
            var pad = (this.DataBuffer[this.FrameStartIdx + 2] & 0x02) >>> 1;
            // Bitrate index (bits 7-4 of the third header byte)
            var brx = (this.DataBuffer[this.FrameStartIdx + 2] & 0xF0) >>> 4;
            // Sample-rate index (bits 3-2 of the third header byte)
            var srx = (this.DataBuffer[this.FrameStartIdx + 2] & 0x0C) >>> 2;
            // Resolve the index values through the static lookup tables
            var bitrate = AudioFormatReader_MPEG.MPEG_bitrates[ver][lyr][brx] * 1000;
            var samprate = AudioFormatReader_MPEG.MPEG_srates[ver][srx];
            var samples = AudioFormatReader_MPEG.MPEG_frame_samples[ver][lyr];
            var slot_size = AudioFormatReader_MPEG.MPEG_slot_size[lyr];
            // In-between calculations
            var bps = samples / 8.0;
            var fsize = ((bps * bitrate) / samprate) + ((pad == 1) ? slot_size : 0);
            // NOTE(review): reserved/invalid header values (bitrate index 0 or 15,
            // reserved version/layer) resolve to 0 in the tables, yielding a zero
            // or NaN frame size here instead of triggering a resync — confirm
            // whether upstream input guarantees valid headers.
            // Truncate to integer
            var frameSize = Math.floor(fsize);
            // Store number of samples and samplerate for frame
            this.FrameSamples = samples;
            this.FrameSampleRate = samprate;
            // Set end frame boundary (exclusive index into DataBuffer)
            this.FrameEndIdx = this.FrameStartIdx + frameSize;
        }
    }
};
// Returns true when a complete frame is available: a start was found, the end
// boundary is known, and every byte of the frame is already in the buffer.
AudioFormatReader_MPEG.prototype.CanExtractFrame = function () {
    return this.FrameStartIdx >= 0 &&
        this.FrameEndIdx >= 0 &&
        this.FrameEndIdx <= this.DataBuffer.length;
};
// Extracts a single frame from the buffer and removes its bytes.
// Precondition: CanExtractFrame() returned true, i.e. FrameStartIdx and
// FrameEndIdx are valid and FrameEndIdx <= DataBuffer.length.
// Returns an MPEGFrameInfo with the frame bytes, sample count and sample rate.
AudioFormatReader_MPEG.prototype.ExtractFrame = function () {
    // Copy the frame bytes out of the underlying buffer
    var frameArray = this.DataBuffer.buffer.slice(this.FrameStartIdx, this.FrameEndIdx);
    // Remove the consumed bytes from the buffer.
    // Fix: the previous test `(FrameEndIdx + 1) < length` silently discarded the
    // remainder whenever exactly one byte was left after the frame; comparing
    // FrameEndIdx itself against the length preserves every trailing byte.
    if (this.FrameEndIdx < this.DataBuffer.length)
        this.DataBuffer = new Uint8Array(this.DataBuffer.buffer.slice(this.FrameEndIdx));
    else
        this.DataBuffer = new Uint8Array(0);
    // Reset indices: the next frame is expected to start right at the buffer
    // head (index 0), while the end must be recomputed from the next header.
    this.FrameStartIdx = 0;
    this.FrameEndIdx = -1;
    return new MPEGFrameInfo(new Uint8Array(frameArray), this.FrameSamples, this.FrameSampleRate);
};
// Called when decoding of a window succeeded. Trims the decoded PCM back to the
// expected play time (compensating for decoder padding/truncation) and forwards
// the result to the data-ready callback.
AudioFormatReader_MPEG.prototype.OnDecodeSuccess = function (decodedData, id, expectedTotalPlayTime) {
    var extractSampleCount;
    var extractSampleOffset;
    // Check if we got the expected number of samples
    if (expectedTotalPlayTime > decodedData.duration) {
        // We got fewer samples than expected; assume they were truncated equally
        // at start and end (can happen with sample rate conversions). Remember
        // the missing time in TimeBudget so it can be paid back later.
        extractSampleCount = decodedData.length;
        extractSampleOffset = 0;
        this.TimeBudget += (expectedTotalPlayTime - decodedData.duration);
    }
    else if (expectedTotalPlayTime < decodedData.duration) {
        // We got more samples than expected; assume zeros were added equally at
        // start and end (sample rate conversions or edge-frame handling).
        extractSampleCount = Math.ceil(expectedTotalPlayTime * decodedData.sampleRate);
        var budgetSamples = this.TimeBudget * decodedData.sampleRate;
        if (budgetSamples > 1.0) {
            // Pay back accumulated time debt, but never take more than the surplus
            if (budgetSamples > decodedData.length - extractSampleCount) {
                budgetSamples = decodedData.length - extractSampleCount;
            }
            // NOTE(review): budgetSamples is not floored here, so
            // extractSampleCount can become fractional; createBuffer below then
            // relies on implicit integer coercion of its length argument —
            // confirm this is intended.
            extractSampleCount += budgetSamples;
            this.TimeBudget -= (budgetSamples / decodedData.sampleRate);
        }
        // Cut equally from both edges of the decoded data
        extractSampleOffset = Math.floor((decodedData.length - extractSampleCount) / 2);
    }
    else {
        // We got the expected number of samples, no adaption needed
        extractSampleCount = decodedData.length;
        extractSampleOffset = 0;
    }
    // Create a buffer that can hold the extracted region
    var audioBuffer = this.Audio.createBuffer(decodedData.numberOfChannels, extractSampleCount, decodedData.sampleRate);
    // Copy the selected region of every channel into the output buffer
    for (var i = 0; i < decodedData.numberOfChannels; i++)
        audioBuffer.getChannelData(i).set(decodedData.getChannelData(i).subarray(extractSampleOffset, extractSampleOffset + extractSampleCount));
    this.OnDataReady(id, audioBuffer);
};
// Invoked when decodeAudioData rejects for a window. The error detail is
// intentionally discarded; the callback merely signals that decoding failed.
AudioFormatReader_MPEG.prototype.OnDecodeError = function (_error) {
    this.ErrorCallback.call(this);
};
// MPEG version lookup (indexed by the 2-bit version field) - use [version]
// 25 => MPEG 2.5, 0 => reserved, 2 => MPEG 2, 1 => MPEG 1
// NOTE(review): not referenced by the code visible in this file; kept as documentation.
AudioFormatReader_MPEG.MPEG_versions = new Array(25, 0, 2, 1);
// Layer lookup (indexed by the 2-bit layer field) - use [layer]
// NOTE(review): not referenced by the code visible in this file; kept as documentation.
AudioFormatReader_MPEG.MPEG_layers = new Array(0, 3, 2, 1);
// Bitrates in kbit/s - use [version][layer][bitrate]; 0 marks free/invalid entries
AudioFormatReader_MPEG.MPEG_bitrates = new Array(new Array(// Version 2.5
new Array(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), // Reserved
new Array(0, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160, 0), // Layer 3
new Array(0, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160, 0), // Layer 2
new Array(0, 32, 48, 56, 64, 80, 96, 112, 128, 144, 160, 176, 192, 224, 256, 0) // Layer 1
), new Array(// Reserved
new Array(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), // Invalid
new Array(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), // Invalid
new Array(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), // Invalid
new Array(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) // Invalid
), new Array(// Version 2
new Array(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), // Reserved
new Array(0, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160, 0), // Layer 3
new Array(0, 8, 16, 24, 32, 40, 48, 56, 64, 80, 96, 112, 128, 144, 160, 0), // Layer 2
new Array(0, 32, 48, 56, 64, 80, 96, 112, 128, 144, 160, 176, 192, 224, 256, 0) // Layer 1
), new Array(// Version 1
new Array(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0), // Reserved
new Array(0, 32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 0), // Layer 3
new Array(0, 32, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 384, 0), // Layer 2
new Array(0, 32, 64, 96, 128, 160, 192, 224, 256, 288, 320, 352, 384, 416, 448, 0) // Layer 1
));
// Sample rates in Hz - use [version][srate]; 0 marks reserved entries
AudioFormatReader_MPEG.MPEG_srates = new Array(new Array(11025, 12000, 8000, 0), // MPEG 2.5
new Array(0, 0, 0, 0), // Reserved
new Array(22050, 24000, 16000, 0), // MPEG 2
new Array(44100, 48000, 32000, 0) // MPEG 1
);
// Samples per frame - use [version][layer]
AudioFormatReader_MPEG.MPEG_frame_samples = new Array(
// Rsvd 3 2 1 < Layer v Version
new Array(0, 576, 1152, 384), // 2.5
new Array(0, 0, 0, 0), // Reserved
new Array(0, 576, 1152, 384), // 2
new Array(0, 1152, 1152, 384) // 1
);
// Minimal static ID3v2 tag that is optionally prepended to each decode window
// (some decoders accept tagged streams more reliably).
AudioFormatReader_MPEG.Id3v2Tag = new Uint8Array(new Array(0x49, 0x44, 0x33, // File identifier: "ID3"
0x03, 0x00, // Version 2.3
0x00, // Flags: no unsynchronisation, no extended header, no experimental indicator
0x00, 0x00, 0x00, 0x0D, // Size of the (tag-)frames, extended header and padding
0x54, 0x49, 0x54, 0x32, // Title frame: "TIT2"
0x00, 0x00, 0x00, 0x02, // Size of the frame data
0x00, 0x00, // Frame Flags
0x00, 0x20, 0x00 // Frame data (space character) and padding
));
// Slot size in bytes (MPEG unit of measurement, used for padding) - use [layer]
AudioFormatReader_MPEG.MPEG_slot_size = new Array(0, 1, 1, 4); // Rsvd, 3, 2, 1
return AudioFormatReader_MPEG;
}(AudioFormatReader));
//# sourceMappingURL=3las.formatreader.mpeg.js.map

View File

@@ -0,0 +1,223 @@
/*
WAV audio format reader is part of 3LAS (Low Latency Live Audio Streaming)
https://github.com/JoJoBond/3LAS
*/
// TypeScript-emitted inheritance helper: copies static members from the parent
// onto the child and chains the prototypes. Reuses an already-installed global
// implementation if one exists.
var __extends = (this && this.__extends) || (function () {
    var copyStatics = function (child, parent) {
        // Lazily pick the best static-copy strategy on first use, then memoize it.
        copyStatics = Object.setPrototypeOf ||
            ({ __proto__: [] } instanceof Array && function (child, parent) { child.__proto__ = parent; }) ||
            function (child, parent) { for (var key in parent) if (Object.prototype.hasOwnProperty.call(parent, key)) child[key] = parent[key]; };
        return copyStatics(child, parent);
    };
    return function (child, parent) {
        if (typeof parent !== "function" && parent !== null)
            throw new TypeError("Class extends value " + String(parent) + " is not a constructor or null");
        copyStatics(child, parent);
        // Bridge constructor so child.prototype inherits from parent.prototype
        // without invoking the parent constructor.
        function Bridge() { this.constructor = child; }
        child.prototype = parent === null ? Object.create(parent) : (Bridge.prototype = parent.prototype, new Bridge());
    };
})();
var AudioFormatReader_WAV = /** @class */ (function (_super) {
    __extends(AudioFormatReader_WAV, _super);
    /**
     * Format reader for RIFF/WAVE streams.
     * Parses the RIFF header once, then slices the raw PCM into fixed-duration
     * batches (plus extra edge samples to mask resampling artifacts), prepends
     * the size-patched header to each batch and feeds it to decodeAudioData.
     * @param audio AudioContext used for decoding and buffer creation.
     * @param logger Logging instance.
     * @param errorCallback Invoked when decoding fails.
     * @param beforeDecodeCheck Predicate that may veto decoding of a batch.
     * @param dataReadyCallback Receives (id, AudioBuffer) per decoded batch.
     * @param batchDuration Nominal duration (seconds) of each output batch.
     * @param extraEdgeDuration Extra duration (seconds) decoded around each batch.
     */
    function AudioFormatReader_WAV(audio, logger, errorCallback, beforeDecodeCheck, dataReadyCallback, batchDuration, extraEdgeDuration) {
        var _this = _super.call(this, audio, logger, errorCallback, beforeDecodeCheck, dataReadyCallback) || this;
        // Pre-bound decode callbacks so they can be handed out repeatedly
        _this._OnDecodeSuccess = _this.OnDecodeSuccess.bind(_this);
        _this._OnDecodeError = _this.OnDecodeError.bind(_this);
        _this.BatchDuration = batchDuration;
        _this.ExtraEdgeDuration = extraEdgeDuration;
        // Header state: filled in by FindAndExtractHeader()
        _this.GotHeader = false;
        _this.RiffHeader = null;
        _this.WaveSampleRate = 0;
        _this.WaveBitsPerSample = 0;
        _this.WaveBytesPerSample = 0;
        _this.WaveBlockAlign = 0;
        _this.WaveChannels = 0;
        // Batch sizing state: derived from the header once it is known
        _this.BatchSamples = 0;
        _this.BatchBytes = 0;
        _this.ExtraEdgeSamples = 0;
        _this.TotalBatchSampleSize = 0;
        _this.TotalBatchByteSize = 0;
        // Accumulated fractional-sample rounding debt across batches
        _this.SampleBudget = 0;
        return _this;
    }
    // Deletes all samples from the databuffer and resets the rounding budget.
    AudioFormatReader_WAV.prototype.PurgeData = function () {
        _super.prototype.PurgeData.call(this);
        this.SampleBudget = 0;
    };
    // Deletes all data from the reader, including the parsed header state.
    AudioFormatReader_WAV.prototype.Reset = function () {
        _super.prototype.Reset.call(this);
        this.GotHeader = false;
        this.RiffHeader = null;
        this.WaveSampleRate = 0;
        this.WaveBitsPerSample = 0;
        this.WaveBytesPerSample = 0;
        this.WaveBlockAlign = 0;
        this.WaveChannels = 0;
        this.BatchSamples = 0;
        this.BatchBytes = 0;
        this.ExtraEdgeSamples = 0;
        this.TotalBatchSampleSize = 0;
        this.TotalBatchByteSize = 0;
        this.SampleBudget = 0;
    };
    // Extracts as many complete batches as the buffer currently holds and
    // submits each one to the decoder; parses the header first if needed.
    AudioFormatReader_WAV.prototype.ExtractAll = function () {
        if (!this.GotHeader)
            this.FindAndExtractHeader();
        else {
            var _loop_1 = function () {
                // Extract samples
                var tmpSamples = this_1.ExtractIntSamples();
                // Increment Id
                var id = this_1.Id++;
                // Give the caller a chance to veto decoding of this batch
                if (!this_1.OnBeforeDecode(id, this_1.BatchDuration))
                    return "continue";
                // Note:
                // =====
                // When audio data is resampled we get edge-effects at beginning and end.
                // We should be able to compensate for that by keeping the last sample of the
                // previous batch and adding it to the beginning of the current one, but then
                // cutting it out AFTER the resampling (since the same effects apply to it)
                // The effects at the end can be compensated by cutting the resampled samples shorter
                // This is not trivial for non-natural ratios (e.g. 16kHz -> 44.1kHz). Because we would have
                // to cut out a non-natural number of samples at beginning and end.
                // TODO: All of the above...
                // Create a buffer long enough to hold header plus samples
                var samplesBuffer = new Uint8Array(this_1.RiffHeader.length + tmpSamples.length);
                var offset = 0;
                // Add header
                samplesBuffer.set(this_1.RiffHeader, offset);
                offset += this_1.RiffHeader.length;
                // Add samples
                samplesBuffer.set(tmpSamples, offset);
                // Push the assembled mini-WAV to the decoder; the closure captures id.
                this_1.Audio.decodeAudioData(samplesBuffer.buffer, (function (decodedData) {
                    var _id = id;
                    this._OnDecodeSuccess(decodedData, _id);
                }).bind(this_1), this_1._OnDecodeError);
            };
            var this_1 = this;
            while (this.CanExtractSamples()) {
                _loop_1();
            }
        }
    };
    // Finds the RIFF header within the data buffer, extracts it and derives the
    // batch sizing values. Returns early (without setting GotHeader) whenever
    // more data is required.
    AudioFormatReader_WAV.prototype.FindAndExtractHeader = function () {
        var curpos = 0;
        // Make sure a whole header can fit
        if (!((curpos + 4) < this.DataBuffer.length))
            return;
        // Check chunkID, should be "RIFF"
        if (!(this.DataBuffer[curpos] == 0x52 && this.DataBuffer[curpos + 1] == 0x49 && this.DataBuffer[curpos + 2] == 0x46 && this.DataBuffer[curpos + 3] == 0x46))
            return;
        // Skip "RIFF" id and the 4-byte chunk size
        curpos += 8;
        if (!((curpos + 4) < this.DataBuffer.length))
            return;
        // Check riffType, should be "WAVE"
        if (!(this.DataBuffer[curpos] == 0x57 && this.DataBuffer[curpos + 1] == 0x41 && this.DataBuffer[curpos + 2] == 0x56 && this.DataBuffer[curpos + 3] == 0x45))
            return;
        curpos += 4;
        if (!((curpos + 4) < this.DataBuffer.length))
            return;
        // Check for format subchunk, should be "fmt "
        if (!(this.DataBuffer[curpos] == 0x66 && this.DataBuffer[curpos + 1] == 0x6d && this.DataBuffer[curpos + 2] == 0x74 && this.DataBuffer[curpos + 3] == 0x20))
            return;
        curpos += 4;
        if (!((curpos + 4) < this.DataBuffer.length))
            return;
        // Little-endian 32-bit size of the fmt chunk
        var subChunkSize = this.DataBuffer[curpos] | this.DataBuffer[curpos + 1] << 8 | this.DataBuffer[curpos + 2] << 16 | this.DataBuffer[curpos + 3] << 24;
        if (!((curpos + 4 + subChunkSize) < this.DataBuffer.length))
            return;
        // Skip size field (4) and audio-format field (2) => channel count
        curpos += 6;
        this.WaveChannels = this.DataBuffer[curpos] | this.DataBuffer[curpos + 1] << 8;
        curpos += 2;
        this.WaveSampleRate = this.DataBuffer[curpos] | this.DataBuffer[curpos + 1] << 8 | this.DataBuffer[curpos + 2] << 16 | this.DataBuffer[curpos + 3] << 24;
        // Skip sample rate (4) and byte rate (4) => block align
        curpos += 8;
        this.WaveBlockAlign = this.DataBuffer[curpos] | this.DataBuffer[curpos + 1] << 8;
        curpos += 2;
        this.WaveBitsPerSample = this.DataBuffer[curpos] | this.DataBuffer[curpos + 1] << 8;
        this.WaveBytesPerSample = this.WaveBitsPerSample / 8;
        // Skip any remaining fmt-chunk bytes (14 bytes of it were consumed above,
        // not counting the bits-per-sample field the cursor still points at)
        curpos += subChunkSize - 14;
        // Walk subchunks until the "data" chunk is found
        while (true) {
            if ((curpos + 8) < this.DataBuffer.length) {
                subChunkSize = this.DataBuffer[curpos + 4] | this.DataBuffer[curpos + 5] << 8 | this.DataBuffer[curpos + 6] << 16 | this.DataBuffer[curpos + 7] << 24;
                // Check for data subchunk, should be "data"
                if (this.DataBuffer[curpos] == 0x64 && this.DataBuffer[curpos + 1] == 0x61 && this.DataBuffer[curpos + 2] == 0x74 && this.DataBuffer[curpos + 3] == 0x61) // Data chunk found
                    break;
                else
                    curpos += 8 + subChunkSize;
            }
            else
                return; // Need more data before the data chunk is reachable
        }
        // Include the "data" id and its size field in the stored header
        curpos += 8;
        this.RiffHeader = new Uint8Array(this.DataBuffer.buffer.slice(0, curpos));
        // Derive batch sizing from the now-known sample rate
        this.BatchSamples = Math.ceil(this.BatchDuration * this.WaveSampleRate);
        this.ExtraEdgeSamples = Math.ceil(this.ExtraEdgeDuration * this.WaveSampleRate);
        this.BatchBytes = this.BatchSamples * this.WaveBlockAlign;
        this.TotalBatchSampleSize = (this.BatchSamples + this.ExtraEdgeSamples);
        this.TotalBatchByteSize = this.TotalBatchSampleSize * this.WaveBlockAlign;
        // Patch the stored header's chunk sizes so each batch looks like a
        // complete standalone WAV file to the decoder.
        var chunkSize = this.RiffHeader.length + this.TotalBatchByteSize - 8;
        this.RiffHeader[4] = chunkSize & 0xFF;
        this.RiffHeader[5] = (chunkSize & 0xFF00) >>> 8;
        this.RiffHeader[6] = (chunkSize & 0xFF0000) >>> 16;
        this.RiffHeader[7] = (chunkSize & 0xFF000000) >>> 24;
        this.RiffHeader[this.RiffHeader.length - 4] = (this.TotalBatchByteSize & 0xFF);
        this.RiffHeader[this.RiffHeader.length - 3] = (this.TotalBatchByteSize & 0xFF00) >>> 8;
        this.RiffHeader[this.RiffHeader.length - 2] = (this.TotalBatchByteSize & 0xFF0000) >>> 16;
        this.RiffHeader[this.RiffHeader.length - 1] = (this.TotalBatchByteSize & 0xFF000000) >>> 24;
        this.GotHeader = true;
    };
    // Checks if a full batch of samples is ready to be extracted.
    AudioFormatReader_WAV.prototype.CanExtractSamples = function () {
        if (this.DataBuffer.length >= this.TotalBatchByteSize)
            return true;
        else
            return false;
    };
    // Extracts a single batch (including edge samples) from the buffer; only the
    // nominal batch length is removed, so consecutive batches overlap at the edges.
    AudioFormatReader_WAV.prototype.ExtractIntSamples = function () {
        // Extract sample data from buffer
        var intSampleArray = new Uint8Array(this.DataBuffer.buffer.slice(0, this.TotalBatchByteSize));
        // Remove only BatchBytes so the edge samples are re-decoded next time
        this.DataBuffer = new Uint8Array(this.DataBuffer.buffer.slice(this.BatchBytes));
        return intSampleArray;
    };
    // Called when decoding of a batch succeeded. Cuts the decoded PCM down to
    // the nominal batch duration (centered, so the edge artifacts fall away)
    // and forwards it to the data-ready callback.
    AudioFormatReader_WAV.prototype.OnDecodeSuccess = function (decodedData, id) {
        // Nominal number of output samples at the decoder's sample rate
        var pickSize = this.BatchDuration * decodedData.sampleRate;
        // Track the fractional remainder so rounding errors don't accumulate
        this.SampleBudget += (pickSize - Math.ceil(pickSize));
        pickSize = Math.ceil(pickSize);
        // Center the picked region within the decoded data
        var pickOffset = (decodedData.length - pickSize) / 2.0;
        if (pickOffset < 0)
            pickOffset = 0; // This should never happen!
        else
            pickOffset = Math.floor(pickOffset);
        // Pay back accumulated rounding debt in whole samples
        if (this.SampleBudget < -1.0) {
            var correction = -1.0 * Math.floor(Math.abs(this.SampleBudget));
            this.SampleBudget -= correction;
            pickSize += correction;
        }
        else if (this.SampleBudget > 1.0) {
            var correction = Math.floor(this.SampleBudget);
            this.SampleBudget -= correction;
            pickSize += correction;
        }
        // Create a buffer that can hold a single batch
        var audioBuffer = this.Audio.createBuffer(decodedData.numberOfChannels, pickSize, decodedData.sampleRate);
        // Copy exactly pickSize samples starting at pickOffset into the output.
        // Fix: the previous `slice(pickOffset, -pickOffset)` returned an EMPTY
        // array when pickOffset was 0 (slice(0, -0) === slice(0, 0)) and in
        // general produced a length different from pickSize, which could make
        // set() throw or drop audio; subarray with explicit bounds matches the
        // created buffer's length.
        for (var i = 0; i < decodedData.numberOfChannels; i++)
            audioBuffer.getChannelData(i).set(decodedData.getChannelData(i).subarray(pickOffset, pickOffset + pickSize));
        this.OnDataReady(id, audioBuffer);
    };
    // Called in case the decoding of a batch fails; the detail is discarded.
    AudioFormatReader_WAV.prototype.OnDecodeError = function (_error) {
        this.ErrorCallback();
    };
    return AudioFormatReader_WAV;
}(AudioFormatReader));
//# sourceMappingURL=3las.formatreader.wav.js.map

49
web/js/3las/main.js Normal file
View File

@@ -0,0 +1,49 @@
var Stream; // Global _3LAS streaming instance, created in Init().
var DefaultVolume = 0.5; // Initial playback volume (0.0-1.0); NOTE(review): no consumer visible in this chunk.
// Page entry point: builds the 3LAS settings from optional page-level globals
// (RtcConfig, SocketPort, SocketPath, AudioTagId), creates the stream instance
// and wires up the play button and volume slider.
function Init(_ev) {
    // Load default settings
    var settings = new _3LAS_Settings();
    // RtcConfig may be declared by the embedding page; default to an empty
    // config (the assignment intentionally targets the page-level binding).
    if (typeof RtcConfig == 'undefined')
        RtcConfig = {};
    settings.WebRTC.RtcConfig = RtcConfig;
    // Optional page-level overrides for the websocket endpoint
    if (typeof SocketPort != 'undefined')
        settings.SocketPort = SocketPort;
    if (typeof SocketPath != 'undefined')
        settings.SocketPath = SocketPath;
    // Optional <audio> element for WebRTC playback
    if (typeof AudioTagId == 'undefined')
        settings.WebRTC.AudioTag = null;
    else
        settings.WebRTC.AudioTag = document.getElementById(AudioTagId);
    try {
        Stream = new _3LAS(null, settings);
    }
    catch (_ex) {
        // Construction fails when the browser supports no media handling method;
        // leave the page without audio rather than crashing.
        console.log(_ex);
        return;
    }
    Stream.ConnectivityCallback = OnConnectivityCallback;
    // UI wiring: native handler for the play button, jQuery for the slider
    document.getElementById("playbutton").onclick = OnPlayButtonClick;
    $("#volumeSlider").on("change", updateVolume);
}
// Called by the 3LAS stack when connectivity changes; restores full volume
// whenever the stream (re)connects and does nothing on disconnect.
function OnConnectivityCallback(isConnected) {
    if (!isConnected)
        return;
    Stream.Volume = 1.0;
}
// Starts playback when the user clicks play, then disables the button and
// swaps its icon from play to pause. Failures (e.g. the browser refusing
// autoplay) are deliberately ignored so the button can be clicked again.
function OnPlayButtonClick(_ev) {
    try {
        Stream.Start();
        var button = $('#playbutton');
        button.prop('disabled', true);
        button.find('.fa-solid').removeClass('fa-play').addClass('fa-pause');
    }
    catch (_ex) {
        // Intentionally swallowed: leave the UI untouched on failure.
    }
}
// Volume-slider "change" handler (bound in Init via jQuery, so `this` is the
// slider element). jQuery's val() returns a string; convert explicitly so the
// Volume setter receives a number instead of relying on implicit coercion in
// the audio pipeline.
function updateVolume() {
    Stream.Volume = Number($(this).val());
}
var lastTapTime = -1;

View File

@@ -0,0 +1,130 @@
/*
Helpers is part of 3LAS (Low Latency Live Audio Streaming)
https://github.com/JoJoBond/3LAS
*/
// Platform/browser detection flags, filled in by the user-agent sniffing block
// below. (The stray empty statements are artifacts of the TypeScript emit.)
var isAndroid;
var isIOS;
var isIPadOS;
var isWindows;
var isLinux;
var isBSD;
var isMacOSX;
var isInternetExplorer;
var isEdge;
;
var isSafari;
;
var isOpera;
;
var isChrome;
;
var isFirefox;
;
var webkitVer;
var isNativeChrome;
;
var BrowserName;
var OSName;
// One-shot user-agent sniffing; runs at load time and populates the flags above.
{
    var ua = navigator.userAgent.toLowerCase();
    isAndroid = (ua.match('android') ? true : false);
    isIOS = (ua.match(/(iphone|ipod)/g) ? true : false);
    // iPadOS 13+ reports itself as MacIntel with touch support
    isIPadOS = ((ua.match('ipad') || (navigator.platform == 'MacIntel' && navigator.maxTouchPoints > 1)) ? true : false);
    isWindows = (ua.match('windows') ? true : false);
    // Android UAs also contain "linux", so exclude them explicitly
    isLinux = (ua.match('android') ? false : (ua.match('linux') ? true : false));
    isBSD = (ua.match('bsd') ? true : false);
    isMacOSX = !isIOS && !isIPadOS && (ua.match('mac osx') ? true : false);
    isInternetExplorer = (ua.match('msie') ? true : false);
    isEdge = (ua.match('edg') ? true : false);
    // Chrome-family UAs also contain "safari", so exclude them first
    isSafari = (ua.match(/(chromium|chrome|crios)/g) ? false : (ua.match('safari') ? true : false));
    isOpera = (ua.match('opera') ? true : false);
    isChrome = !isSafari && (ua.match(/(chromium|chrome|crios)/g) ? true : false);
    // "like gecko" appears in non-Gecko UAs; exclude it before the Gecko match
    isFirefox = (ua.match('like gecko') ? false : (ua.match(/(gecko|fennec|firefox)/g) ? true : false));
    webkitVer = parseInt((/WebKit\/([0-9]+)/.exec(navigator.appVersion) || ["", "0"])[1], 10) || void 0; // also match AppleWebKit
    // Old Android stock browser: WebKit <= 537 with Google as vendor
    isNativeChrome = isAndroid && webkitVer <= 537 && navigator.vendor.toLowerCase().indexOf('google') == 0;
    BrowserName = "Unknown";
    if (isInternetExplorer)
        BrowserName = "IE";
    else if (isEdge)
        BrowserName = "Edge";
    else if (isSafari)
        BrowserName = "Safari";
    else if (isOpera)
        BrowserName = "Opera";
    else if (isChrome)
        BrowserName = "Chrome";
    else if (isFirefox)
        BrowserName = "Firefox";
    else if (isNativeChrome)
        BrowserName = "NativeChrome";
    else
        BrowserName = "Unknown";
    OSName = "Unknown";
    if (isAndroid)
        OSName = "Android";
    else if (isIOS)
        OSName = "iOS";
    else if (isIPadOS)
        OSName = "iPadOS";
    else if (isWindows)
        OSName = "Windows";
    else if (isLinux)
        OSName = "Linux";
    else if (isBSD)
        OSName = "BSD";
    else if (isMacOSX)
        OSName = "MacOSX";
    else
        OSName = "Unknown";
}
;
var WakeLock = /** @class */ (function () {
    /**
     * Keeps the screen awake during playback. Prefers the WakeLock API; on
     * browsers without it, falls back to looping a tiny invisible video.
     * @param logger Logging instance used for status output.
     */
    function WakeLock(logger) {
        this.Logger = logger;
        this.Logger.Log("Preparing WakeLock");
        if (typeof navigator.wakeLock == "undefined") {
            // No WakeLock API: append a hidden looping video to the page
            this.Logger.Log("Using video loop method.");
            var video = document.createElement('video');
            video.setAttribute('loop', '');
            video.setAttribute('style', 'position: fixed; opacity: 0.1; pointer-events: none;');
            WakeLock.AddSourceToVideo(video, 'webm', 'data:video/webm;base64,' + WakeLock.VideoWebm);
            WakeLock.AddSourceToVideo(video, 'mp4', 'data:video/mp4;base64,' + WakeLock.VideoMp4);
            document.body.appendChild(video);
            this.LockElement = video;
        }
        else {
            // WakeLock API available; LockElement stays null until Begin()
            // acquires a lock (it then doubles as storage for the sentinel).
            this.Logger.Log("Using WakeLock API.");
            this.LockElement = null;
        }
    }
    // Activates the wake lock: requests a screen lock via the API, or starts
    // the fallback video loop.
    WakeLock.prototype.Begin = function () {
        var _this = this;
        if (this.LockElement == null) {
            try {
                navigator.wakeLock.request("screen").then(function (obj) {
                    _this.Logger.Log("WakeLock request successful. Lock acquired.");
                    _this.LockElement = obj;
                }, function () {
                    _this.Logger.Log("WakeLock request failed.");
                });
            }
            catch (err) {
                this.Logger.Log("WakeLock request failed.");
            }
        }
        else {
            this.Logger.Log("WakeLock video loop started.");
            this.LockElement.play();
        }
    };
    // Appends a <source> element with the given type and data URI to a video.
    WakeLock.AddSourceToVideo = function (element, type, dataURI) {
        var source = document.createElement('source');
        source.src = dataURI;
        source.type = 'video/' + type;
        element.appendChild(source);
    };
    // Base64-encoded minimal looping videos used by the fallback method.
    WakeLock.VideoWebm = 'GkXfo0AgQoaBAUL3gQFC8oEEQvOBCEKCQAR3ZWJtQoeBAkKFgQIYU4BnQI0VSalmQCgq17FAAw9CQE2AQAZ3aGFtbXlXQUAGd2hhbW15RIlACECPQAAAAAAAFlSua0AxrkAu14EBY8WBAZyBACK1nEADdW5khkAFVl9WUDglhohAA1ZQOIOBAeBABrCBCLqBCB9DtnVAIueBAKNAHIEAAIAwAQCdASoIAAgAAUAmJaQAA3AA/vz0AAA=';
    WakeLock.VideoMp4 = 'AAAAHGZ0eXBpc29tAAACAGlzb21pc28ybXA0MQAAAAhmcmVlAAAAG21kYXQAAAGzABAHAAABthADAowdbb9/AAAC6W1vb3YAAABsbXZoZAAAAAB8JbCAfCWwgAAAA+gAAAAAAAEAAAEAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIAAAIVdHJhawAAAFx0a2hkAAAAD3wlsIB8JbCAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAQAAAAAAIAAAACAAAAAABsW1kaWEAAAAgbWRoZAAAAAB8JbCAfCWwgAAAA+gAAAAAVcQAAAAAAC1oZGxyAAAAAAAAAAB2aWRlAAAAAAAAAAAAAAAAVmlkZW9IYW5kbGVyAAAAAVxtaW5mAAAAFHZtaGQAAAABAAAAAAAAAAAAAAAkZGluZgAAABxkcmVmAAAAAAAAAAEAAAAMdXJsIAAAAAEAAAEcc3RibAAAALhzdHNkAAAAAAAAAAEAAACobXA0dgAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAIAAgASAAAAEgAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABj//wAAAFJlc2RzAAAAAANEAAEABDwgEQAAAAADDUAAAAAABS0AAAGwAQAAAbWJEwAAAQAAAAEgAMSNiB9FAEQBFGMAAAGyTGF2YzUyLjg3LjQGAQIAAAAYc3R0cwAAAAAAAAABAAAAAQAAAAAAAAAcc3RzYwAAAAAAAAABAAAAAQAAAAEAAAABAAAAFHN0c3oAAAAAAAAAEwAAAAEAAAAUc3RjbwAAAAAAAAABAAAALAAAAGB1ZHRhAAAAWG1ldGEAAAAAAAAAIWhkbHIAAAAAAAAAAG1kaXJhcHBsAAAAAAAAAAAAAAAAK2lsc3QAAAAjqXRvbwAAABtkYXRhAAAAAQAAAABMYXZmNTIuNzguMw==';
    return WakeLock;
}());

View File

@@ -0,0 +1,26 @@
/*
Logging is part of 3LAS (Low Latency Live Audio Streaming)
https://github.com/JoJoBond/3LAS
*/
var Logging = /** @class */ (function () {
    /**
     * Simple timestamped logger. When a parent DOM element and child element
     * type are supplied, each log line is appended to the DOM; otherwise the
     * line goes to the console.
     */
    function Logging(parentElement, childElementType) {
        this.ParentElement = parentElement;
        this.ChildElementType = childElementType;
    }
    // Writes one message, prefixed with a zero-padded [HH:MM:SS] timestamp.
    Logging.prototype.Log = function (message) {
        var now = new Date();
        var pad2 = function (value) { return (value > 9 ? "" : "0") + value; };
        var lineText = "[" + pad2(now.getHours()) + ":" + pad2(now.getMinutes()) + ":" + pad2(now.getSeconds()) + "] " + message;
        if (this.ParentElement && this.ChildElementType) {
            var line = document.createElement(this.ChildElementType);
            line.innerText = lineText;
            this.ParentElement.appendChild(line);
        }
        else {
            console.log(lineText);
        }
    };
    return Logging;
}());
View File

@@ -0,0 +1,78 @@
/*
WebSocket client is part of 3LAS (Low Latency Live Audio Streaming)
https://github.com/JoJoBond/3LAS
*/
var WebSocketClient = /** @class */ (function () {
    /**
     * Thin wrapper around a binary WebSocket connection.
     * Connects immediately in the constructor and forwards socket events to the
     * supplied callbacks.
     * @param logger Logging instance (stored, not used in this file's code).
     * @param uri WebSocket URI to connect to.
     * @param errorCallback Invoked with a message on socket errors.
     * @param connectCallback Invoked once the connection is open.
     * @param dataReadyCallback Invoked with each received message payload.
     * @param disconnectCallback Invoked when an established connection closes.
     */
    function WebSocketClient(logger, uri, errorCallback, connectCallback, dataReadyCallback, disconnectCallback) {
        this.Logger = logger;
        this.Uri = uri;
        // Check callback arguments: all four are mandatory
        if (typeof errorCallback !== 'function')
            throw new Error('WebSocketClient: ErrorCallback must be specified');
        if (typeof connectCallback !== 'function')
            throw new Error('WebSocketClient: ConnectCallback must be specified');
        if (typeof dataReadyCallback !== 'function')
            throw new Error('WebSocketClient: DataReadyCallback must be specified');
        if (typeof disconnectCallback !== 'function')
            throw new Error('WebSocketClient: DisconnectCallback must be specified');
        this.ErrorCallback = errorCallback;
        this.ConnectCallback = connectCallback;
        this.DataReadyCallback = dataReadyCallback;
        this.DisconnectCallback = disconnectCallback;
        // Client is not yet connected
        this.IsConnected = false;
        // Create socket, connect to URI (legacy prefixed constructors as fallback)
        if (typeof WebSocket !== "undefined")
            this.Socket = new WebSocket(this.Uri);
        else if (typeof webkitWebSocket !== "undefined")
            this.Socket = new webkitWebSocket(this.Uri);
        else if (typeof mozWebSocket !== "undefined")
            this.Socket = new mozWebSocket(this.Uri);
        else
            throw new Error('WebSocketClient: Browser does not support "WebSocket".');
        // Receive binary payloads as ArrayBuffer rather than Blob
        this.Socket.binaryType = 'arraybuffer';
        this.Socket.addEventListener("open", this.OnOpen.bind(this));
        this.Socket.addEventListener("error", this.OnError.bind(this));
        this.Socket.addEventListener("close", this.OnClose.bind(this));
        this.Socket.addEventListener("message", this.OnMessage.bind(this));
    }
    Object.defineProperty(WebSocketClient.prototype, "Connected", {
        // True once the socket has opened and has not yet closed.
        get: function () {
            return this.IsConnected;
        },
        enumerable: false,
        configurable: true
    });
    // Sends a message; silently dropped while not connected.
    WebSocketClient.prototype.Send = function (message) {
        if (!this.IsConnected)
            return;
        this.Socket.send(message);
    };
    // Handle errors; the message distinguishes connect-time from runtime faults.
    WebSocketClient.prototype.OnError = function (_ev) {
        if (this.IsConnected == true)
            this.ErrorCallback("Socket fault.");
        else
            this.ErrorCallback("Could not connect to server.");
    };
    // Change connection status once connected (readyState 1 == OPEN)
    WebSocketClient.prototype.OnOpen = function (_ev) {
        if (this.Socket.readyState == 1) {
            this.IsConnected = true;
            this.ConnectCallback();
        }
    };
    // Change connection status on disconnect (readyState 2 == CLOSING, 3 == CLOSED).
    // Note: DisconnectCallback only fires for connections that were established.
    WebSocketClient.prototype.OnClose = function (_ev) {
        if (this.IsConnected == true && (this.Socket.readyState == 2 || this.Socket.readyState == 3)) {
            this.IsConnected = false;
            this.DisconnectCallback();
        }
    };
    // Handle incoming data
    WebSocketClient.prototype.OnMessage = function (ev) {
        // Trigger callback with the raw message payload
        this.DataReadyCallback(ev.data);
    };
    return WebSocketClient;
}());