Files
ggwave/examples/buttons/index-tmpl.html
Georgi Gerganov f5e08d921b ggwave : various improvements
- faster FFT implementation
- built-in Direct Sequence Spread option
- remove <map> dependency from implementation
- update arduino-rx example
2022-07-04 21:18:20 +03:00

227 lines
9.0 KiB
HTML

<!doctype html>
<html lang="en-us">
<head>
<title>ggwave : buttons</title>
</head>
<body>
<div id="main-container">
Talking buttons tester:
<br><br>
<button onclick="onSend('RRR');">Red</button>
<button onclick="onSend('GGG');">Green</button>
<button onclick="onSend('BBB');">Blue</button>
<br><br>
<i>If you have Fluent Pet talking buttons, enable this option during the recording:</i><br><br>
<input type="checkbox" id="checkbox-fp"><label for="checkbox-fp">Fluent Pet</label>
<br>
<textarea name="textarea" id="rxData" style="width:300px;height:100px;" disabled hidden></textarea><br>
<button id="captureStart">Start capturing</button>
<button id="captureStop" hidden>Stop capturing</button>
<br><br>
<div class="cell-version">
<span>
|
Build time: <span class="nav-link">@GIT_DATE@</span> |
Commit hash: <a class="nav-link" href="https://github.com/ggerganov/ggwave/commit/@GIT_SHA1@">@GIT_SHA1@</a> |
Commit subject: <span class="nav-link">@GIT_COMMIT_SUBJECT@</span> |
<a class="nav-link" href="https://github.com/ggerganov/ggwave/tree/master/examples/ggwave-js">Source Code</a> |
</span>
</div>
</div>
<script type="text/javascript" src="ggwave.js"></script>
<script type='text/javascript'>
// Prefer the standard AudioContext; fall back to the prefixed WebKit
// variants for older Safari.
window.AudioContext = window.AudioContext || window.webkitAudioContext;
window.OfflineAudioContext = window.OfflineAudioContext || window.webkitOfflineAudioContext;

// Lazily-created audio state (see init()):
var context = null;   // shared AudioContext, created on first user action
var recorder = null;  // script-processor node capturing microphone audio

// the ggwave module instances
var ggwave = null;           // default instance
var ggwave_FluentPet = null; // second instance tuned for Fluent Pet buttons
var parameters = null;
var instance = null;

// every payload in this demo is exactly 3 bytes ('RRR' / 'GGG' / 'BBB')
const kPayloadLength = 3;

// instantiate the ggwave instance
// ggwave_factory comes from the ggwave.js module
ggwave_factory().then(function (obj) {
    ggwave = obj;
});

// instantiate a second instance specific for the Fluent Pet talking buttons
ggwave_factory().then(function (obj) {
    ggwave_FluentPet = obj;
});

// DOM references. (The original `txData` lookup was removed: no element
// with id "txData" exists in this page, so it was always null and unused.)
var rxData = document.getElementById("rxData");
var captureStart = document.getElementById("captureStart");
var captureStop = document.getElementById("captureStop");
// helper function
// helper function: reinterpret the bytes of one typed array as another
// typed-array type (e.g. Float32Array -> Int8Array). The bytes are copied
// into a fresh ArrayBuffer, so the result does not alias `src`.
function convertTypedArray(src, type) {
    var buffer = new ArrayBuffer(src.byteLength);
    // copy src's contents into the new buffer; TypedArray.prototype.set()
    // returns undefined, so (unlike the original) its result is not
    // pointlessly assigned to a dead variable
    new src.constructor(buffer).set(src);
    return new type(buffer);
}
// initialize audio context and ggwave
function init() {
if (!context) {
context = new AudioContext({sampleRate: 48000});
parameters = ggwave.getDefaultParameters();
parameters.payloadLength = kPayloadLength;
parameters.sampleRateInp = context.sampleRate;
parameters.sampleRateOut = context.sampleRate;
parameters.operatingMode = GGWAVE_OPERATING_MODE_RX_AND_TX | GGWAVE_OPERATING_MODE_USE_DSS;
instance = ggwave.init(parameters);
parameters = ggwave_FluentPet.getDefaultParameters();
parameters.payloadLength = kPayloadLength;
parameters.sampleRateInp = context.sampleRate;
parameters.sampleRateOut = context.sampleRate - 512;
parameters.operatingMode = GGWAVE_OPERATING_MODE_RX_AND_TX | GGWAVE_OPERATING_MODE_USE_DSS;
instance = ggwave_FluentPet.init(parameters);
}
}
//
// Tx
//
//
// Tx
//
// Encode `text` with ggwave and play the resulting waveform through the
// shared AudioContext. The "Fluent Pet" checkbox selects which ggwave
// module instance performs the encoding.
function onSend(text) {
    init();

    // pack the message into a byte buffer, one byte per UTF-16 code unit
    var payload = new Uint8Array(text.length);
    for (var i = text.length; i-- > 0; ) {
        payload[i] = text.charCodeAt(i);
    }

    // generate audio waveform with the module matching the checkbox state
    var useFluentPet = document.getElementById("checkbox-fp").checked;
    var mod = useFluentPet ? ggwave_FluentPet : ggwave;
    var waveform = mod.encode(instance, payload, mod.TxProtocolId.GGWAVE_TX_PROTOCOL_DT_FAST, 25);

    // play audio: wrap the samples in an AudioBuffer and start it right away
    var samples = convertTypedArray(waveform, Float32Array);
    var audioBuffer = context.createBuffer(1, samples.length, context.sampleRate);
    audioBuffer.getChannelData(0).set(samples);

    var source = context.createBufferSource();
    source.buffer = audioBuffer;
    source.connect(context.destination);
    source.start(0);
}
//
// Rx
//
//
// Rx
//
// Start capturing: open the microphone, wire it through a script-processor
// node, and decode each 1024-sample chunk with ggwave.
captureStart.addEventListener("click", function () {
init();
let constraints = {
audio: {
// not sure if these are necessary to have
echoCancellation: false,
autoGainControl: false,
noiseSuppression: false
}
};
navigator.mediaDevices.getUserMedia(constraints).then(function (e) {
// NOTE(review): `mediaStream` is never declared with var/let, so this
// assignment creates an implicit global. The captureStop handler below
// relies on that global — declare it at the top level instead.
mediaStream = context.createMediaStreamSource(e);
var bufferSize = 1024;
var numberOfInputChannels = 1;
var numberOfOutputChannels = 1;
// createScriptProcessor is the standard name; createJavaScriptNode is the
// legacy fallback for very old browsers
if (context.createScriptProcessor) {
recorder = context.createScriptProcessor(
bufferSize,
numberOfInputChannels,
numberOfOutputChannels);
} else {
recorder = context.createJavaScriptNode(
bufferSize,
numberOfInputChannels,
numberOfOutputChannels);
}
// decode each captured chunk; ggwave.decode expects the float samples
// reinterpreted as raw bytes (Int8Array view)
recorder.onaudioprocess = function (e) {
var source = e.inputBuffer;
// NOTE(review): decoding always uses the default `ggwave` module here,
// even when the Fluent Pet checkbox selected ggwave_FluentPet for
// encoding, and `instance` is the handle from ggwave_FluentPet.init()
// (see init()) — confirm this pairing is intentional.
var res = ggwave.decode(instance, convertTypedArray(new Float32Array(source.getChannelData(0)), Int8Array));
if (res && res.length == kPayloadLength) {
// DSS
// convert the 3 decoded bytes back into an ASCII string
var payload = "";
res8 = convertTypedArray(res, Uint8Array);
for (var i = 0; i < 3; i++) {
payload += String.fromCharCode(res8[i]);
}
// only the three known button payloads are acted upon
if (payload == 'RRR' || payload == 'GGG' || payload == 'BBB') {
rxData.value = payload;
// change page background color
switch (payload) {
case 'RRR':
document.body.style.backgroundColor = '#ff0000';
break;
case 'GGG':
document.body.style.backgroundColor = '#00ff00';
break;
case 'BBB':
document.body.style.backgroundColor = '#0000ff';
break;
}
// restore the white background after one second
setTimeout(function () {
document.body.style.backgroundColor = '#ffffff';
}, 1000);
}
}
}
// mic -> recorder -> speakers (connection to destination is required for
// onaudioprocess to fire in some browsers)
mediaStream.connect(recorder);
recorder.connect(context.destination);
}).catch(function (e) {
console.error(e);
});
rxData.value = 'Listening ...';
captureStart.hidden = true;
captureStop.hidden = false;
});
// Stop capturing: tear down the recorder graph (if one is active) and flip
// the start/stop buttons back to their idle state.
captureStop.addEventListener("click", () => {
    const node = recorder;
    if (node) {
        node.disconnect(context.destination);
        mediaStream.disconnect(node);
        recorder = null;
    }
    rxData.value = 'Audio capture is paused! Press the "Start capturing" button to analyze audio from the microphone';
    captureStart.hidden = false;
    captureStop.hidden = true;
});

// start the page in the "paused" state
captureStop.click();
</script>
</body>
</html>