Afficher image ou la remplacer par une autre selon l'état d'une variable en JS ?

Fermé
basilegrnd Messages postés 5 Date d'inscription mercredi 24 mars 2021 Statut Membre Dernière intervention 6 avril 2021 - Modifié le 6 avril 2021 à 09:16
Hey,
Je travaille actuellement sur un projet pouvant paraître étrange : recréer macOS en HTML.

Pour le Dictaphone, j'aimerais que lorsqu'on lance l'enregistrement, il affiche
<img src="stop record.svg">
et que quand il n'y a pas d'enregistrement en cours, on affiche
<img src="start record.svg">
.

C'est possible ? Et d'ailleurs dans ce cas-là, comment transmettre l'information au JS ?

Pour information, voici mon code (HTML et JavaScript) :


<!DOCTYPE html>

<html>
<head>
<meta charset="utf-8" />
<title>Online Big Sur | Dictaphone</title>

<!-- The <style> element must live inside <head>; it was previously placed
     between </head> and <body>, which is invalid HTML. -->
<style>
@font-face {
font-family: 'SF UI Display Light';
font-style: normal;
font-weight: normal;
src: local('SF UI Display Light'), url('sf-ui-display-light-58646b33e0551.woff') format('woff');
}

@font-face {
font-family: 'SF UI Display Medium';
font-style: normal;
font-weight: normal;
src: local('SF UI Display Medium'), url('sf-ui-display-medium-58646be638f96.woff') format('woff');
}

@font-face {
font-family: 'SF UI Display Semibold';
font-style: normal;
font-weight: normal;
src: local('SF UI Display Semibold'), url('sf-ui-display-semibold-58646eddcae92.woff') format('woff');
}

@font-face {
font-family: 'SF UI Display Bold';
font-style: normal;
font-weight: normal;
src: local('SF UI Display Bold'), url('sf-ui-display-bold-58646a511e3d9.woff') format('woff');
}

@font-face {
font-family: 'SF UI Display Heavy';
font-style: normal;
font-weight: normal;
src: local('SF UI Display Heavy'), url('sf-ui-display-heavy-586470160b9e5.woff') format('woff');
}

body {
background-color: #EEE;
font-family: "SF UI Display Bold";
color: white;
font-size: 40px;
}

.red {
background-color: red;
height: 40px;
width: 120px;
border-radius: 30px;
border: none;
font-family: "SF UI Display Semibold";
font-size: 15px;
margin: 10px;
}
</style>
</head>

<body>
<!-- The HTML height/width attributes take bare pixel numbers, not CSS
     lengths ("35px" is invalid here). -->
<center><img src="blink.png" height="35" width="1"><br/>
Dictaphone<br/>
<button id="startRecordingButton" class="red">D&eacute;marrer</button><br/>
<button id="stopRecordingButton" class="red">Arr&ecirc;ter</button><br/>
<button id="playButton" class="red">&Eacute;couter</button><br/>
<button id="downloadButton" class="red">T&eacute;l&eacute;charger</button><br/>
</center>
</body>
</html>

<script>
// UI handles for the four dictaphone buttons.
const startRecordingButton = document.getElementById("startRecordingButton");
const stopRecordingButton = document.getElementById("stopRecordingButton");
const playButton = document.getElementById("playButton");
const downloadButton = document.getElementById("downloadButton");

// Recording state shared between the click handlers.
const leftchannel = [];        // Float32Array chunks, left channel
const rightchannel = [];       // Float32Array chunks, right channel
let recorder = null;           // ScriptProcessorNode capturing the mic
let recordingLength = 0;       // total samples captured per channel
let volume = null;             // reserved for a future gain node
let mediaStream = null;        // MediaStreamAudioSourceNode for the mic
const sampleRate = 44100;      // sample rate written into the WAV header
let context = null;            // AudioContext, created on first record
let blob = null;               // finished WAV recording, or null

startRecordingButton.addEventListener("click", function (){
// Clear state from any previous take, otherwise a second recording
// would be appended to the first one when the WAV is built.
leftchannel.length = 0;
rightchannel.length = 0;
recordingLength = 0;

// navigator.getUserMedia and its vendor-prefixed variants are deprecated;
// prefer the promise-based navigator.mediaDevices.getUserMedia and fall
// back to the legacy callback API only when it is missing.
var requestMicrophone;
if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
requestMicrophone = function (constraints) {
return navigator.mediaDevices.getUserMedia(constraints);
};
} else {
var legacyGetUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
requestMicrophone = function (constraints) {
return new Promise(function (resolve, reject) {
legacyGetUserMedia.call(navigator, constraints, resolve, reject);
});
};
}

requestMicrophone({ audio: true }).then(function (stream) {
console.log("user consent");

// creates the audio context
window.AudioContext = window.AudioContext || window.webkitAudioContext;
context = new AudioContext();

// creates an audio node from the microphone incoming stream
mediaStream = context.createMediaStreamSource(stream);

// https://developer.mozilla.org/en-US/docs/Web/API/BaseAudioContext/createScriptProcessor
// bufferSize: the onaudioprocess event is called when the buffer is full
var bufferSize = 2048;
var numberOfInputChannels = 2;
var numberOfOutputChannels = 2;
if (context.createScriptProcessor) {
recorder = context.createScriptProcessor(bufferSize, numberOfInputChannels, numberOfOutputChannels);
} else {
// very old WebKit fallback name for the same node
recorder = context.createJavaScriptNode(bufferSize, numberOfInputChannels, numberOfOutputChannels);
}

recorder.onaudioprocess = function (e) {
// copy each buffer — the browser recycles the underlying memory
leftchannel.push(new Float32Array(e.inputBuffer.getChannelData(0)));
rightchannel.push(new Float32Array(e.inputBuffer.getChannelData(1)));
recordingLength += bufferSize;
};

// we connect the recorder
mediaStream.connect(recorder);
recorder.connect(context.destination);
}).catch(function (e) {
console.error(e);
});
});

stopRecordingButton.addEventListener("click", function (){
// Guard: "Arrêter" clicked before any recording was started would
// otherwise throw on recorder.disconnect.
if (recorder === null) {
return;
}

// stop recording
recorder.disconnect(context.destination);
mediaStream.disconnect(recorder);

// we flat the left and right channels down
// Float32Array[] => Float32Array
var leftBuffer = flattenArray(leftchannel, recordingLength);
var rightBuffer = flattenArray(rightchannel, recordingLength);
// we interleave both channels together
// [left[0],right[0],left[1],right[1],...]
var interleaved = interleave(leftBuffer, rightBuffer);

// 44-byte canonical RIFF/WAVE header followed by 16-bit PCM samples
var buffer = new ArrayBuffer(44 + interleaved.length * 2);
var view = new DataView(buffer);

writeUTFBytes(view, 0, 'RIFF');
view.setUint32(4, 44 + interleaved.length * 2, true);
writeUTFBytes(view, 8, 'WAVE');
writeUTFBytes(view, 12, 'fmt ');
view.setUint32(16, 16, true); // chunkSize
view.setUint16(20, 1, true); // wFormatTag (1 = PCM)
view.setUint16(22, 2, true); // wChannels: stereo (2 channels)
view.setUint32(24, sampleRate, true); // dwSamplesPerSec
view.setUint32(28, sampleRate * 4, true); // dwAvgBytesPerSec
view.setUint16(32, 4, true); // wBlockAlign
view.setUint16(34, 16, true); // wBitsPerSample
// data sub-chunk
writeUTFBytes(view, 36, 'data');
view.setUint32(40, interleaved.length * 2, true);

// write the PCM samples; clamp to [-1, 1] first — out-of-range floats
// would wrap around in setInt16 and produce loud clicks
var index = 44;
for (var i = 0; i < interleaved.length; i++) {
var sample = Math.max(-1, Math.min(1, interleaved[i]));
view.setInt16(index, sample * 0x7FFF, true);
index += 2;
}

blob = new Blob([view], { type: 'audio/wav' });
});

playButton.addEventListener("click", function (){
// Nothing has been recorded yet — nothing to play.
if (blob == null) {
return;
}

// Wrap the WAV blob in an object URL and hand it to an Audio element.
var playbackUrl = window.URL.createObjectURL(blob);
var player = new Audio(playbackUrl);
player.play();
});

downloadButton.addEventListener("click", function (){
// Nothing has been recorded yet — nothing to download.
if (blob == null) {
return;
}

var url = URL.createObjectURL(blob);

// Trigger the download through a hidden, short-lived anchor element.
var a = document.createElement("a");
document.body.appendChild(a);
a.style.display = "none";
a.href = url;
// The blob holds WAV data (type 'audio/wav'); the previous ".mp3"
// extension produced a file many players refuse to open.
a.download = "Nouvel Enregistrement.wav";
a.click();
window.URL.revokeObjectURL(url);
// Remove the helper anchor so repeated clicks don't pile up DOM nodes.
document.body.removeChild(a);
});

// Concatenate an array of Float32Array chunks into one contiguous
// Float32Array holding `recordingLength` samples.
function flattenArray(channelBuffer, recordingLength) {
var flat = new Float32Array(recordingLength);
var writePos = 0;
for (var chunk of channelBuffer) {
flat.set(chunk, writePos);
writePos += chunk.length;
}
return flat;
}

// Weave two mono channels into a single stereo stream:
// [L0, R0, L1, R1, ...]. Both inputs are expected to be the same length.
function interleave(leftChannel, rightChannel) {
var total = leftChannel.length + rightChannel.length;
var mixed = new Float32Array(total);

for (var frame = 0; frame * 2 < total; frame++) {
mixed[frame * 2] = leftChannel[frame];
mixed[frame * 2 + 1] = rightChannel[frame];
}
return mixed;
}

// Write each UTF-16 code unit of `string` as one byte into `view`,
// starting at `offset`. Used for the ASCII tags of the WAV header
// ('RIFF', 'WAVE', 'fmt ', 'data').
function writeUTFBytes(view, offset, string) {
var pos = offset;
var count = string.length;
for (var idx = 0; idx < count; idx++) {
view.setUint8(pos, string.charCodeAt(idx));
pos += 1;
}
}

</script>