rfb: add qemu audio support
Signed-off-by: Dietmar Maurer <dietmar@proxmox.com>
This commit is contained in:
parent
154653523c
commit
9523c996bf
|
@ -333,6 +333,12 @@ const UI = {
|
|||
.addEventListener('click', UI.rejectServer);
|
||||
document.getElementById("noVNC_credentials_button")
|
||||
.addEventListener('click', UI.setCredentials);
|
||||
|
||||
document.addEventListener('click', function(event) {
|
||||
if (UI.rfb !== undefined) {
|
||||
UI.rfb.allow_audio();
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
addClipboardHandlers() {
|
||||
|
|
|
@ -24,6 +24,7 @@ export const encodings = {
|
|||
pseudoEncodingLastRect: -224,
|
||||
pseudoEncodingCursor: -239,
|
||||
pseudoEncodingQEMUExtendedKeyEvent: -258,
|
||||
pseudoEncodingQEMUAudioEvent: -259,
|
||||
pseudoEncodingQEMULedEvent: -261,
|
||||
pseudoEncodingDesktopName: -307,
|
||||
pseudoEncodingExtendedDesktopSize: -308,
|
||||
|
|
117
core/rfb.js
117
core/rfb.js
|
@ -154,6 +154,13 @@ export default class RFB extends EventTargetMixin {
|
|||
|
||||
this._qemuExtKeyEventSupported = false;
|
||||
|
||||
this._qemuAudioSupported = false;
|
||||
this._page_had_user_interaction = false;
|
||||
this._audio_next_start = 0;
|
||||
this._audio_sample_rate = 44100;
|
||||
this._audio_channels = 2;
|
||||
this._audio_context = null;
|
||||
|
||||
this._extendedPointerEventSupported = false;
|
||||
|
||||
this._clipboardText = null;
|
||||
|
@ -991,7 +998,6 @@ export default class RFB extends EventTargetMixin {
|
|||
Log.Warn("handleMessage called on an empty receive queue");
|
||||
return;
|
||||
}
|
||||
|
||||
switch (this._rfbConnectionState) {
|
||||
case 'disconnected':
|
||||
Log.Error("Got data while disconnected");
|
||||
|
@ -2256,6 +2262,7 @@ export default class RFB extends EventTargetMixin {
|
|||
encs.push(encodings.pseudoEncodingDesktopSize);
|
||||
encs.push(encodings.pseudoEncodingLastRect);
|
||||
encs.push(encodings.pseudoEncodingQEMUExtendedKeyEvent);
|
||||
encs.push(encodings.pseudoEncodingQEMUAudioEvent);
|
||||
encs.push(encodings.pseudoEncodingQEMULedEvent);
|
||||
encs.push(encodings.pseudoEncodingExtendedDesktopSize);
|
||||
encs.push(encodings.pseudoEncodingXvp);
|
||||
|
@ -2611,6 +2618,9 @@ export default class RFB extends EventTargetMixin {
|
|||
case 250: // XVP
|
||||
return this._handleXvpMsg();
|
||||
|
||||
case 255: // Qemu Server Message
|
||||
return this._handleQemuAudioEvent();
|
||||
|
||||
default:
|
||||
this._fail("Unexpected server message (type " + msgType + ")");
|
||||
Log.Debug("sock.rQpeekBytes(30): " + this._sock.rQpeekBytes(30));
|
||||
|
@ -2683,6 +2693,13 @@ export default class RFB extends EventTargetMixin {
|
|||
this._qemuExtKeyEventSupported = true;
|
||||
return true;
|
||||
|
||||
case encodings.pseudoEncodingQEMUAudioEvent:
|
||||
if (!this._qemuAudioSupported) {
|
||||
RFB.messages.enableQemuAudioUpdates(this._sock, this._audio_channels, this._audio_sample_rate);
|
||||
this._qemuAudioSupported = true;
|
||||
}
|
||||
return true;
|
||||
|
||||
case encodings.pseudoEncodingDesktopName:
|
||||
return this._handleDesktopName();
|
||||
|
||||
|
@ -2705,6 +2722,88 @@ export default class RFB extends EventTargetMixin {
|
|||
}
|
||||
}
|
||||
|
||||
_handleQemuAudioEvent() {
|
||||
if (this._sock.rQwait("Qemu Audio Event", 3, 1)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const submsg = this._sock.rQshift8();
|
||||
const operation = this._sock.rQshift16();
|
||||
|
||||
switch (operation) {
|
||||
case 0: {
|
||||
this._audio_context = null;
|
||||
this._audio_next_start = 0;
|
||||
return true;
|
||||
}
|
||||
case 1: {
|
||||
this._audio_context = new AudioContext({
|
||||
latencyHint: "interactive",
|
||||
sampleRate: this._audio_sample_rate,
|
||||
});
|
||||
this._audio_next_start = 0;
|
||||
return true;
|
||||
}
|
||||
case 2: break;
|
||||
default: {
|
||||
Log.Warn("The given qemu audio opertaion + opertaion + is not supported.");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if (this._sock.rQwait("Qemu Audio payload length", 4, 4)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const length = this._sock.rQshift32();
|
||||
|
||||
if (this._sock.rQwait("audio payload", length, 8)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (length !== 0) {
|
||||
let payload = this._sock.rQshiftBytes(length, false);
|
||||
|
||||
if (this._audio_context === null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
let sample_bytes = 2*this._audio_channels;
|
||||
let buffer = this._audio_context.createBuffer(this._audio_channels, length/sample_bytes, this._audio_sample_rate);
|
||||
|
||||
for (let ch = 0; ch < this._audio_channels; ch++) {
|
||||
const channel = buffer.getChannelData(ch);
|
||||
let channel_offset = ch*2;
|
||||
for (let i = 0; i < buffer.length; i++) {
|
||||
let p = i*sample_bytes + channel_offset;
|
||||
let value = payload[p] + payload[p+1]*256;
|
||||
channel[i] = (value / 32768.0) - 1.0;
|
||||
}
|
||||
}
|
||||
|
||||
if (this._page_had_user_interaction) {
|
||||
let ctime = this._audio_context.currentTime;
|
||||
if (ctime > this._audio_next_start) {
|
||||
this._audio_next_start = ctime;
|
||||
}
|
||||
let start_time = this._audio_next_start;
|
||||
|
||||
this._audio_next_start += buffer.duration;
|
||||
|
||||
let source = this._audio_context.createBufferSource();
|
||||
source.buffer = buffer;
|
||||
source.connect(this._audio_context.destination);
|
||||
source.start(start_time);
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
 * Mark that the page has received a user interaction (e.g. a click),
 * which unlocks scheduling of received audio buffers in
 * _handleQemuAudioEvent(). Browsers refuse to start audio playback
 * before a user gesture, so the UI calls this from a click handler.
 */
allow_audio() {
    this._page_had_user_interaction = true;
}
|
||||
|
||||
_handleVMwareCursor() {
|
||||
const hotx = this._FBU.x; // hotspot-x
|
||||
const hoty = this._FBU.y; // hotspot-y
|
||||
|
@ -3314,6 +3413,22 @@ RFB.messages = {
|
|||
sock.flush();
|
||||
},
|
||||
|
||||
enableQemuAudioUpdates(sock, channels, sample_rate) {
|
||||
|
||||
sock.sQpush8(255); // msg-type
|
||||
sock.sQpush8(1); // submessage-type
|
||||
sock.sQpush16(2); // set sample format
|
||||
sock.sQpush8(2); // format U16
|
||||
sock.sQpush8(channels);
|
||||
sock.sQpush32(sample_rate); // audio frequency
|
||||
|
||||
sock.sQpush8(255); // msg-type
|
||||
sock.sQpush8(1); // submessage-type
|
||||
sock.sQpush16(0); // enable audio
|
||||
|
||||
sock.flush();
|
||||
},
|
||||
|
||||
pixelFormat(sock, depth, trueColor) {
|
||||
let bpp;
|
||||
|
||||
|
|
Loading…
Reference in New Issue