audio with opus codec

This commit is contained in:
trichimtrich 2019-04-16 07:09:36 +08:00
parent 243d547632
commit f9df886dee
10 changed files with 376 additions and 237 deletions

155
main.go

@@ -19,11 +19,10 @@ import (
"github.com/gorilla/websocket"
pionRTC "github.com/pion/webrtc"
// "gopkg.in/hraban/opus.v2"
"github.com/xlab/opus-go/opus"
"github.com/gordonklaus/portaudio"
"gopkg.in/hraban/opus.v2"
)
const (
width = 256
height = 240
@@ -33,6 +32,7 @@ const (
debugIndex = "./static/index_ws.html"
)
var indexFN = gameboyIndex
// Time allowed to write a message to the peer.
@@ -308,140 +308,73 @@ func fanoutScreen(imageChannel chan *image.RGBA, roomID string) {
if isRoomRunning == false {
log.Println("Closed room from screen routine", roomID)
rooms[roomID].closedChannel <- true
return
}
}
}
var cc chan float32 = make(chan float32, 100000)
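// Callback is the portaudio output callback used for local audio monitoring:
// it drains decoded samples from cc into the even (left-channel) slots of the
// output buffer and falls back to silence whenever the channel is empty.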
func Callback(out []float32) {
var output float32
for i := range out {
if i % 2 == 0 {
select {
case sample := <-cc:
output = sample
default:
output = 0
}
out[i] = output
}
}
}
// fanoutAudio encodes PCM from audioChannel and fans the packets out to all WebRTC sessions in the same room
func fanoutAudio(audioChannel chan float32, roomID string) {
var output float32
pcm := make([]float32, 240)
log.Println("Enter fan audio")
portaudio.Initialize()
defer portaudio.Terminate()
enc, err := opus.NewEncoder(ui.SampleRate, ui.Channels, opus.AppAudio)
host, err := portaudio.DefaultHostApi()
if err != nil {
log.Println(err)
return
}
parameters := portaudio.HighLatencyParameters(nil, host.DefaultOutputDevice)
stream, err := portaudio.OpenStream(parameters, Callback)
if err != nil {
log.Println(err)
return
}
log.Println(parameters.SampleRate, parameters.Output.Channels)
stream.Start()
// dec, err := opus.NewDecoder(48000, 2)
// enc, err := opus.NewEncoder(48000, 2, opus.AppAudio)
// // ix, _ := enc.DTX() //false
// // ix1, _ := enc.Bitrate() //120000
// // ix2, _ := enc.Complexity() //9
// // ix3, _ := enc.MaxBandwidth() //1105
// // ix4, _ := enc.PacketLossPerc() //0
// enc.SetMaxBandwidth(opus.Fullband)
// enc.SetBitrateToAuto()
// enc.SetComplexity(10)
var err2 int32
dec := opus.DecoderCreate(48000, 2, &err2)
enc := opus.EncoderCreate(48000, 2, opus.ApplicationAudio, &err2)
maxBufferSize := ui.TimeFrame * ui.SampleRate / 1000
pcm := make([]float32, maxBufferSize) // 640 * 1000 / 16000 == 40 ms
idx := 0
if err != nil {
log.Println("[!] Cannot create audio encoder")
return
}
c := time.Tick(time.Microsecond * 2500)
for {
pcm[idx] = <- audioChannel
idx ++
for range c {
// for {
for i := 0; i < len(pcm); i++ {
if i % 2 == 0 {
select {
case sample := <- audioChannel:
output = sample
default:
output = 0
}
pcm[i] = output
}
}
if idx >= len(pcm) {
data := make([]byte, 640)
data := make([]byte, 1000)
// n, err := enc.EncodeFloat32(pcm, data)
n := opus.EncodeFloat(enc, pcm, 120, data, 1000)
if err != nil {
log.Println("[!] Failed to decode")
continue
}
data = data[:n]
pcm2 := make([]float32, 1000)
// n2, err := dec.DecodeFloat32(data, pcm2)
n2 := opus.DecodeFloat(dec, string(data), n, pcm2, 1000, 0)
pcm2 = pcm2[:n2]
for i := 0; i < int(n2); i++ {
cc <- pcm2[i]
}
log.Println(n, n2)
isRoomRunning := false
for _, webRTC := range rooms[roomID].rtcSessions {
// Client stopped
if webRTC.IsClosed() {
n, err := enc.EncodeFloat32(pcm, data)
// n := opus.EncodeFloat(enc, pcm, 120, data, 1000)
if err != nil {
log.Println("[!] Failed to decode")
continue
}
data = data[:n]
// encode frame
// fanout imageChannel
if webRTC.IsConnected() {
// NOTE: can block here
webRTC.AudioChannel <- data
isRoomRunning := false
for _, webRTC := range rooms[roomID].rtcSessions {
// Client stopped
if webRTC.IsClosed() {
continue
}
// encode frame
// fanout imageChannel
if webRTC.IsConnected() {
// NOTE: can block here
webRTC.AudioChannel <- data
}
isRoomRunning = true
}
isRoomRunning = true
}
if isRoomRunning == false {
log.Println("Closed room from audio routine", roomID)
rooms[roomID].closedChannel <- true
if isRoomRunning == false {
log.Println("Closed room from audio routine", roomID)
rooms[roomID].closedChannel <- true
return
}
idx = 0
}
}
}
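For reference, a minimal self-contained sketch of the encode path above, using the gopkg.in/hraban/opus.v2 API this commit switches to. The constants mirror ui.SampleRate, ui.Channels and ui.TimeFrame; the PCM fill is left as a stub, and the final fan-out to each session is only hinted at in a comment.
package main
import (
	"log"
	"gopkg.in/hraban/opus.v2"
)
const (
	sampleRate = 16000 // ui.SampleRate
	channels   = 1     // ui.Channels
	frameMs    = 40    // ui.TimeFrame
)
func main() {
	enc, err := opus.NewEncoder(sampleRate, channels, opus.AppAudio)
	if err != nil {
		log.Fatalln("[!] Cannot create audio encoder:", err)
	}
	// One frame: 40 * 16000 / 1000 == 640 mono samples (40 ms).
	pcm := make([]float32, frameMs*sampleRate/1000)
	// ... fill pcm from the emulator's audio channel here ...
	buf := make([]byte, 1000)
	n, err := enc.EncodeFloat32(pcm, buf)
	if err != nil {
		log.Fatalln("[!] Failed to encode:", err)
	}
	log.Println("opus packet:", n, "bytes") // buf[:n] is what fanoutAudio pushes to each WebRTC session
}
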
// faninInput fans in player input from all sessions in the same room to inputChannel
func faninInput(inputChannel chan int, webRTC *webrtc.WebRTC, playerIndex int) {
for {


@@ -10,7 +10,9 @@ textarea {
<select id="gameOp">
</select>
<button id="play" onclick="window.startGame()">Play</button><br/><br/>
<button id="play" onclick="window.startGame()">Play</button>
<button id="play" onclick="pc.close()">Stop</button>
<button id="play" onclick="window.hihi()">Magic</button><br/><br/>
Your current room: <b><label id="currentRoomID" style="color:white"></label></b> <br />
You can join a remote game by roomID.<br />
Room ID: <input type="text" id="roomID">
@@ -51,6 +53,10 @@ Play as player(1,2): <select id="playerIndex">
DEBUG = true;
</script>
<!-- https://rawgit.com/Rillke/opus.js-sample/master/index.xhtml -->
<script src="/static/js/libopus.js"></script>
<script src="/static/js/opus.js"></script>
<script src="https://code.jquery.com/jquery-3.3.1.min.js"></script>
<script src="/static/js/const.js"></script>
<script src="/static/js/global.js"></script>
@@ -58,6 +64,11 @@ Play as player(1,2): <select id="playerIndex">
<script src="/static/js/ws.js"></script>
<script>
function hihi() {
track = stream.getAudioTracks()[0];
console.log(track);
}
GAME_LIST.forEach(e => {
ee = document.createElement("option");
ee.value = e.nes;

31
static/js/libopus.js Normal file

File diff suppressed because one or more lines are too long

200
static/js/opus.js Normal file

@@ -0,0 +1,200 @@
///<reference path="d.ts/asm.d.ts" />
///<reference path="d.ts/libopus.d.ts" />
var OpusApplication;
(function (OpusApplication) {
OpusApplication[OpusApplication["VoIP"] = 2048] = "VoIP";
OpusApplication[OpusApplication["Audio"] = 2049] = "Audio";
OpusApplication[OpusApplication["RestrictedLowDelay"] = 2051] = "RestrictedLowDelay";
})(OpusApplication || (OpusApplication = {}));
var OpusError;
(function (OpusError) {
OpusError[OpusError["OK"] = 0] = "OK";
OpusError[OpusError["BadArgument"] = -1] = "BadArgument";
OpusError[OpusError["BufferTooSmall"] = -2] = "BufferTooSmall";
OpusError[OpusError["InternalError"] = -3] = "InternalError";
OpusError[OpusError["InvalidPacket"] = -4] = "InvalidPacket";
OpusError[OpusError["Unimplemented"] = -5] = "Unimplemented";
OpusError[OpusError["InvalidState"] = -6] = "InvalidState";
OpusError[OpusError["AllocFail"] = -7] = "AllocFail";
})(OpusError || (OpusError = {}));
var Opus = (function () {
function Opus() {
}
Opus.getVersion = function () {
var ptr = _opus_get_version_string();
return Pointer_stringify(ptr);
};
Opus.getMaxFrameSize = function (numberOfStreams) {
if (numberOfStreams === void 0) { numberOfStreams = 1; }
return (1275 * 3 + 7) * numberOfStreams;
};
Opus.getMinFrameDuration = function () {
return 2.5;
};
Opus.getMaxFrameDuration = function () {
return 60;
};
Opus.validFrameDuration = function (x) {
return [2.5, 5, 10, 20, 40, 60].some(function (element) {
return element == x;
});
};
Opus.getMaxSamplesPerChannel = function (sampling_rate) {
return sampling_rate / 1000 * Opus.getMaxFrameDuration();
};
return Opus;
})();
var OpusEncoder = (function () {
function OpusEncoder(sampling_rate, channels, app, frame_duration) {
if (frame_duration === void 0) { frame_duration = 20; }
this.handle = 0;
this.frame_size = 0;
this.in_ptr = 0;
this.in_off = 0;
this.out_ptr = 0;
if (!Opus.validFrameDuration(frame_duration))
throw 'invalid frame duration';
this.frame_size = sampling_rate * frame_duration / 1000;
var err_ptr = allocate(4, 'i32', ALLOC_STACK);
this.handle = _opus_encoder_create(sampling_rate, channels, app, err_ptr);
if (getValue(err_ptr, 'i32') != 0 /* OK */)
throw 'opus_encoder_create failed: ' + getValue(err_ptr, 'i32');
this.in_ptr = _malloc(this.frame_size * channels * 4);
this.in_len = this.frame_size * channels;
this.in_i16 = HEAP16.subarray(this.in_ptr >> 1, (this.in_ptr >> 1) + this.in_len);
this.in_f32 = HEAPF32.subarray(this.in_ptr >> 2, (this.in_ptr >> 2) + this.in_len);
this.out_bytes = Opus.getMaxFrameSize();
this.out_ptr = _malloc(this.out_bytes);
this.out_buf = HEAPU8.subarray(this.out_ptr, this.out_ptr + this.out_bytes);
}
OpusEncoder.prototype.encode = function (pcm) {
var output = [];
var pcm_off = 0;
while (pcm.length - pcm_off >= this.in_len - this.in_off) {
if (this.in_off > 0) {
this.in_i16.set(pcm.subarray(pcm_off, pcm_off + this.in_len - this.in_off), this.in_off);
pcm_off += this.in_len - this.in_off;
this.in_off = 0;
}
else {
this.in_i16.set(pcm.subarray(pcm_off, pcm_off + this.in_len));
pcm_off += this.in_len;
}
var ret = _opus_encode(this.handle, this.in_ptr, this.frame_size, this.out_ptr, this.out_bytes);
if (ret <= 0)
throw 'opus_encode failed: ' + ret;
var packet = new ArrayBuffer(ret);
new Uint8Array(packet).set(this.out_buf.subarray(0, ret));
output.push(packet);
}
if (pcm_off < pcm.length) {
this.in_i16.set(pcm.subarray(pcm_off));
this.in_off = pcm.length - pcm_off;
}
return output;
};
OpusEncoder.prototype.encode_float = function (pcm) {
var output = [];
var pcm_off = 0;
while (pcm.length - pcm_off >= this.in_len - this.in_off) {
if (this.in_off > 0) {
this.in_f32.set(pcm.subarray(pcm_off, pcm_off + this.in_len - this.in_off), this.in_off);
pcm_off += this.in_len - this.in_off;
this.in_off = 0;
}
else {
this.in_f32.set(pcm.subarray(pcm_off, pcm_off + this.in_len));
pcm_off += this.in_len;
}
var ret = _opus_encode_float(this.handle, this.in_ptr, this.frame_size, this.out_ptr, this.out_bytes);
if (ret <= 0)
throw 'opus_encode failed: ' + ret;
var packet = new ArrayBuffer(ret);
new Uint8Array(packet).set(this.out_buf.subarray(0, ret));
output.push(packet);
}
if (pcm_off < pcm.length) {
this.in_f32.set(pcm.subarray(pcm_off));
this.in_off = pcm.length - pcm_off;
}
return output;
};
OpusEncoder.prototype.encode_final = function () {
if (this.in_off == 0)
return new ArrayBuffer(0);
for (var i = this.in_off; i < this.in_len; ++i)
this.in_i16[i] = 0;
var ret = _opus_encode(this.handle, this.in_ptr, this.frame_size, this.out_ptr, this.out_bytes);
if (ret <= 0)
throw 'opus_encode failed: ' + ret;
var packet = new ArrayBuffer(ret);
new Uint8Array(packet).set(this.out_buf.subarray(0, ret));
return packet;
};
OpusEncoder.prototype.encode_float_final = function () {
if (this.in_off == 0)
return new ArrayBuffer(0);
for (var i = this.in_off; i < this.in_len; ++i)
this.in_f32[i] = 0;
var ret = _opus_encode_float(this.handle, this.in_ptr, this.frame_size, this.out_ptr, this.out_bytes);
if (ret <= 0)
throw 'opus_encode failed: ' + ret;
var packet = new ArrayBuffer(ret);
new Uint8Array(packet).set(this.out_buf.subarray(0, ret));
return packet;
};
OpusEncoder.prototype.destroy = function () {
if (!this.handle)
return;
_opus_encoder_destroy(this.handle);
_free(this.in_ptr);
this.handle = this.in_ptr = 0;
};
return OpusEncoder;
})();
var OpusDecoder = (function () {
function OpusDecoder(sampling_rate, channels) {
this.handle = 0;
this.in_ptr = 0;
this.out_ptr = 0;
this.channels = channels;
var err_ptr = allocate(4, 'i32', ALLOC_STACK);
this.handle = _opus_decoder_create(sampling_rate, channels, err_ptr);
if (getValue(err_ptr, 'i32') != 0 /* OK */)
throw 'opus_decoder_create failed: ' + getValue(err_ptr, 'i32');
this.in_ptr = _malloc(Opus.getMaxFrameSize(channels));
this.in_buf = HEAPU8.subarray(this.in_ptr, this.in_ptr + Opus.getMaxFrameSize(channels));
this.out_len = Opus.getMaxSamplesPerChannel(sampling_rate);
var out_bytes = this.out_len * channels * 4;
this.out_ptr = _malloc(out_bytes);
this.out_i16 = HEAP16.subarray(this.out_ptr >> 1, (this.out_ptr + out_bytes) >> 1);
this.out_f32 = HEAPF32.subarray(this.out_ptr >> 2, (this.out_ptr + out_bytes) >> 2);
}
OpusDecoder.prototype.decode = function (packet) {
this.in_buf.set(new Uint8Array(packet));
var ret = _opus_decode(this.handle, this.in_ptr, packet.byteLength, this.out_ptr, this.out_len, 0);
if (ret < 0)
throw 'opus_decode failed: ' + ret;
var samples = new Int16Array(ret * this.channels);
samples.set(this.out_i16.subarray(0, samples.length));
return samples;
};
OpusDecoder.prototype.decode_float = function (packet) {
this.in_buf.set(new Uint8Array(packet));
var ret = _opus_decode_float(this.handle, this.in_ptr, packet.byteLength, this.out_ptr, this.out_len, 0);
if (ret < 0)
throw 'opus_decode failed: ' + ret;
var samples = new Float32Array(ret * this.channels);
samples.set(this.out_f32.subarray(0, samples.length));
return samples;
};
OpusDecoder.prototype.destroy = function () {
if (!this.handle)
return;
_opus_decoder_destroy(this.handle);
_free(this.in_ptr);
_free(this.out_ptr);
this.handle = this.in_ptr = this.out_ptr = 0;
};
return OpusDecoder;
})();


@@ -58,16 +58,59 @@ function startGame() {
pc = new RTCPeerConnection({iceServers: [{urls: 'stun:stun.l.google.com:19302'}]})
// input channel
inputChannel = pc.createDataChannel('foo')
inputChannel.onclose = () => {
log('inputChannel has closed');
}
inputChannel.onopen = () => {
log('inputChannel has opened');
}
inputChannel.onclose = () => log('inputChannel has closed');
inputChannel.onopen = () => log('inputChannel has opened');
inputChannel.onmessage = e => {
log(`Message from DataChannel '${inputChannel.label}' payload '${e.data}'`);
console.log(e);
log(`Message '${e.data}'`);
}
window.AudioContext = window.AudioContext || window.webkitAudioContext;
var context = new AudioContext();
var delayTime = 0;
var init = 0;
var audioStack = [];
var nextTime = 0;
function scheduleBuffers() {
while (audioStack.length) {
var buffer = audioStack.shift();
var source = context.createBufferSource();
source.buffer = buffer;
source.connect(context.destination);
if (nextTime == 0)
nextTime = context.currentTime + 0.05; /// add 50ms latency to work well across systems - tune this if you like
source.start(nextTime);
nextTime+=source.buffer.duration; // Make the next buffer wait the length of the last buffer before being played
}
}
sampleRate = 16000;
channels = 1;
bitDepth = 16;
decoder = new OpusDecoder(sampleRate, channels);
function damn(opusChunk) {
pcmChunk = decoder.decode_float(opusChunk);
myBuffer = context.createBuffer(channels, pcmChunk.length, sampleRate);
nowBuffering = myBuffer.getChannelData(0);
for (var i = 0; i < pcmChunk.length; i++) {
nowBuffering[i] = pcmChunk[i];
}
return myBuffer;
}
pc.ondatachannel = function (ev) {
log(`New data channel '${ev.channel.label}'`);
ev.channel.onopen = () => log('channelX has opened');
ev.channel.onclose = () => log('channelX has closed');
ev.channel.onmessage = (e) => {
audioStack.push(damn(e.data));
if ((init!=0) || (audioStack.length > 10)) { // make sure we put at least 10 chunks in the buffer before starting
init++;
scheduleBuffers();
}
}
}
@@ -84,8 +127,9 @@ function startGame() {
}
}
var stream = new MediaStream();
window.stream = new MediaStream();
document.getElementById("game-screen2").srcObject = stream;
// stream channel
pc.ontrack = function (event) {
console.log(event);


@@ -1,19 +1,38 @@
<html>
<body>
<button onclick="play()">hihi</button>
<button onclick="alert(1)">hoho</button>
<b>hehe</b>
<script src="/static/js/libopus.js"></script>
<script src="/static/js/opus.js"></script>
<script>
var audioCtx = new (window.AudioContext || window.webkitAudioContext)();
a = new ArrayBuffer(3);
v = new DataView(a);
v.setInt8(0, -28);
v.setInt8(1, -1);
v.setInt8(2, -2);
console.log(a);
b = new Uint8Array(a);
console.log(b);
d = new OpusDecoder(48000, 2);
console.log(d);
console.log(d.decode_float(a));
// Stereo
var audioCtx = new (window.AudioContext || window.webkitAudioContext)();
var channels = 1;
var sampleRate = 16000;
var sampleRate = 48000;
var bitDepth = 16;
// Create an empty two second stereo buffer at the
// sample rate of the AudioContext
var req = new XMLHttpRequest();
var sound;
req.open('GET', "/static/sinewave.raw", false);
req.open('GET', "/static/ex3.raw", false);
// req.open('GET', "/static/sinewave.raw", false);
req.overrideMimeType('text\/plain; charset=x-user-defined');
req.onreadystatechange = function (aEvt) {
if (req.readyState == 4) {


@@ -1,85 +0,0 @@
package ui
import (
"github.com/gordonklaus/portaudio"
"log"
"gopkg.in/hraban/opus.v2"
// "github.com/xlab/opus-go/opus"
)
type Audio struct {
stream *portaudio.Stream
sampleRate float64
outputChannels int
channel chan float32
}
func NewAudio() *Audio {
a := Audio{}
a.channel = make(chan float32, 16000)
return &a
}
func (a *Audio) Start() error {
parameters := portaudio.StreamParameters{}
parameters.SampleRate = 44100
// host, err := portaudio.DefaultHostApi()
// if err != nil {
// return err
// }
// parameters := portaudio.HighLatencyParameters(nil, host.DefaultOutputDevice)
stream, err := portaudio.OpenDefaultStream(0, 1, 48000, 0, a.Callback)
// stream, err := portaudio.OpenStream(parameters, a.Callback)
if err != nil {
return err
}
if err := stream.Start(); err != nil {
return err
}
a.stream = stream
a.sampleRate = parameters.SampleRate
// a.outputChannels = parameters.Output.Channels
a.outputChannels = 1
log.Println(a.sampleRate, a.outputChannels, parameters.FramesPerBuffer)
return nil
}
func (a *Audio) Stop() error {
return a.stream.Close()
}
func (a *Audio) Callback(out []float32) {
var output float32
log.Println(len(out))
for i := range out {
if i%a.outputChannels == 1 {
select {
case sample := <-a.channel:
output = sample
default:
output = 0
}
}
out[i] = output
}
enc, err := opus.NewEncoder(48000, 1, opus.AppVoIP)
if err != nil {
log.Println("[!] Cannot create audio encoder", err)
return
}
data := make([]byte, 1000)
n, err := enc.EncodeFloat32(out, data)
if err != nil {
log.Println("[!] Failed to decode")
return
}
data = data[:n]
}


@@ -27,6 +27,9 @@ const (
right2
)
const NumKeys = 8
const SampleRate = 16000
const Channels = 1
const TimeFrame = 40
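// One audio frame is therefore TimeFrame * SampleRate / 1000 = 40 * 16000 / 1000 = 640
// mono samples (40 ms), the buffer size fanoutAudio fills before each Opus encode.
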
type GameView struct {
console *nes.Console
@@ -39,10 +42,9 @@ type GameView struct {
imageChannel chan *image.RGBA
audioChanel chan float32
inputChannel chan int
}
func NewGameView(console *nes.Console, title, hash string, imageChannel chan *image.RGBA, audioChanel chan float32, inputChannel chan int) *GameView {
gameview := &GameView{
console: console,
@@ -72,10 +74,7 @@ func (view *GameView) ListenToInputChannel() {
// Enter enter the game view.
func (view *GameView) Enter() {
// view.console.SetAudioChannel(view.audio.channel)
// view.console.SetAudioSampleRate(view.audio.sampleRate)
view.console.SetAudioSampleRate(48000)
view.console.SetAudioSampleRate(SampleRate)
view.console.SetAudioChannel(view.audioChanel)
// load state
@@ -124,11 +123,10 @@ func (view *GameView) updateControllers() {
// First 8 keys are player 1
var player1Keys [8]bool
copy(player1Keys[:], view.keyPressed[:8])
var player2Keys [8]bool
copy(player2Keys[:], view.keyPressed[8:])
view.console.Controller1.SetButtons(player1Keys)
view.console.Controller2.SetButtons(player2Keys)
}


@@ -139,17 +139,11 @@ func (w *WebRTC) StartClient(remoteSession string, width, height int) (string, e
w.connection, err = webrtc.NewPeerConnection(config)
// m := webrtc.MediaEngine{}
// m.RegisterCodec(webrtc.NewRTPOpusCodec(webrtc.DefaultPayloadTypeOpus, 48000))
// m.RegisterCodec(webrtc.NewRTPVP8Codec(webrtc.DefaultPayloadTypeVP8, 1))
// api := webrtc.NewAPI(webrtc.WithMediaEngine(m))
// w.connection, err = api.NewPeerConnection(config)
if err != nil {
return "", err
}
vp8Track, err := w.connection.NewTrack(webrtc.DefaultPayloadTypeVP8, rand.Uint32(), "video", "pion2a")
vp8Track, err := w.connection.NewTrack(webrtc.DefaultPayloadTypeVP8, rand.Uint32(), "video", "pion2")
if err != nil {
return "", err
}
@@ -159,14 +153,7 @@ func (w *WebRTC) StartClient(remoteSession string, width, height int) (string, e
}
opusTrack, err := w.connection.NewTrack(webrtc.DefaultPayloadTypeOpus, rand.Uint32(), "audio", "pion2b")
if err != nil {
return "", err
}
_, err = w.connection.AddTrack(opusTrack)
if err != nil {
return "", err
}
audioTrack, err := w.connection.CreateDataChannel("foo2", nil)
// WebRTC state callback
w.connection.OnICEConnectionStateChange(func(connectionState webrtc.ICEConnectionState) {
@@ -175,7 +162,7 @@ func (w *WebRTC) StartClient(remoteSession string, width, height int) (string, e
go func() {
w.isConnected = true
log.Println("ConnectionStateConnected")
w.startStreaming(vp8Track, opusTrack)
w.startStreaming(vp8Track, audioTrack)
}()
}
@@ -189,6 +176,7 @@ func (w *WebRTC) StartClient(remoteSession string, width, height int) (string, e
log.Println(iceCandidate)
})
// Data channel callback
w.connection.OnDataChannel(func(d *webrtc.DataChannel) {
log.Printf("New DataChannel %s %d\n", d.Label(), d.ID())
@@ -257,7 +245,8 @@ func (w *WebRTC) IsClosed() bool {
return w.isClosed
}
func (w *WebRTC) startStreaming(vp8Track *webrtc.Track, opusTrack *webrtc.Track) {
// func (w *WebRTC) startStreaming(vp8Track *webrtc.Track, opusTrack *webrtc.Track) {
func (w *WebRTC) startStreaming(vp8Track *webrtc.Track, audioTrack *webrtc.DataChannel) {
log.Println("Start streaming")
// send screenshot
go func() {
@@ -281,8 +270,7 @@ func (w *WebRTC) startStreaming(vp8Track *webrtc.Track, opusTrack *webrtc.Track)
go func() {
for w.isConnected {
data := <-w.AudioChannel
opusTrack.Write(data)
// opusTrack.WriteSample(media.Sample{Data: data, Samples: uint32(len(data))})
audioTrack.Send(data)
}
}()
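The commit replaces the RTP Opus track with the "foo2" DataChannel, so each encoded packet now travels over SCTP instead of RTP. A minimal sketch of that send loop follows; the helper name streamAudio is an assumption (the commit inlines this in a goroutine and ignores Send errors), only the pion DataChannel.Send call itself is taken from the diff above.
package streaming
import "github.com/pion/webrtc"
// streamAudio forwards each encoded Opus packet over the peer's audio
// DataChannel and stops when the packet channel closes or a send fails.
func streamAudio(dc *webrtc.DataChannel, packets <-chan []byte) {
	for pkt := range packets {
		if err := dc.Send(pkt); err != nil {
			return // peer went away; stop fanning out to this session
		}
	}
}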