Audio is currently received from the server (requires a demo-instruct.wav file). ICE should now work across NATs, even though aiortc does not follow the WebRTC API for ICE.
@@ -5,3 +5,4 @@ node_modules | |||
yarn-error.log | |||
yarn.lock | |||
.DS_Store | |||
server/p |
@@ -11,11 +11,14 @@ | |||
</head> | |||
<body> | |||
<p></p> | |||
<p> | |||
<div id="constatus">undefined</div> | |||
<audio id="audioSink" autoplay></audio> | |||
</p> | |||
<script type="text/javascript" src="jamming.js"></script> | |||
<script type="text/javascript"> | |||
runPage() | |||
//runPage() | |||
/* parameters */ | |||
var params = { | |||
@@ -25,7 +28,7 @@ var params = { | |||
window.AudioContext = window.AudioContext || window.webkitAudioContext; | |||
var audioContext = new AudioContext(); | |||
//var audioContext = new AudioContext(); | |||
recconf = { | |||
samplerRate: 8000, | |||
@@ -39,8 +39,10 @@ | |||
"mocha": "^7.1.1", | |||
"nyc": "^15.0.1", | |||
"sinon": "^9.0.2", | |||
"uuid": "^7.0.3", | |||
"webpack": "^4.42.1", | |||
"webpack-cli": "^3.3.11" | |||
"webpack-cli": "^3.3.11", | |||
"websocket-as-promised": "^1.0.1" | |||
}, | |||
"dependencies": { | |||
"inline-worker": "https://github.com/mohayonao/inline-worker.git#7014cd64c3cd6eb884f6743aad682a995e262bb9" | |||
@@ -0,0 +1,13 @@ | |||
VIRTUALENV ?= virtualenv | |||
VIRTUALENVARGS =
FILES=server.py | |||
test: | |||
	(echo $(FILES) | entr sh -c 'make test-noentr')
test-noentr: | |||
python -m coverage run -m unittest server && coverage report --omit=p/\* -m -i | |||
env: | |||
($(VIRTUALENV) $(VIRTUALENVARGS) p && . ./p/bin/activate && pip install -r requirements.txt) |
@@ -0,0 +1,2 @@ | |||
git+https://github.com/aiortc/aiortc.git#egg=aiortc | |||
aiohttp |
@@ -0,0 +1,159 @@ | |||
# Standard library
import asyncio
import json
import logging
import os.path
import uuid

# Third-party
import aiohttp
from aiohttp import web
from aiortc import RTCPeerConnection, RTCIceCandidate, RTCSessionDescription
from aiortc.contrib.media import MediaPlayer
# Module logger for peer-connection lifecycle events.
logger = logging.getLogger('pc')
logger.setLevel(logging.INFO)
# Implement https://w3c.github.io/webrtc-pc/#constructor-0 per the | |||
# spec. | |||
from aioice.candidate import Candidate | |||
from aiortc.rtcicetransport import candidate_from_aioice | |||
def RealRTCIceCandidate(candidateInitDict):
    """Build an aiortc RTCIceCandidate from a W3C RTCIceCandidateInit dict.

    Performs the candidate-string parsing that the spec's RTCIceCandidate
    constructor (https://w3c.github.io/webrtc-pc/#constructor-0) does but
    aiortc's does not.

    Raises:
        ValueError: if the candidate string lacks the 'candidate:' prefix.
    """
    prefix = 'candidate:'
    sdp_fragment = candidateInitDict['candidate']
    if not sdp_fragment.startswith(prefix):
        raise ValueError('does not start with proper string')
    parsed = Candidate.from_sdp(sdp_fragment[len(prefix):])
    candidate = candidate_from_aioice(parsed)
    candidate.sdpMid = candidateInitDict['sdpMid']
    candidate.sdpMLineIndex = candidateInitDict['sdpMLineIndex']
    # XXX - exists as part of RTCIceParameters
    #candidate.usernameFragment = candidateInitDict['usernameFragment']
    return candidate
class AudioMixer(object):
    '''Placeholder for an N-way audio mixer.

    Intended contract: tracks added via addTrack() are mixed together and
    the combined stream is exposed via the `audio` property.  Both members
    are currently unimplemented stubs (they return None).
    '''

    @property
    def audio(self):
        '''The output audio track for this mixing.'''
        # stub: no mixed output track is produced yet

    def addTrack(self, track):
        '''Add an import track that will be mixed with
        the other tracks.'''
        # stub: incoming tracks are currently ignored
mixer = AudioMixer()  # shared (stub) mixer instance
pcs = set()  # all live RTCPeerConnection objects, for shutdown cleanup
shutdown = False  # intended to be set True by on_shutdown
ROOT = os.path.dirname(__file__)  # this file's directory, for static assets
async def index(request):
    """Serve the built audiotest.html page from the dist directory.

    Returns:
        web.Response with content type text/html.
    """
    # Use a context manager so the file handle is closed promptly
    # (the original open(...).read() leaked the handle).
    with open(os.path.join(ROOT, '..', 'dist', 'audiotest.html'), 'r') as f:
        content = f.read()
    return web.Response(content_type='text/html', text=content)
async def jammingjs(request):
    """Serve the built jamming.js bundle from the dist directory.

    Returns:
        web.Response with content type application/javascript.
    """
    # Use a context manager so the file handle is closed promptly
    # (the original open(...).read() leaked the handle).
    with open(os.path.join(ROOT, '..', 'dist', 'jamming.js'), 'r') as f:
        content = f.read()
    return web.Response(content_type='application/javascript', text=content)
# XXX - update handler to pass uuid and meeting id in the url
async def ws_handler(request):
    """WebSocket signalling endpoint.

    Speaks a simple JSON protocol: a message containing 'sdp' carries a
    WebRTC offer (a new RTCPeerConnection is created and an answer is sent
    back over the socket); a message containing 'ice' carries a remote ICE
    candidate for the existing connection.

    Returns:
        The prepared web.WebSocketResponse once the socket closes.
    """
    ws = web.WebSocketResponse()
    await ws.prepare(request)

    pc_id = str(uuid.uuid4())

    def log_info(msg, *args):
        #print(repr(msg), repr(args))
        # shouldn't be warning, but can't get logging working otherwise
        logger.warning(pc_id + " " + msg, *args)

    log_info("Created for %s", request.remote)

    # pc is bound when the first offer arrives; initialize it so an
    # out-of-order 'ice' message fails predictably instead of raising
    # NameError.
    pc = None
    doexit = False
    async for msg in ws:
        if doexit:
            break
        if msg.type == aiohttp.WSMsgType.TEXT:
            data = json.loads(msg.data)
            log_info('got msg: %s', repr(data))
            if 'sdp' in data:
                offer = RTCSessionDescription(
                    sdp=data['sdp'], type=data['type'])
            elif 'ice' in data:
                # NOTE(review): assumes an offer arrived first so pc exists.
                pc.addIceCandidate(RealRTCIceCandidate(data['ice']))
                continue

            pc = RTCPeerConnection()
            # add to the current set
            pcs.add(pc)

            @pc.on("datachannel")
            def on_datachannel(channel):
                @channel.on("message")
                def on_message(message):
                    if isinstance(message, str) and message.startswith("ping"):
                        channel.send("pong" + message[4:])

            @pc.on("iceconnectionstatechange")
            async def on_iceconnectionstatechange():
                # BUG FIX: without `nonlocal`, the assignment below created
                # a new local variable and the receive loop never exited.
                nonlocal doexit
                log_info("ICE connection state is %s", pc.iceConnectionState)
                if pc.iceConnectionState == "failed":
                    await pc.close()
                    pcs.discard(pc)
                    doexit = True

            mixer = MediaPlayer('demo-instruct.wav')

            @pc.on("track")
            def on_track(track):
                log_info("Track %s received", track.kind)
                if track.kind == "audio":
                    pc.addTrack(mixer.audio)
                    #mixer.addTrack(track)

                @track.on("ended")
                async def on_ended():
                    log_info("Track %s ended", track.kind)
                    # XXX likely not correct
                    await mixer.stop()

            log_info("Got offer: %s", repr(offer))

            # handle offer
            await pc.setRemoteDescription(offer)

            # send answer
            answer = await pc.createAnswer()
            await pc.setLocalDescription(answer)
            await ws.send_str(json.dumps({
                "sdp": pc.localDescription.sdp,
                "type": pc.localDescription.type,
            }))
        elif msg.type == aiohttp.WSMsgType.ERROR:
            print('ws connection closed with exception %s' %
                  ws.exception())

    print('websocket connection closed')
    return ws
async def on_shutdown(app):
    """aiohttp shutdown hook: flag shutdown and close all peer connections.

    Args:
        app: the aiohttp Application being torn down (unused).
    """
    # BUG FIX: declare the module-level flag; the bare assignment only
    # created a function-local `shutdown` that was thrown away.
    global shutdown
    shutdown = True

    # close peer connections
    coros = [pc.close() for pc in pcs]
    await asyncio.gather(*coros)
    pcs.clear()
def main():
    """Wire up the HTTP/WebSocket routes and run the signalling server."""
    app = web.Application()
    app.on_shutdown.append(on_shutdown)
    routes = (
        ("/", index),
        ("/jamming.js", jammingjs),
        ("/ws", ws_handler),
    )
    for path, handler in routes:
        app.router.add_get(path, handler)
    web.run_app(app, access_log=None, port=23854, ssl_context=None)


if __name__ == '__main__':
    main()
@@ -1,17 +1,193 @@ | |||
const jamming = require("./jamming"); | |||
import jamming from './jamming'; | |||
import { v4 as uuidv4 } from 'uuid'; | |||
import WebSocketAsPromised from 'websocket-as-promised'; | |||
/* POST a JSON message to the server's /offer endpoint, tagging it with
 * this client's uuid.  Returns the fetch() promise. */
function sendServer(obj) {
  const payload = Object.assign({}, obj);
  payload.uuid = uuid;
  const options = {
    body: JSON.stringify(payload),
    headers: {
      'Content-Type': 'application/json'
    },
    method: 'POST'
  };
  return fetch('/offer', options);
}
/* Capture the microphone, open the signalling WebSocket and establish a
 * WebRTC audio connection with the server.  Incoming server audio is
 * played through the #audioSink element; status is shown in #constatus.
 *
 * BUG FIX: the previous text contained conflicting duplicate declarations
 * (`var stream`/`let stream = null`, `var constraints`/`const constraints`)
 * which are a SyntaxError; a single declaration of each is kept. */
async function runPage() {
  const uuid = uuidv4();
  const constatus = document.getElementById('constatus');
  const audioSink = document.getElementById('audioSink');
  let stream = null;
  let wsp;
  let pc;
  const constraints = {
    audio: {
      latency: .005, /* 5ms latency */
      channelCount: 1,
      noiseSuppression: false,
      autoGainControl: false,
      sampleRate: { min: 22050, max: 48000, ideal: 32000 },
    }
  };

  /* setup local media */
  try {
    stream = await navigator.mediaDevices.getUserMedia(constraints);
    console.log('got stream');
  } catch(err) {
    console.log('got error');
    constatus.textContent = 'Unable to open microphone';
    return;
  }

  /* setup server messages */
  wsp = new WebSocketAsPromised('ws://' + window.location.host + '/ws', {
    createWebSocket: url => new WebSocket(url),
    extractMessageData: event => event,
  });
  wsp.onError.addListener((err) => {
    constatus.textContent = 'connection to server lost';
  });
  wsp.onMessage.addListener((message) => {
    const msg = JSON.parse(message.data);
    console.log('got message via ws:', msg);
    if (msg.uuid == uuid) return; /* ignore our own messages */
    if (msg.sdp) {
      pc.setRemoteDescription(new RTCSessionDescription(msg));
    } else if (msg.ice) {
      pc.addIceCandidate(new RTCIceCandidate(msg.ice));
    }
  });
  await wsp.open();

  /* send obj over the websocket, tagged with our uuid */
  function sendServer(obj) {
    const lclobj = Object.assign({}, obj);
    lclobj.uuid = uuid;
    console.log('send:', lclobj);
    wsp.send(JSON.stringify(lclobj));
  }

  /* we are initiator */
  const configuration = {
    iceServers: [ {
      urls: [
        'stun:stun3.l.google.com:19302',
        /* reduce number of stun servers
        'stun:stun.l.google.com:19302',
        'stun:stun1.l.google.com:19302',
        'stun:stun2.l.google.com:19302',
        'stun:stun4.l.google.com:19302',
        */
        'stun:stun.services.mozilla.com',
      ]
    } ]
  };
  pc = new RTCPeerConnection(configuration);
  pc.onicecandidate = (event) => {
    if (event.candidate != null) {
      console.log(event.candidate);
      sendServer({ ice: event.candidate });
    }
  };
  pc.ontrack = (event) => {
    audioSink.srcObject = event.streams[0];
  };
  /* BUG FIX: pc.addStream() is non-standard and deprecated; add each
   * track individually per the WebRTC spec. */
  for (const track of stream.getTracks()) {
    pc.addTrack(track, stream);
  }

  let desc;
  try {
    desc = await pc.createOffer();
  } catch(err) {
    constatus.textContent = 'failed to create offer for server: ' + err;
    return;
  }
  /* do description filtering here */
  await pc.setLocalDescription(desc);
  const ld = pc.localDescription;
  sendServer({ sdp: ld.sdp, type: ld.type });
}
// Kick off the page setup immediately on load.
runPage()
// #4 of https://stackoverflow.com/questions/37656592/define-global-variable-with-webpack
// Expose runPage so the inline <script> in the HTML page can call it.
global.runPage = runPage;
/* Demo of RTCPeerConnection.generateCertificate (ECDSA P-256).
 * Currently unused: the certificate is generated and discarded. */
async function foo() {
  const certificate = await RTCPeerConnection.generateCertificate({
    name: "ECDSA", namedCurve: "P-256",
    hash: 'SHA-256'
  });
  // global.pc = new RTCPeerConnection({certificates: [certificate]});
}
/* Reference WebRTC negotiation example (cf. the w3c webrtc-pc samples).
 * Not wired into the page: SignalingChannel, input and showChatMessage
 * are not defined in this file. */
async function bar() {
  const signaling = new SignalingChannel(); // handles JSON.stringify/parse
  const configuration = {
    iceServers: [ {
      urls: [
        // NOTE(review): the five google entries below are missing the
        // 'stun:' scheme prefix (compare the mozilla entry) — confirm;
        // browsers reject URLs without a scheme.
        'stun.l.google.com:19302',
        'stun1.l.google.com:19302',
        'stun2.l.google.com:19302',
        'stun3.l.google.com:19302',
        'stun4.l.google.com:19302',
        'stun:stun.services.mozilla.com',
      ]
    } ]
  };
  let pc, channel;

  // call start() to initiate
  function start() {
    pc = new RTCPeerConnection(configuration);

    // send any ice candidates to the other peer
    pc.onicecandidate = ({candidate}) => signaling.send({candidate});

    // let the "negotiationneeded" event trigger offer generation
    pc.onnegotiationneeded = async () => {
      try {
        await pc.setLocalDescription();
        // send the offer to the other peer
        signaling.send({description: pc.localDescription});
      } catch (err) {
        console.error(err);
      }
    };

    // create data channel and setup chat using "negotiated" pattern
    channel = pc.createDataChannel('chat', {negotiated: true, id: 0});
    channel.onopen = () => input.disabled = false;
    channel.onmessage = ({data}) => showChatMessage(data);

    input.onkeypress = ({keyCode}) => {
      // only send when user presses enter
      if (keyCode != 13) return;
      channel.send(input.value);
    }
  }

  signaling.onmessage = async ({data: {description, candidate}}) => {
    // NOTE(review): start() takes no parameter, so the `false` argument
    // is ignored — confirm whether a "polite peer" flag was intended.
    if (!pc) start(false);
    try {
      if (description) {
        await pc.setRemoteDescription(description);
        // if we got an offer, we need to reply with an answer
        if (description.type == 'offer') {
          await pc.setLocalDescription();
          signaling.send({description: pc.localDescription});
        }
      } else if (candidate) {
        await pc.addIceCandidate(candidate);
      }
    } catch (err) {
      console.error(err);
    }
  };
}