Browse Source

Add support for WAX/ASX playlist files.

[git-p4: depot-paths = "//depot/": change = 1373]
main
John-Mark Gurney 15 years ago
parent
commit
38c4e6f212
2 changed files with 38 additions and 11 deletions
  1. +1
    -0
      README
  2. +37
    -11
      shoutcast.py

+ 1
- 0
README View File

@@ -99,6 +99,7 @@ v0.x:
creating the files.
If we get an error parsing the genres of ShoutCAST, try again.
Print out the modules that failed to load.
Add support for WAX/ASX files in shoutcast.

v0.5:
Support multiple SSDP servers on the same box.


+ 37
- 11
shoutcast.py View File

@@ -17,6 +17,7 @@ import os.path
import random
import time
import traceback
import xml.dom.minidom

from py_shoutcast import *

@@ -203,25 +204,50 @@ class ShoutProxy(resource.Resource):
map(fun, self.afterurls)
self.afterurls = None

def gotURL(self, page):
    """Callback for the fetched playlist body.

    Tries to parse *page* as a PLS playlist first; if it has no INI
    section header it is assumed to be a WAX/ASX document instead.
    Stores the resulting URL list on self.urls, picks a random start
    position, and fires the waiting deferreds with True.
    """
    self.fetchingurls = False
    try:
        urls = self.parsePLS(page)
    except ConfigParser.MissingSectionHeaderError:
        # Not INI-style, so not PLS -- fall back to WAX/ASX parsing.
        urls = self.parseWAX(page)

    self.urls = urls
    # NOTE(review): an empty playlist makes randrange raise
    # ValueError here -- confirm upstream guarantees entries.
    self.urlpos = random.randrange(len(self.urls))

    self.triggerdefered(lambda x: x.callback(True))

def parseWAX(self, page):
    """Parse a WAX/ASX playlist document.

    *page* must be an XML document whose root element is <asx>;
    anything else raises ValueError.  Returns the list of href
    attributes from every <ref> inside every <entry>, in document
    order.
    """
    dom = xml.dom.minidom.parseString(page)
    rootel = dom.documentElement
    if rootel.nodeName != 'asx':
        # Same message as before; %r matches the old repr() output.
        raise ValueError('Only asx allowed, got %r' % rootel.nodeName)

    urls = []
    for entry in rootel.getElementsByTagName('entry'):
        # A missing href yields '' from getAttribute; str() keeps the
        # values as plain (byte) strings.
        urls.extend(str(ref.getAttribute('href')) for ref in
            entry.getElementsByTagName('ref'))

    return urls

def parsePLS(self, page):
    """Parse a PLS (INI-style) playlist.

    Raises ConfigParser.MissingSectionHeaderError when *page* is not
    INI-formatted (the caller uses that to fall back to WAX/ASX).
    Returns the list of File1..FileN entry URLs.
    """
    pls = ConfigParser.SafeConfigParser()
    pls.readfp(StringIO.StringIO(page))
    # KCSM 91.1 doesn't provide a version
    #assert pls.getint(PLSsection, 'Version') == 2
    # NOTE(review): assert is stripped under -O; a raise would be
    # sturdier, but callers currently see AssertionError.
    assert pls.has_option(PLSsection, 'numberofentries')
    cnt = pls.getint(PLSsection, 'numberofentries')
    urls = []
    for i in range(cnt):
        # PLS entries are one-based: File1 .. FileN.
        urls.append(pls.get(PLSsection, 'File%d' % (i + 1)))

    return urls

def errURL(self, failure):
    """Errback for the playlist fetch: clear the in-progress flag and
    propagate *failure* to every waiting deferred."""
    self.fetchingurls = False
    # XXX - retry?
    self.triggerdefered(lambda x: x.errback(failure))
@@ -255,8 +281,8 @@ class ShoutProxy(resource.Resource):
# shouldn't getPage do proper escaping for me?
self.afterurls = [ defer.Deferred() ]
d = getPage(self.shoutpls.encode('ascii'))
d.addCallback(self.gotPLS)
d.addErrback(self.errPLS)
d.addCallback(self.gotURL)
d.addErrback(self.errURL)
else:
self.afterurls.append(defer.Deferred())
# Always add the callback if we don't have urls
@@ -269,7 +295,7 @@ class ShoutProxy(resource.Resource):
# and make sure the connection doesn't get closed
return server.NOT_DONE_YET

# Methods serialized by Twisted's threadable machinery; must name the
# renamed gotURL callback.
synchronized = [ 'gotURL', 'render', 'startNextConnection',
    'triggerdefered', ]
threadable.synchronize(ShoutProxy)



Loading…
Cancel
Save