can take a long while, bad in that we lose some of the async requirements... as things are in a thread and do web requests synchronously, we no longer have to raise a Deferred.. I'm not happy, but it'll take work to properly be a mix between the two..

[git-p4: depot-paths = "//depot/": change = 1308]
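The change boils down to the pattern sketched below. This is a minimal sketch, not code from the tree; the URL and the function names blockingFetch/gotBody are placeholders. The blocking fetch runs in Twisted's thread pool via threads.deferToThread, and the caller still gets a Deferred back, so nothing deep in the call has to raise one.

# Sketch only: blockingFetch and the URL are illustrative stand-ins.
from twisted.internet import reactor, threads
import urllib2

def blockingFetch(url):
    # Runs in a worker thread, so blocking here is fine.
    return urllib2.urlopen(url).read()

def gotBody(body):
    print 'fetched %d bytes' % len(body)

d = threads.deferToThread(blockingFetch, 'http://localhost:8080/')
d.addCallback(gotBody)
d.addBoth(lambda _: reactor.stop())
reactor.run()

The cost, as the message says, is that each such request ties up a worker thread for as long as the fetch blocks.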
@@ -31,7 +31,7 @@ from elementtree.ElementTree import Element, SubElement, tostring
 from upnp import UPnPPublisher, errorCode
 from DIDLLite import DIDLElement, Container, Movie, Resource, MusicTrack
-from twisted.internet import defer
+from twisted.internet import defer, threads
 from twisted.python import failure
 import debug
@@ -76,6 +76,13 @@ class doRecall(defer.Deferred):
         else:
             self.defer.callback(ret)
+
+    @staticmethod
+    def wrapper(fun, *args, **kwargs):
+        try:
+            return fun(*args, **kwargs)
+        except defer.Deferred, x:
+            return doRecallgen(x, fun, *args, **kwargs)

 def doRecallgen(defer, fun, *args, **kwargs):
     i = doRecall(defer, fun, *args, **kwargs)
     return i.defer
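For context, the "raise a Deferred" convention these helpers serve works roughly as in the sketch below. It is a guess reconstructed from the visible fragments, not the tree's actual doRecall, and it omits error handling: code that cannot answer yet raises a Deferred, wrapper catches it and hands it to doRecallgen, which arranges for the original call to be retried once that Deferred fires (the tree relies on the old Python 2 style of raising a Deferred instance, as wrapper itself shows).

from twisted.internet import defer

def recallWhenReady(pending, fun, *args, **kwargs):
    # Hypothetical helper with the same shape as doRecallgen: once
    # `pending` (the Deferred that was raised) fires, call `fun` again
    # and feed its result to a fresh Deferred the original caller waits on.
    result = defer.Deferred()
    def retry(value):
        result.callback(fun(*args, **kwargs))
        return value
    pending.addCallback(retry)
    return result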
@@ -186,14 +193,12 @@ class ContentDirectoryControl(UPnPPublisher, dict):
             'StartingIndex=%s RequestedCount=%s SortCriteria=%s)' % \
             tuple(map(repr, args))
-        try:
-            ret = self.thereal_soap_Browse(*args)
-        except defer.Deferred, x:
-            ret = doRecallgen(x, self.soap_Browse, *args)
-        l['response'] = ret
+        def setresp(r):
+            l['response'] = r
+            return r
-        return ret
+        return threads.deferToThread(self.thereal_soap_Browse,
+            *args).addCallback(setresp)

     def thereal_soap_Browse(self, *args):
         """Required: Incrementally browse the native heirachy of the Content
@@ -14,6 +14,7 @@ from twisted.internet import reactor
 from twisted.python import log
 import twisted.web
 import urlparse
+import urllib2

 def getPage(url, contextFactory=None, *args, **kwargs):
     """Download a web page as a string.
@@ -178,15 +179,15 @@ class PYVR(Container):
         Container.__init__(self, *args, **kwargs)
         self.pathObjmap = {}
-        self.pend = None
+        #self.pend = None
         self.lastmodified = None
         self.newobjs = None
         self.objs = {}
         self.lastcheck = 0

     def checkUpdate(self):
-        if self.pend is not None:
-            raise self.pend
+        #if self.pend is not None:
+        #    raise self.pend
         if time.time() - self.lastcheck < 10:
             return
@@ -194,45 +195,50 @@ class PYVR(Container):
         # Check to see if any changes have been made
         self.runCheck()
-        raise self.pend

     def runCheck(self):
         print 'runCheck'
-        self.page = getPage(self.url, method='HEAD')
-        self.page.deferred.addErrback(self.errCheck).addCallback(
-            self.doCheck)
-        self.pend = self.page.deferred
+        while True:
+            try:
+                self.page = urllib2.urlopen(self.url)
+                break
+            except urllib2.HTTPError:
+                time.sleep(.1)
+        #self.page = getPage(self.url, method='HEAD')
+        #self.page.deferred.addErrback(self.errCheck).addCallback(
+        #    self.doCheck)
+        #self.pend = self.page.deferred
+        return self.doCheck(self.page)

     def errCheck(self, x):
         print 'errCheck:', `x`
         self.runCheck()

     def doCheck(self, x):
-        print 'doCheck:', self.page.status
-        if self.page.status != '200':
-            print 'foo'
-            return reactor.callLater(.01, self.runCheck)
+        #print 'doCheck:', self.page.status
+        #if self.page.status != '200':
+        #    print 'foo'
+        #    return reactor.callLater(.01, self.runCheck)
         self.lastcheck = time.time()
-        slm = self.page.response_headers['last-modified']
+        slm = self.page.info()['last-modified']
         if slm == self.lastmodified:
             # Page the same, don't do anything
-            self.pend = None
+            #self.pend = None
             return
-        self.page = getPage(self.url)
-        self.page.deferred.addCallback(self.parsePage)
-        self.pend = self.page.deferred
+        return self.parsePage(self.page)
+        #self.page = getPage(self.url)
+        #self.page.deferred.addCallback(self.parsePage)
+        #self.pend = self.page.deferred
-        return self.pend
+        #return self.pend

     def parsePage(self, page):
-        slm = self.page.response_headers['last-modified']
+        slm = self.page.info()['last-modified']
         self.lastmodified = slm
         del self.page
-        self.pend = None
+        #self.pend = None
-        self.newobjs = recxmltoobj(page)
+        self.newobjs = recxmltoobj(page.read())
         self.doUpdate()

     def doUpdate(self):
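The new runCheck/doCheck path reduces to the blocking pattern below. This is a sketch with illustrative names; fetchIfModified is not a function in the tree. Fetch the page with urllib2, retrying briefly on HTTP errors, and only hand the body on for reparsing when the Last-Modified header differs from the previous poll.

import time
import urllib2

def fetchIfModified(url, lastmodified):
    # Returns (body, last-modified); body is None when the page is unchanged.
    while True:
        try:
            page = urllib2.urlopen(url)
            break
        except urllib2.HTTPError:
            time.sleep(.1)          # transient error, retry shortly
    slm = page.info().getheader('last-modified')
    if slm == lastmodified:
        return None, lastmodified   # same page, nothing to parse
    return page.read(), slm

Like the change itself, this loops indefinitely if the server keeps returning errors; capping the retries would be the obvious hardening.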
@@ -303,13 +303,14 @@ class ShoutStation(ShoutURL):
         kwargs['mimetype'] = self.station['MimeType'].encode('ascii')
         kwargs['bitrate'] = self.station['Bitrate'] * 128 # 1024k / 8bit
-        ShoutURL.__init__(*args, **kwargs)
+        ShoutURL.__init__(self, *args, **kwargs)

 class ShoutGenre(MusicGenre):
     def __init__(self, *args, **kwargs):
         self.genre = kwargs['genre']
         del kwargs['genre']
-        self.feeds = ShoutcastFeedAsync(self.genre)
+        #self.feeds = ShoutcastFeedAsync(self.genre)
+        self.feeds = feeds.ShoutcastFeed(self.genre)
         self.sl = None
         self.pathObjmap = {}
@@ -376,7 +377,8 @@ class ShoutCast(Container):
     def __init__(self, *args, **kwargs):
         Container.__init__(self, *args, **kwargs)
-        self.genres = GenreFeedAsync()
+        #self.genres = GenreFeedAsync()
+        self.genres = feeds.GenreFeed()
         self.genre_list = None
         self.pathObjmap = {}
@@ -402,7 +404,7 @@
                 continue
             doupdate = True
             self.pathObjmap[i] = self.cd.addItem(self.id,
-                ShoutGenre, i, genre = i)
+                ShoutGenre, i, genre=i)
         self.genre_list = nl