Import current version from CVS
This commit is contained in:
commit
381f0a2e20
2 changed files with 605 additions and 0 deletions
36
tivodb
Executable file
36
tivodb
Executable file
|
@ -0,0 +1,36 @@
|
|||
#!/usr/local/bin/python
|
||||
|
||||
import anydbm
|
||||
import getopt
|
||||
import operator
|
||||
import os
|
||||
import sys
|
||||
|
||||
def usage():
|
||||
print >>sys.stderr, "usage: dbtool {-a entry|-d entry|-l}"
|
||||
|
||||
try:
|
||||
optlist, args = getopt.getopt(sys.argv[1:], "a:d:lk")
|
||||
except getopt.GetoptError, err:
|
||||
print >>sys.stderr, str(err)
|
||||
usage()
|
||||
sys.exit(64)
|
||||
if len(args) != 0 or len(optlist) != 1:
|
||||
usage()
|
||||
sys.exit(64)
|
||||
|
||||
downloaddb = anydbm.open(os.path.expanduser("~") + "/.tivo/downloads.db", "c")
|
||||
|
||||
for (o, a) in optlist:
|
||||
if o == "-l":
|
||||
for i in sorted(downloaddb.keys()):
|
||||
print "%s:\t%s" % (i, downloaddb[i])
|
||||
elif o == "-k":
|
||||
for (k, v) in sorted(downloaddb.items(), key=operator.itemgetter(1)):
|
||||
print "%s:\t%s" % (k, v)
|
||||
elif o == "-d":
|
||||
del downloaddb[a]
|
||||
elif o == "-a":
|
||||
downloaddb[a] = "manually added"
|
||||
|
||||
downloaddb.close()
|
569
tivomirror
Executable file
569
tivomirror
Executable file
|
@ -0,0 +1,569 @@
|
|||
#!/usr/local/bin/python
|
||||
|
||||
# Stefan's script to download shows from the TiVo and transcode them
# to MPEG-4.
# Started hourly on disklesslibber via a crontab entry:
# flock -n /tmp/tivomirror.log -c 'tivomirror >.tivomirror.log 2>&1 </dev/null'
|
||||
|
||||
import sys
|
||||
reload(sys)
|
||||
sys.setdefaultencoding('utf-8')
|
||||
|
||||
import anydbm
|
||||
import cookielib
|
||||
import datetime
|
||||
import getopt
|
||||
import errno
|
||||
import functools
|
||||
import logging
|
||||
import logging.handlers
|
||||
import os
|
||||
import re
|
||||
import requests
|
||||
import signal
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
import urllib2
|
||||
import xml.dom.minidom
|
||||
import tivomp4
|
||||
|
||||
# --- Configuration ----------------------------------------------------------

# Hostname of the TiVo to mirror from.
host = "tivo.lassitu.de"
#host = "wavehh.lassitu.de:30080"
# Media access key of the TiVo (used for HTTP digest auth and tivodecode).
# NOTE(review): secret hard-coded in the script.
mak = "7194378159"
# Destination directory for the mirrored recordings.
targetdir = "/p2/media/video/TV"
gig = 1024.0 * 1024 * 1024
# Refuse to start downloading when less free space than this (bytes).
minfree = 10 * gig
# When True, the capture date replaces the episode title (see TivoItem).
ignoreepisodetitle = False

# Optional per-title aspect-ratio overrides (title -> aspect value).
arset = dict()

# Whitelist of shows to mirror; titles not listed here are skipped.
includes = dict()
includes['Anthony Bourdain Parts Unknown'] = 1
includes['Better Call Saul'] = 1
includes['Brooklyn Nine-Nine'] = 1
includes['Bull'] = 1
includes['College Football'] = 1
includes['Conan'] = 1
includes["Dirk Gently's Holistic Detective Agency"] = 1
includes['The Expanse'] = 1
includes['Family Guy'] = 1
includes['Full Frontal With Samantha Bee'] = 1
includes['Hot in Cleveland'] = 1
includes["How It's Made"] = 1
includes["How Do They Do It?"] = 1
includes["How We Got to Now With Steven Johnson"] = 1
includes['Inside Amy Schumer'] = 1
includes['Join or Die With Craig Ferguson'] = 1
includes['Last Week Tonight With John Oliver'] = 1
includes['Louie'] = 1
includes['Mad Men'] = 1
includes['Modern Family'] = 1
includes['MythBusters'] = 1
includes['MythBusters: The Search'] = 1
includes['NCIS'] = 1
includes['NCIS: New Orleans'] = 1
#includes['NFL Football'] = 1
includes['Person of Interest'] = 1
includes['Saturday Night Live'] = 1
includes['Sesame Street'] = 1
includes["Somebody's Gotta Do It With Mike Rowe"] = 1
includes['StarTalk'] = 1
includes['The Big Bang Theory'] = 1
includes['The Daily Show With Trevor Noah'] = 1
includes['The Late Show With Stephen Colbert'] = 1
#includes['The Late Late Show With James Corden'] = 1
includes['The Muppets'] = 1
includes['The X-Files'] = 1
#includes['The Tonight Show Starring Jimmy Fallon'] = 1


# Module-level logger; a file handler is attached in main().
logger = logging.getLogger('tivomirror')
logger.setLevel(logging.INFO)
|
||||
|
||||
|
||||
class flushfile(object):
    """Wrap a writable stream so that every write() is flushed at once.

    Used to line-buffer stdout so progress output appears promptly
    even when redirected to a log file.
    """

    def __init__(self, f):
        # The wrapped stream; only write() is proxied.
        self.f = f

    def write(self, x):
        self.f.write(x)
        self.f.flush()
|
||||
# Flush stdout after every write so cron-captured logs stay current.
sys.stdout = flushfile(sys.stdout)

tmp = "/tmp"

# prepare global requests session to download the TOC and the episodes
requests.packages.urllib3.disable_warnings()
session = requests.session()
# The TiVo serves a self-signed certificate; skip verification.
session.verify = False
session.auth = requests.auth.HTTPDigestAuth("tivo", mak)
session.keep_alive = False
|
||||
|
||||
|
||||
class TimeoutError(Exception):
    # NOTE(review): shadows the builtin TimeoutError on Python 3; fine on
    # Python 2, which this script targets.
    pass

def timeout(seconds=10, error_message=os.strerror(errno.ETIMEDOUT)):
    """Decorator: raise TimeoutError if the call runs longer than `seconds`.

    Implemented with SIGALRM, so it works only on Unix and only in the
    main thread.
    """
    def decorator(func):
        def _handle_timeout(signum, frame):
            raise TimeoutError(error_message)

        def wrapper(*args, **kwargs):
            signal.signal(signal.SIGALRM, _handle_timeout)
            signal.alarm(seconds)
            try:
                result = func(*args, **kwargs)
            finally:
                # Always cancel the pending alarm, even when func raised.
                signal.alarm(0)
            return result

        return functools.wraps(func)(wrapper)

    return decorator
|
||||
|
||||
|
||||
def trimDescription(desc):
    """Strip copyright boilerplate from a description and cap it at 80 chars."""
    desc = desc.strip()
    # Cut each known boilerplate suffix off, keeping the text before it.
    for marker in (". Copyright Tribune Media Services, Inc.",
                   ". * Copyright Rovi, Inc"):
        pos = desc.rfind(marker)
        if pos > 0:
            desc = desc[:pos]
    return desc[:80] if len(desc) > 80 else desc
|
||||
|
||||
class TivoException(Exception):
    """Application-level error while downloading or processing an item."""
    def __init__(self, value):
        self.value = value
    def __str__(self):
        return repr(self.value)
|
||||
|
||||
class TivoItem:
    """One recording from the TiVo's NowPlaying listing.

    Built from an <Item> DOM element; derives display name, target
    directory and file basename from the XML fields.
    """

    def __init__(self, i):
        self.title = getTagText(i, "Title")
        self.episode = getTagText(i, "EpisodeTitle")
        self.episodeNumber = getTagText(i, "EpisodeNumber")
        self.description = trimDescription(getTagText(i, "Description"))
        d = getTagText(i, "CaptureDate")
        # CaptureDate is a hex epoch timestamp string (e.g. "0x588f...").
        self.date = datetime.datetime.utcfromtimestamp(int(d, 16))
        self.time = int(d, base=0)
        self.datestr = self.date.strftime("%Y%m%d-%H%M")
        self.url = getTagText(i, "Url")
        self.inprogress = getTagText(i, "InProgress")
        self.available = getTagText(i, "Available")
        self.sourcesize = int(getTagText(i, "SourceSize"))
        self.highdef = getTagText(i, "HighDefinition")
        # Aspect ratio: default 4:3, overridable per title via arset,
        # or "hd" for high-definition recordings.
        self.ar = 43
        self.unique = True
        if arset.has_key(self.title):
            self.ar = arset[self.title]
        elif self.highdef == "Yes":
            self.ar = "hd"
        if ignoreepisodetitle:
            self.episode = self.datestr
        # Fall back to description, then capture date, as the episode name.
        if self.episode == "":
            if self.description != "":
                self.episode = self.description
            else:
                self.episode = self.datestr
        self.formatnames()

    def makeNotUnique(self):
        # Called when several items share a name; re-format the names so
        # they include the capture date and stay distinguishable.
        self.unique = False
        self.formatnames()

    def formatnames(self):
        """Derive self.name, self.dir and self.file (UTF-8 byte strings)."""
        if self.episodeNumber and self.episodeNumber != u'0':
            en = int(self.episodeNumber)
            if en >= 100:
                # e.g. 1204 -> "S12E04"
                self.name = "%s S%02dE%02d %s" % (self.title, en / 100, en % 100, self.episode)
            else:
                self.name = "%s E%s %s" % (self.title, self.episodeNumber, self.episode)
        elif self.unique:
            self.name = "%s - %s" % (self.title, self.episode)
        else:
            self.name = "%s - %s - %s" % (self.title, self.datestr, self.episode)
        # ":" and "/" are not safe in path components; replace with "-".
        self.dir = "%s/%s" % (targetdir, re.sub("[:/]", "-", self.title))
        self.file = "%s/%s" % (self.dir, re.sub("[:/]", "-", self.name))
        self.name = self.name.encode("utf-8");
        self.dir = self.dir.encode("utf-8");
        self.file = self.file.encode("utf-8");

    def __str__(self):
        return repr(self.title)
|
||||
|
||||
|
||||
class TivoToc:
    """Table of contents: the TiVo's NowPlaying list as a DOM plus TivoItems."""

    def __init__(self):
        self.dom = None
        # Cached copy of the listing, saved/loaded in the current directory.
        self.filename = "toc.xml"
        # Persistent set of program titles whose episode names collide.
        self.uniquedb = anydbm.open("unique.db", "c")
        self.items = []
        pass

    def load(self):
        """Parse a previously saved listing from toc.xml."""
        fd = open(self.filename, "r")
        self.dom = xml.dom.minidom.parseString(fd.read())
        fd.close()
        return self.dom

    def save(self):
        """Write the current DOM back to toc.xml."""
        fd = open(self.filename, "w")
        fd.write(self.dom.toprettyxml())
        fd.close()

    def download_chunk(self, offset):
        """Fetch one 50-item page of the NowPlaying container as XML text."""
        global session

        params = {
            'Command': 'QueryContainer',
            'Container': '/NowPlaying',
            'Recurse': 'Yes',
            'ItemCount': '50',
            'AnchorOffset': offset
        }
        url = "https://{}/TiVoConnect".format(host)
        logger.debug(" offset %d" % (offset))
        r = session.get(url, params=params, timeout=30, verify=False)
        if r.status_code != 200:
            r.raise_for_status()
        return r.text

    def download(self):
        """Page through the whole listing, merging all <Item> nodes into one DOM."""
        offset = 0
        itemCount = 1
        self.dom = None
        root = None
        logger.info("*** Getting listing")
        # The TiVo returns at most 50 items per request; keep asking until
        # a page comes back empty.
        while itemCount > 0:
            dom = xml.dom.minidom.parseString(self.download_chunk(offset))
            if self.dom == None:
                # First page becomes the base document.
                self.dom = dom
                root = self.dom.childNodes.item(0)
            else:
                # Append subsequent pages' items to the base document.
                for child in dom.childNodes.item(0).childNodes:
                    if child.nodeName == "Item":
                        root.appendChild(child.cloneNode(True))
            itemCount = int(getElementText(dom.documentElement.childNodes, "ItemCount"))
            offset += itemCount
        return self.dom

    def getItems(self):
        """Build TivoItem objects from the DOM and resolve name collisions."""
        self.titles = {}
        for node in self.dom.getElementsByTagName("Item"):
            item = TivoItem(node)
            self.items.append(item)
            if item.title not in self.titles:
                self.titles[item.title] = []
            self.titles[item.title].append(item)
        # see if we have items that end up having an identical name; mark
        # the program title in uniquedb if that's the case
        for title in self.titles:
            names = {}
            for item in self.titles[title]:
                if item.name not in names:
                    names[item.name] = []
                names[item.name].append(item)
            for name in names:
                if len(names[name]) > 1:
                    self.uniquedb[title.encode("utf-8")] = "1"
        # Not every dbm flavor has sync(); call it only when present.
        if getattr(self.uniquedb, "sync", None) and callable(self.uniquedb.sync):
            self.uniquedb.sync()
        # Once a title is in uniquedb it stays date-qualified forever.
        for item in self.items:
            if self.uniquedb.has_key(item.title.encode("utf-8")):
                item.makeNotUnique()
        return self.items
|
||||
|
||||
|
||||
def getText(nodelist):
    """Concatenate the character data of all text nodes in nodelist."""
    return "".join(node.data for node in nodelist
                   if node.nodeType == node.TEXT_NODE)
|
||||
|
||||
def getTagText(element, tagname):
    """Return the text content of the first <tagname> descendant, or ""."""
    matches = element.getElementsByTagName(tagname)
    if not matches:
        return ""
    return getText(matches[0].childNodes)
|
||||
|
||||
def getElementText(nodes, name):
    """Return the text of the first element node in `nodes` named `name`.

    Returns None when no such element exists.
    """
    for node in nodes:
        is_element = node.nodeType == xml.dom.Node.ELEMENT_NODE
        if is_element and node.nodeName == name:
            return getText(node.childNodes)
    return None
|
||||
|
||||
def getAvail(dir):
    """Return the number of bytes available to unprivileged users on `dir`."""
    st = os.statvfs(dir)
    return st.f_bsize * st.f_bavail
|
||||
|
||||
|
||||
def quit_process(pid):
    """Send SIGQUIT to `pid`, ignoring processes that no longer exist."""
    try:
        os.kill(pid, signal.SIGQUIT)
    except OSError:
        # Process already exited (or is not ours); best-effort only.
        pass
|
||||
|
||||
|
||||
class FdLogger(threading.Thread):
    """Daemon thread that forwards each line read from a file object to a
    logger; used to capture tivodecode's stdout/stderr."""

    def __init__(self, logger, lvl, fd):
        self.logger = logger
        self.lvl = lvl
        self.fd = fd
        threading.Thread.__init__(self)
        # Daemon thread: do not keep the process alive if the pipe never closes.
        self.daemon = True
        self.start()

    def run(self):
        try:
            # for line in fd buffers, so use this instead
            for line in iter(self.fd.readline, b''):
                self.logger.log(self.lvl, ": %s", line.strip('\n'))
            self.fd.close()
        except Exception:
            # Log and exit the thread; never propagate into the interpreter.
            self.logger.exception("")
|
||||
|
||||
|
||||
@timeout(43200)
#@timeout(7200)
def download(item, mak, target):
    """Stream one recording from the TiVo and pipe it through tivodecode.

    Writes the decoded MPEG stream to `target`. Raises TivoException when
    the resulting file is implausibly small; aborts via SIGALRM after
    12 hours (the @timeout decorator above).
    """
    global session
    count = 0
    start = time.time()
    upd = start
    url = item.url
    #url = re.sub("tivo.lassitu.de:80", "wavehh.lassitu.de:30080", url)
    #url = re.sub("wavehh.lassitu.de:80", "wavehh.lassitu.de:30080", url)
    #url = re.sub("tivo.lassitu.de:80", "localhost:8888", url)
    #url = re.sub("tivo.lassitu.de:80", "krokodil-vpn.zs64.net:8888", url)
    logger.info("--- downloading \"%s\"" % (url))
    start = time.time()
    r = session.get(url, stream=True, verify=False)
    #r = session.get(url, stream=True, proxies={"http":"http://wavehh:8888","https":"http://wavehh:8888"})
    r.raise_for_status()

    try:
        # tivodecode reads the encrypted stream on stdin and writes the
        # decoded MPEG to `target`.
        p_decode = subprocess.Popen(["tivodecode", "--mak", mak, \
                "--no-verify", "--out", target, "-"], stdin=subprocess.PIPE,
                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        FdLogger(logger, logging.INFO, p_decode.stdout)
        FdLogger(logger, logging.INFO, p_decode.stderr)
        def info(signum, frame):
            # SIGINFO handler: print a progress line on demand.
            # NOTE(review): reads `now` from the enclosing scope, which is
            # only bound after the first chunk is transferred; a SIGINFO
            # before that would raise NameError. `upd = ...` also only
            # rebinds a handler-local name. Confirm before relying on it.
            upd = time.time()
            dur = now - start
            mb = count / 1e6
            print "%5.1f%% %5.3f GB downloaded in %.0f min, %.3f MB/s" % (
                    100.0 * count / item.sourcesize,
                    mb / 1e3, dur / 60, mb / dur)
        try:
            # SIGINFO only exists on BSD-style systems; ignore elsewhere.
            signal.signal(signal.SIGINFO, info)
        except Exception:
            pass
        while True:
            time.sleep(0) # yield to logger threads
            chunk = r.raw.read(65536)
            if chunk:
                p_decode.stdin.write(chunk)
            else:
                break
            count += len(chunk)
            now = time.time()
            # Log progress at most once a minute.
            if (now - upd) > 60:
                upd = now
                dur = now - start
                mb = count / 1e6
                logger.debug(" %5.1f%% %5.3f GB downloaded in %.0f min, %.3f MB/s" % (
                        100.0 * count / item.sourcesize,
                        mb / 1e3, dur / 60, mb / dur))
    except Exception as e:
        logger.error("problem decoding: %s" % (e))
        raise
    finally:
        # Stop reacting to SIGINFO once the transfer loop is done.
        try:
            signal.signal(signal.SIGINFO, signal.SIG_IGN)
        except Exception:
            pass
    elapsed = time.time() - start
    throughput = count / elapsed
    logger.info("%5.3fGB transferred in %d:%02d, %.1f MB/s" % (
            count/1e9, int(elapsed/3600), int(elapsed / 60) % 60, throughput/1e6))
    # Close tivodecode's stdin and give it a moment to exit on its own
    # before terminating it.
    try:
        p_decode.stdin.close()
        p_decode.poll()
        if p_decode.returncode == None:
            time.sleep(1)
            p_decode.poll()
        if p_decode.returncode == None:
            logger.debug("terminating tivodecode")
            p_decode.terminate()
    except Exception, e:
        pass
    p_decode.wait()
    logger.info("tivodecode exited with %s" % (p_decode.returncode))
    # A tiny output file means the download failed (e.g. auth error page).
    size = os.path.getsize(target)
    if size < 1024:
        logger.error("error downloading file: too small")
        os.remove(target)
        raise TivoException("downloaded file is too small")
|
||||
|
||||
|
||||
def download_decode(item, mak):
|
||||
target = "%s.mpg" % item.file
|
||||
mp4 = "%s.mp4" % item.file
|
||||
try:
|
||||
os.makedirs(item.dir)
|
||||
except OSError:
|
||||
pass
|
||||
if 0 & os.path.exists(target):
|
||||
logger.info(" reusing existing download file")
|
||||
else:
|
||||
try:
|
||||
download(item, mak, target)
|
||||
except Exception, e:
|
||||
exc_info = sys.exc_info()
|
||||
try:
|
||||
os.remove(target)
|
||||
except Exception, e2:
|
||||
pass
|
||||
raise exc_info[1], None, exc_info[2]
|
||||
#tivomp4.transcode(target, mp4, item.ar)
|
||||
try:
|
||||
os.utime(target, (item.time, item.time))
|
||||
#os.utime(mp4, [item.date, item.date])
|
||||
except Exception, e:
|
||||
logger.error("Problem setting timestamp: %" % (e))
|
||||
|
||||
|
||||
def download_one(item, downloaddb):
    """Download a single item and record the success in downloaddb."""
    global logger, mak
    logger.info("*** downloading \"%s\": %.3fGB" % (item.name, item.sourcesize / 1e9))
    try:
        download_decode(item, mak)
        # Remember the episode so it is not downloaded again.
        downloaddb[item.name] = item.datestr
        # Not every dbm flavor has sync(); call it only when present.
        if getattr(downloaddb, "sync", None) and callable(downloaddb.sync):
            downloaddb.sync()
        # Give the TiVo a breather between downloads.
        logger.debug("Sleeping 30 seconds before moving on...")
        time.sleep(30)
    except TivoException, e:
        # Log and continue with the next item; not fatal for the run.
        logger.info("Error processing \"%s\": %s" % (item.name, e))
|
||||
|
||||
|
||||
def wantitem(item, downloaddb):
    """Decide whether an item should be downloaded.

    Returns "" when the item is wanted, otherwise a short human-readable
    reason for skipping it.
    """
    if item.inprogress == "Yes":
        return "recording"
    if item.available == "No":
        return "not available"
    if downloaddb.has_key(item.name):
        return "already downloaded"
    #if excludes.has_key(item.title) or excludes.has_key(item.episode) or excludes.has_key(item.name):
    #    return "excluded"
    # The whitelist matches on title, episode title, or the full name.
    wanted = (includes.has_key(item.title)
              or includes.has_key(item.episode)
              or includes.has_key(item.name))
    if not wanted:
        return "not included"
    return ""
|
||||
|
||||
|
||||
def mirror(toc, downloaddb, one=False):
    """Download every wanted item from the TOC.

    Aborts immediately when free space on targetdir is below minfree.
    When `one` is True, stop after the first successful download.
    """
    avail = getAvail(targetdir)
    if avail < minfree:
        logger.error("%s: %.1fG available, at least %.1fG needed, stopping" % \
                (targetdir, avail / gig, minfree / gig))
        sys.exit(1)

    items = toc.getItems()
    logger.info("*** %d shows listed" % (len(items)))
    for item in items:
        reason = wantitem(item, downloaddb)
        if reason != "":
            logger.debug("*** skipping \"%s\": %s" % (item.name, reason))
        else:
            download_one(item, downloaddb)
            if one:
                break
|
||||
|
||||
|
||||
def download_episode(toc, downloaddb, episode):
    """Download every item whose title, full name or episode title matches
    `episode` exactly."""
    for item in toc.getItems():
        if episode in (item.title, item.name, item.episode):
            download_one(item, downloaddb)
|
||||
|
||||
|
||||
def printtoc(toc, downloaddb):
    """Print the listing grouped by show, with the skip reason (if any)."""
    items = toc.getItems()
    print "*** %d shows listed" % (len(items))
    # Group items by show title.
    shows = {}
    for item in items:
        if item.title not in shows:
            shows[item.title] = []
        shows[item.title].append(item)
    for title in sorted(shows):
        for item in sorted(shows[title], key=lambda i: i.name):
            reason = wantitem(item, downloaddb)
            if (reason != ""):
                # Reason truncated to a fixed-width 7-character column.
                print "%-7.7s: %s" % (reason, item.name)
                continue
            print "*** downloading %s (%.3fGB)" % (item.name, item.sourcesize / 1e9)
|
||||
|
||||
|
||||
def main():
    """Entry point: parse options and dispatch to the requested command.

    Commands: mirror, mirrorone, list (default), download <episode>.
    Options: -d/--debug, -v/--verbose, -u/--update, -T/--ignoreepisodetitle.
    """
    global ignoreepisodetitle, logger
    curdir = os.getcwd()
    # All state files (log, toc.xml, *.db) live in ~/.tivo.
    os.chdir(os.path.expanduser("~") + "/.tivo")
    handler = logging.handlers.RotatingFileHandler("tivomirror.log", maxBytes=2*1024*1024, backupCount=5)
    # The fmt string is %%-escaped because it is itself built with %.
    handler.setFormatter(logging.Formatter(fmt='tivomirror[%d] %%(asctime)s %%(levelname)6.6s %%(message)s' % (os.getpid()),
            datefmt='%H:%M:%S'))
    logger.addHandler(handler)
    downloaddb = anydbm.open("downloads.db", "c")
    toc = TivoToc()
    cmd = "list"
    updateToc = False

    try:
        options, remainder = getopt.getopt(sys.argv[1:], 'dvuT',
                ['ignoreepisodetitle', 'debug', 'verbose', 'update'])

        for opt, arg in options:
            if opt in ('-d', '--debug'):
                logger.setLevel(logging.DEBUG)
            if opt in ('-v', '--verbose'):
                # Additionally log to stderr.
                handler = logging.StreamHandler()
                logger.addHandler(handler)
            if opt in ('-u', '--update'):
                updateToc = True
            if opt in ('-T', '--ignoreepisodetitle'):
                ignoreepisodetitle = True

        if len(remainder) >= 1:
            cmd = remainder[0]

        # "mirror" always works from a fresh listing; the other commands
        # reuse the cached toc.xml unless -u/--update was given.
        if updateToc or cmd == "mirror":
            toc.download()
            toc.save()
        else:
            toc.load()

        if cmd == "mirror":
            mirror(toc, downloaddb)
        elif cmd == "mirrorone":
            mirror(toc, downloaddb, True)
        elif cmd == "list":
            printtoc(toc, downloaddb)
        elif cmd == "download":
            download_episode(toc, downloaddb, remainder[1])
        else:
            logger.error("invalid command %s" % (cmd))
            print >>sys.stderr, "invalid command %s" % (cmd)
            sys.exit(64)

        downloaddb.close()
    except Exception:
        # Log the full traceback; the cron wrapper only captures stdout/stderr.
        logger.exception("")
    logger.info("*** Completed")


if __name__ == "__main__":
    main()
|
Loading…
Reference in a new issue