2017-04-12 17:23:05 +00:00
|
|
|
#!/usr/local/bin/python
|
2017-04-16 15:46:43 +00:00
|
|
|
# -*- coding: utf8 -*-
|
2017-04-12 17:23:05 +00:00
|
|
|
|
|
|
|
# Stefan's script to download the recordings from the TiVo and
# transcode them to MPEG4.
# Started hourly on disklesslibber via this crontab entry:
# flock -n /tmp/tivomirror.log -c 'tivomirror >.tivomirror.log 2>&1 </dev/null'
|
|
|
|
|
|
|
|
import sys
|
|
|
|
reload(sys)
|
|
|
|
sys.setdefaultencoding('utf-8')
|
|
|
|
|
|
|
|
import anydbm
|
|
|
|
import cookielib
|
|
|
|
import datetime
|
|
|
|
import getopt
|
|
|
|
import errno
|
|
|
|
import functools
|
|
|
|
import logging
|
|
|
|
import logging.handlers
|
|
|
|
import os
|
2017-04-16 15:46:43 +00:00
|
|
|
import pytz
|
2017-04-12 17:23:05 +00:00
|
|
|
import re
|
|
|
|
import requests
|
|
|
|
import signal
|
|
|
|
import shutil
|
|
|
|
import subprocess
|
|
|
|
import sys
|
|
|
|
import threading
|
|
|
|
import time
|
|
|
|
import urllib2
|
|
|
|
import xml.dom.minidom
|
|
|
|
|
|
|
|
host = "tivo.lassitu.de"
|
|
|
|
#host = "wavehh.lassitu.de:30080"
|
|
|
|
mak = "7194378159"
|
|
|
|
targetdir = "/p2/media/video/TV"
|
|
|
|
gig = 1024.0 * 1024 * 1024
|
|
|
|
minfree = 10 * gig
|
|
|
|
ignoreepisodetitle = False
|
2017-04-13 21:40:12 +00:00
|
|
|
tivodecode = "tivodecode"
|
|
|
|
cookies = "cookies.txt"
|
|
|
|
proxies=None
|
|
|
|
#proxies={"http":"http://us:8888","https":"http://us:8888"}
|
|
|
|
|
|
|
|
headers = requests.utils.default_headers()
|
|
|
|
headers.update(
|
|
|
|
{
|
|
|
|
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:52.0) Gecko/20100101 Firefox/52.0',
|
|
|
|
}
|
|
|
|
)
|
2017-04-12 17:23:05 +00:00
|
|
|
|
2017-04-16 15:46:43 +00:00
|
|
|
class IncludeShow:
    """Registry entry for a show that should be mirrored from the TiVo.

    Instantiating the class registers the instance in the class-level
    ``includes`` mapping, keyed by the full show title.
    """

    # Class-level registry: title -> IncludeShow instance.
    includes = {}

    def __init__(self, title, short=None):
        """Register *title*; *short* is an optional abbreviated name."""
        self.title = title
        self.short = short
        self.timestamp = False
        IncludeShow.includes[title] = self
|
|
|
|
|
|
|
|
# Shows to mirror; a short name, when given, yields shorter file names.
# Currently disabled (kept for easy re-enabling):
#   'NFL Football'
#   'The Late Late Show With James Corden'
#   'The Tonight Show Starring Jimmy Fallon'
for _title, _short in [
    ('Splash and Bubbles', None),
    ('Angie Tribeca', None),
    ('Anthony Bourdain Parts Unknown', None),
    ('Better Call Saul', None),
    ('Brooklyn Nine-Nine', None),
    ('Bull', None),
    ('College Football', None),
    ('Conan', None),
    ("Dirk Gently's Holistic Detective Agency", 'Dirk Gently'),
    ('The Expanse', None),
    ('Family Guy', None),
    ('Full Frontal With Samantha Bee', 'Full Frontal'),
    ("How It's Made", None),
    ("How Do They Do It?", None),
    ("How We Got to Now With Steven Johnson", None),
    ('Inside Amy Schumer', None),
    ('Join or Die With Craig Ferguson', None),
    ('Last Week Tonight With John Oliver', 'John Oliver'),
    ('Louie', None),
    ('Modern Family', None),
    ('MythBusters', None),
    ('NCIS', None),
    ('NCIS: New Orleans', None),
    ('Person of Interest', None),
    ('Saturday Night Live', 'SNL'),
    ('Sesame Street', None),
    ("Somebody's Gotta Do It With Mike Rowe", None),
    ('StarTalk', None),
    ('The Big Bang Theory', None),
    ('The Daily Show With Trevor Noah', 'Daily Show'),
    ('The Late Show With Stephen Colbert', 'Colbert'),
    ('The Muppets', None),
    ('The X-Files', None),
]:
    IncludeShow(_title, short=_short)
|
2017-04-12 17:23:05 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
logger = logging.getLogger('tivomirror')
|
|
|
|
logger.setLevel(logging.INFO)
|
|
|
|
|
|
|
|
|
|
|
|
class flushfile(object):
    """File-object wrapper that flushes after every write.

    Wraps sys.stdout so progress output appears immediately even when
    stdout is redirected to a (block-buffered) log file by cron.
    """

    def __init__(self, f):
        self.f = f  # the wrapped file object

    def write(self, x):
        """Write *x* to the underlying file and flush immediately."""
        self.f.write(x)
        self.f.flush()

    def __getattr__(self, name):
        # Delegate everything else (flush, fileno, encoding, ...) to the
        # wrapped file so the wrapper is a drop-in replacement; the
        # original only exposed write(), which broke callers that probe
        # other file attributes.
        return getattr(self.f, name)

sys.stdout = flushfile(sys.stdout)
|
|
|
|
|
|
|
|
tmp = "/tmp"
|
|
|
|
|
|
|
|
# prepare global requests sesssion to download the TOC and the episodes
|
|
|
|
requests.packages.urllib3.disable_warnings()
|
|
|
|
session = requests.session()
|
|
|
|
session.verify = False
|
|
|
|
session.auth = requests.auth.HTTPDigestAuth("tivo", mak)
|
|
|
|
session.keep_alive = False
|
2017-04-13 21:40:12 +00:00
|
|
|
session.proxies = proxies
|
2017-04-12 17:23:05 +00:00
|
|
|
|
2017-04-16 15:46:43 +00:00
|
|
|
|
|
|
|
def roundTime(dt=None, roundTo=60):
    """Round a datetime to the nearest multiple of *roundTo* seconds.

    dt      -- datetime to round; defaults to now
    roundTo -- rounding granularity in seconds (default: one minute)

    Based on
    http://stackoverflow.com/questions/3463930/how-to-round-the-minute-of-a-datetime-object-python
    """
    if dt is None:
        dt = datetime.datetime.now()
    # Seconds into the current day, computed on a naive copy so that
    # tz-aware datetimes work too.
    seconds = (dt.replace(tzinfo=None) - dt.min).seconds
    # Round half up to the nearest multiple of roundTo.
    rounding = (seconds+roundTo/2) // roundTo * roundTo
    return dt + datetime.timedelta(0,rounding-seconds,-dt.microsecond)
|
|
|
|
|
|
|
|
|
2017-04-12 17:23:05 +00:00
|
|
|
class TimeoutError(Exception):
|
|
|
|
pass
|
|
|
|
|
|
|
|
def timeout(seconds=10, error_message=os.strerror(errno.ETIMEDOUT)):
    """Decorator factory: abort the wrapped call with TimeoutError.

    Arms SIGALRM for *seconds* around each call, so it only works in the
    main thread on Unix and at most one timeout can be armed at a time.
    """
    def decorator(func):
        def _handle_timeout(signum, frame):
            raise TimeoutError(error_message)

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            signal.signal(signal.SIGALRM, _handle_timeout)
            signal.alarm(seconds)
            try:
                # The alarm is always cancelled, even when func raises.
                return func(*args, **kwargs)
            finally:
                signal.alarm(0)

        return wrapper

    return decorator
|
|
|
|
|
|
|
|
|
|
|
|
def trimDescription(desc):
    """Strip guide-data copyright boilerplate and cap the length at 80."""
    desc = desc.strip()
    # Chop trailing rights notices appended by the program-guide provider.
    for marker in (". Copyright Tribune Media Services, Inc.",
                   ". * Copyright Rovi, Inc"):
        pos = desc.rfind(marker)
        if pos > 0:
            desc = desc[:pos]
    # Keep derived file names and log lines manageable.
    return desc[:80]
|
|
|
|
|
2017-04-13 21:40:12 +00:00
|
|
|
def saveCookies(session, filename):
    """Persist the session's cookies to *filename* in Mozilla format."""
    jar = cookielib.MozillaCookieJar(filename)
    # Copy every cookie from the requests session into the Mozilla jar.
    for c in session.cookies:
        logger.debug("storing cookie %s" % (c))
        jar.set_cookie(c)
    logger.debug("Saving cookies to %s" % (jar))
    # Keep session/expired cookies too; the TiVo's cookies are short-lived.
    jar.save(ignore_discard=True, ignore_expires=True)
|
|
|
|
|
|
|
|
|
2017-04-12 17:23:05 +00:00
|
|
|
class TivoException(Exception):
    """Application-level error while talking to the TiVo."""

    def __init__(self, value):
        # Keep the offending value for the string representation.
        self.value = value

    def __str__(self):
        # Equivalent to repr(self.value).
        return "%r" % (self.value,)
|
|
|
|
|
|
|
|
class TivoItem:
    """One recorded show from the TiVo's NowPlaying listing.

    Parses the relevant child elements of an <Item> DOM node and derives
    the name/path strings used for the downloaded files.
    """

    def __init__(self, i):
        # i: xml.dom <Item> element from the NowPlaying container.
        self.title = getTagText(i, "Title")
        self.episode = getTagText(i, "EpisodeTitle")
        self.episodeNumber = getTagText(i, "EpisodeNumber")
        self.description = trimDescription(getTagText(i, "Description"))
        # CaptureDate is a hex epoch timestamp string.
        d = getTagText(i, "CaptureDate")
        self.date = datetime.datetime.fromtimestamp(int(d, 16), pytz.utc)
        # Raw epoch seconds; base=0 honors the "0x" prefix.
        self.time = int(d, base=0)
        # Quarter-hour-rounded US/Eastern time for the short file name;
        # presumably recordings start near quarter-hour boundaries.
        est = pytz.timezone('US/Eastern')
        eastern = roundTime(self.date, 15*60).astimezone(est)
        self.datestr = self.date.strftime("%Y%m%d-%H%M")
        self.shortdate = eastern.strftime("%m%d-%H%M")
        self.url = getTagText(i, "Url")
        # Request the transport-stream variant of the recording.
        self.url = self.url + "&Format=video/x-tivo-mpeg-ts"
        self.inprogress = getTagText(i, "InProgress")
        self.available = getTagText(i, "Available")
        self.sourcesize = int(getTagText(i, "SourceSize"))
        self.highdef = getTagText(i, "HighDefinition")
        # Assume episode names are unique until TivoToc marks otherwise.
        self.unique = True
        if ignoreepisodetitle:
            self.episode = self.datestr
        # Fall back to the description or the capture date when there is
        # no episode title, so every item ends up with a usable name.
        if self.episode == "":
            if self.description != "":
                self.episode = self.description
            else:
                self.episode = self.datestr
        self.formatnames()

    def makeNotUnique(self):
        # Called when several items of this show share the same name; the
        # capture date is then worked into the name to disambiguate.
        self.unique = False
        self.formatnames()

    def formatnames(self):
        """Derive self.name, self.dir and self.file (UTF-8 byte strings)."""
        if self.episodeNumber and self.episodeNumber != u'0':
            en = int(self.episodeNumber)
            if en >= 100:
                # Encoded as season*100 + episode, e.g. 304 -> S03E04.
                self.name = "%s S%02dE%02d %s" % (self.title, en / 100, en % 100, self.episode)
            else:
                self.name = "%s E%s %s" % (self.title, self.episodeNumber, self.episode)
        elif self.unique:
            self.name = "%s - %s" % (self.title, self.episode)
        else:
            # Non-unique names get the capture date for disambiguation.
            self.name = "%s - %s - %s" % (self.title, self.datestr, self.episode)
        # ':' and '/' are unsafe in file names; replace with '-'.
        self.dir = "%s/%s" % (targetdir, re.sub("[:/]", "-", self.title))
        self.file = "%s/%s" % (self.dir, re.sub("[:/]", "-", self.name))
        self.name = self.name.encode("utf-8");
        self.dir = self.dir.encode("utf-8");
        self.file = self.file.encode("utf-8");

    def getPath(self, options):
        """Return the target path (without extension) for this item.

        options -- the matching IncludeShow; its .short name, when set,
        replaces the full title in the file name.
        """
        title = self.title
        if options.short:
            title = options.short
        if self.episodeNumber and self.episodeNumber != u'0':
            en = int(self.episodeNumber)
            if en >= 100:
                name = "%s S%02dE%02d %s" % (title, en / 100, en % 100, self.episode)
            else:
                name = "%s E%s %s" % (title, self.episodeNumber, self.episode)
        elif self.unique:
            name = "%s - %s" % (title, self.episode)
        else:
            # Uses the rounded Eastern-time short date, unlike formatnames.
            name = "%s - %s %s" % (title, self.shortdate, self.episode)
        path = "%s/%s" % (self.dir, re.sub("[:/]", "-", name))
        return path.encode("utf-8");

    def __str__(self):
        return repr(self.title)
|
|
|
|
|
|
|
|
|
|
|
|
class TivoToc:
    """The TiVo's table of contents (the NowPlaying listing).

    Downloads (or loads a cached copy of) the NowPlaying XML and turns it
    into TivoItem objects.  A small dbm database ("unique.db") remembers
    show titles whose episode names have ever collided, so those shows'
    file names always include the capture date.
    """

    def __init__(self):
        self.dom = None
        self.filename = "toc.xml"
        # Persistent record of titles with non-unique episode names.
        self.uniquedb = anydbm.open("unique.db", "c")
        self.items = []

    def load(self):
        """Parse the cached toc.xml and return the DOM."""
        # with-statement closes the file even if parsing fails (the
        # original leaked the handle on a parse error).
        with open(self.filename, "r") as fd:
            self.dom = xml.dom.minidom.parseString(fd.read())
        return self.dom

    def save(self):
        """Write the current DOM back to toc.xml, pretty-printed."""
        with open(self.filename, "w") as fd:
            fd.write(self.dom.toprettyxml())

    def download_chunk(self, offset):
        """Fetch one 50-item page of the NowPlaying container."""
        global session, proxies, headers

        params = {
            'Command': 'QueryContainer',
            'Container': '/NowPlaying',
            'Recurse': 'Yes',
            'ItemCount': '50',
            'AnchorOffset': offset
        }
        url = "https://{}/TiVoConnect".format(host)
        logger.debug(" offset %d" % (offset))
        r = session.get(url, params=params, timeout=30, verify=False, proxies=proxies, headers=headers)
        if r.status_code != 200:
            r.raise_for_status()
        return r.text

    def download(self):
        """Download the complete listing, merging all pages into one DOM."""
        global session
        offset = 0
        itemCount = 1
        self.dom = None
        root = None
        logger.info("*** Getting listing")
        # The TiVo pages the listing; keep fetching until a page is empty.
        while itemCount > 0:
            dom = xml.dom.minidom.parseString(self.download_chunk(offset))
            if self.dom == None:
                # First page: keep the whole document.
                self.dom = dom
                root = self.dom.childNodes.item(0)
            else:
                # Subsequent pages: graft their <Item> nodes onto the first.
                for child in dom.childNodes.item(0).childNodes:
                    if child.nodeName == "Item":
                        root.appendChild(child.cloneNode(True))
            itemCount = int(getElementText(dom.documentElement.childNodes, "ItemCount"))
            offset += itemCount
        saveCookies(session, cookies)
        return self.dom

    def getItems(self):
        """Return all TivoItems, marking titles with colliding names."""
        self.titles = {}
        for node in self.dom.getElementsByTagName("Item"):
            item = TivoItem(node)
            self.items.append(item)
            if item.title not in self.titles:
                self.titles[item.title] = []
            self.titles[item.title].append(item)
        # see if we have items that end up having an identical name; mark
        # the program title in uniquedb if that's the case
        for title in self.titles:
            names = {}
            for item in self.titles[title]:
                if item.name not in names:
                    names[item.name] = []
                names[item.name].append(item)
            for name in names:
                if len(names[name]) > 1:
                    self.uniquedb[title.encode("utf-8")] = "1"
        # Not all py2 dbm flavors implement sync(); call it when present.
        if getattr(self.uniquedb, "sync", None) and callable(self.uniquedb.sync):
            self.uniquedb.sync()
        # Once a title has ever collided, always use dated names for it.
        for item in self.items:
            if self.uniquedb.has_key(item.title.encode("utf-8")):
                item.makeNotUnique()
        return self.items
|
|
|
|
|
|
|
|
|
|
|
|
def getText(nodelist):
    """Concatenate the character data of all text nodes in *nodelist*."""
    return "".join(node.data for node in nodelist
                   if node.nodeType == node.TEXT_NODE)
|
|
|
|
|
|
|
|
def getTagText(element, tagname):
    """Return the text content of the first *tagname* child, or ""."""
    matches = element.getElementsByTagName(tagname)
    if not matches:
        # Missing tags are normal (e.g. no EpisodeTitle).
        return ""
    return getText(matches[0].childNodes)
|
|
|
|
|
|
|
|
def getElementText(nodes, name):
    """Return the text of the first element named *name* in *nodes*, or None."""
    matching = (n for n in nodes
                if n.nodeType == xml.dom.Node.ELEMENT_NODE and n.nodeName == name)
    node = next(matching, None)
    if node is None:
        return None
    return getText(node.childNodes)
|
|
|
|
|
|
|
|
def getAvail(dir):
    """Return the bytes available to unprivileged users on dir's filesystem."""
    st = os.statvfs(dir)
    return st.f_bavail * st.f_bsize
|
|
|
|
|
|
|
|
|
|
|
|
class FdLogger(threading.Thread):
    """Daemon thread that copies lines from a file object into a logger.

    Used to forward a subprocess's stdout/stderr into our log without
    blocking the main download loop.
    """

    def __init__(self, logger, lvl, fd):
        self.logger = logger  # destination logger
        self.lvl = lvl        # log level to emit the lines at
        self.fd = fd          # pipe (file object) to drain
        threading.Thread.__init__(self)
        # Daemon: don't keep the process alive if the pipe never closes.
        self.daemon = True
        # Starts itself; callers just construct and forget.
        self.start()

    def run(self):
        try:
            # for line in fd buffers, so use this instead
            for line in iter(self.fd.readline, b''):
                self.logger.log(self.lvl, ": %s", line.strip('\n'))
            self.fd.close()
        except Exception:
            # Log and exit the thread; never propagate into the runtime.
            self.logger.exception("")
|
|
|
|
|
|
|
|
|
2017-04-13 21:40:12 +00:00
|
|
|
# Give up after 12 hours; a stuck transfer would otherwise block the
# hourly cron job forever (the flock wrapper prevents overlapping runs).
@timeout(43200)
def download_item(item, mak, target):
    """Stream one recording from the TiVo through tivodecode into *target*.

    item   -- the TivoItem to fetch
    mak    -- the TiVo's media access key (passed to tivodecode)
    target -- output file name

    Raises TivoException when the downloaded file looks truncated.
    """
    global session, proxies, headers
    count = 0           # bytes downloaded so far
    start = time.time()
    upd = start         # time of the last progress log line
    url = item.url
    #url = re.sub("tivo.lassitu.de:80", "wavehh.lassitu.de:30080", url)
    logger.info("--- downloading \"%s\"" % (url))
    logger.info("  {}".format(target))
    start = time.time()
    # Stream the body; chunks are fed to tivodecode's stdin below.
    r = session.get(url, stream=True, verify=False, proxies=proxies, headers=headers)
    r.raise_for_status()

    try:
        # tivodecode decrypts the TiVo stream and writes target itself;
        # we pipe the download into its stdin.
        p_decode = subprocess.Popen([tivodecode, "--mak", mak, \
                "--no-verify", "--out", target, "-"], stdin=subprocess.PIPE,
                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        # Forward tivodecode's own output into our log.
        FdLogger(logger, logging.INFO, p_decode.stdout)
        FdLogger(logger, logging.INFO, p_decode.stderr)
        def info(signum, frame):
            # SIGINFO handler (Ctrl-T on BSD): print a progress line.
            # NOTE(review): 'now' and 'count' come from the enclosing
            # scope ('now' is unbound until the first chunk) and the
            # 'upd' assignment here is a dead local; confirm intent
            # before changing.
            upd = time.time()
            dur = now - start
            mb = count / 1e6
            print "%5.1f%% %5.3f GB downloaded in %.0f min, %.3f MB/s" % (
                    100.0 * count / item.sourcesize,
                    mb / 1e3, dur / 60, mb / dur)
        try:
            # SIGINFO only exists on BSD-ish systems; ignore elsewhere.
            signal.signal(signal.SIGINFO, info)
        except Exception:
            pass
        while True:
            time.sleep(0) # yield to logger threads
            chunk = r.raw.read(256*1024)
            if not chunk:
                break
            p_decode.stdin.write(chunk)
            count += len(chunk)
            now = time.time()
            # Log a progress line at most once a minute.
            if (now - upd) > 60:
                upd = now
                dur = now - start
                mb = count / 1e6
                logger.debug("  %5.1f%% %5.3f GB downloaded in %.0f min, %.3f MB/s" % (
                        100.0 * count / item.sourcesize,
                        mb / 1e3, dur / 60, mb / dur))
    except Exception as e:
        logger.error("problem decoding: %s" % (e))
        raise
    finally:
        try:
            signal.signal(signal.SIGINFO, signal.SIG_IGN)
        except Exception:
            pass
        elapsed = time.time() - start
        throughput = count / elapsed
        logger.info("%5.3fGB transferred in %d:%02d, %.1f MB/s" % (
                count/1e9, int(elapsed/3600), int(elapsed / 60) % 60, throughput/1e6))
        # Close tivodecode's stdin so it can finish; give it a second,
        # then terminate it if it still hasn't exited.
        try:
            p_decode.stdin.close()
            p_decode.poll()
            if p_decode.returncode == None:
                time.sleep(1)
                p_decode.poll()
            if p_decode.returncode == None:
                logger.debug("terminating tivodecode")
                p_decode.terminate()
        except Exception, e:
            pass
        p_decode.wait()
        logger.info("tivodecode exited with %s" % (p_decode.returncode))
    # Sanity check: reject obviously truncated downloads; the decoded file
    # should be roughly the advertised source size.
    size = os.path.getsize(target)
    if size < 1024 or size < item.sourcesize * 0.8:
        logger.error("error downloading file: too small")
        os.remove(target)
        raise TivoException("downloaded file is too small")
|
|
|
|
|
|
|
|
|
2017-04-16 15:46:43 +00:00
|
|
|
def download_decode(item, options, mak):
    """Download *item* to its final path, decoding on the fly.

    On any failure the partial output file is removed and the original
    exception is re-raised with its traceback; on success the file's
    mtime is set to the recording's capture time.
    """
    target = "%s.mpg" % item.getPath(options)
    try:
        os.makedirs(item.dir)
    except OSError:
        # The show's directory usually exists already.
        pass
    try:
        download_item(item, mak, target)
    except Exception, e:
        exc_info = sys.exc_info()
        # Remove the partial file; best-effort, it may not exist yet.
        try:
            os.remove(target)
        except Exception, e2:
            pass
        # Re-raise the original exception with its original traceback.
        raise exc_info[1], None, exc_info[2]
    try:
        # Stamp the file with the capture time so sorting by mtime works.
        os.utime(target, (item.time, item.time))
    except Exception, e:
        logger.error("Problem setting timestamp: {}".format(e))
|
2017-04-12 17:23:05 +00:00
|
|
|
|
|
|
|
|
2017-04-16 15:46:43 +00:00
|
|
|
def download_one(item, downloaddb, options):
    """Download one item and record the success in downloaddb.

    options -- the IncludeShow entry returned by wantitem()
    TivoException is logged and swallowed so the mirror loop continues
    with the next item.
    """
    global logger, mak
    logger.info("*** downloading \"%s\": %.3fGB" % (item.name, item.sourcesize / 1e9))
    try:
        download_decode(item, options, mak)
        # Remember the download so we never fetch this item again.
        downloaddb[item.name] = item.datestr
        # Not all py2 dbm flavors implement sync(); call it when present.
        if getattr(downloaddb, "sync", None) and callable(downloaddb.sync):
            downloaddb.sync()
        # Give the TiVo a breather between large transfers.
        logger.debug("Sleeping 30 seconds before moving on...")
        time.sleep(30)
    except TivoException, e:
        logger.info("Error processing \"%s\": %s" % (item.name, e))
|
|
|
|
|
|
|
|
|
|
|
|
def wantitem(item, downloaddb):
    """Decide whether *item* should be downloaded.

    Returns the matching IncludeShow when the item is wanted, otherwise
    a string describing why it is skipped (callers distinguish the two
    cases with isinstance).
    """
    if item.inprogress == "Yes":
        return "recording"
    if item.available == "No":
        return "not available"
    # downloaddb is a py2 dbm object; has_key is its membership test.
    if downloaddb.has_key(item.name):
        return "already downloaded"
    # Match on the full title, the episode title, or the composite name.
    for key in (item.title, item.episode, item.name):
        if key in IncludeShow.includes:
            return IncludeShow.includes[key]
    return "not included"
|
2017-04-12 17:23:05 +00:00
|
|
|
|
|
|
|
|
|
|
|
def mirror(toc, downloaddb, one=False):
    """Download every wanted show that hasn't been mirrored yet.

    one -- when True, stop after the first download ("mirrorone" command).
    """
    # Bail out early when the target disk is running low on space.
    avail = getAvail(targetdir)
    if avail < minfree:
        logger.error("%s: %.1fG available, at least %.1fG needed, stopping" % \
                (targetdir, avail / gig, minfree / gig))
        sys.exit(1)

    items = toc.getItems()
    logger.info("*** %d shows listed" % (len(items)))
    for item in items:
        options = wantitem(item, downloaddb)
        # wantitem returns a reason string to skip, or an IncludeShow.
        if isinstance(options, basestring):
            logger.debug("*** skipping \"%s\": %s" % (item.name, options))
        else:
            download_one(item, downloaddb, options)
            if one:
                break
|
|
|
|
|
|
|
|
|
|
|
|
def download_episode(toc, downloaddb, episode):
    """Download the item(s) matching *episode* by title, name, or episode.

    Implements the explicit "download" command; matching items are
    fetched even when the show is not on the include list.
    """
    items = toc.getItems()
    for item in items:
        if item.title == episode or item.name == episode or item.episode == episode:
            # download_one() requires the IncludeShow options (it supplies
            # the optional short name); use the registered entry when the
            # show is on the include list, otherwise register a fresh one.
            # This fixes a missing-argument TypeError in the original,
            # which called download_one(item, downloaddb) with two args.
            options = IncludeShow.includes.get(item.title)
            if options is None:
                options = IncludeShow(item.title)
            download_one(item, downloaddb, options)
|
|
|
|
|
|
|
|
|
|
|
|
def printtoc(toc, downloaddb):
    """Print every recording with its would-be download decision."""
    items = toc.getItems()
    print "*** %d shows listed" % (len(items))
    # Group items by show title so the output can be sorted.
    shows = {}
    for item in items:
        if item.title not in shows:
            shows[item.title] = []
        shows[item.title].append(item)
    for title in sorted(shows):
        for item in sorted(shows[title], key=lambda i: i.name):
            options = wantitem(item, downloaddb)
            # A string is the skip reason; otherwise the item would be fetched.
            if isinstance(options, basestring):
                print "%-7.7s: %s" % (options, item.name)
                continue
            print "*** downloading %s (%.3fGB)" % (item.name, item.sourcesize / 1e9)
|
|
|
|
|
|
|
|
|
|
|
|
def main():
    """Entry point: parse options, refresh the TOC, dispatch the command."""
    global ignoreepisodetitle, logger
    # NOTE(review): curdir is saved but never restored or used.
    curdir = os.getcwd()
    # All state files (toc.xml, *.db, log, cookies) live in ~/.tivo.
    os.chdir(os.path.expanduser("~") + "/.tivo")
    handler = logging.handlers.RotatingFileHandler("tivomirror.log", maxBytes=2*1024*1024, backupCount=5)
    # Embed the PID in the format string itself (the outer %d); the inner
    # %%-escapes are the live logging placeholders.
    handler.setFormatter(logging.Formatter(fmt='tivomirror[%d] %%(asctime)s %%(levelname)6.6s %%(message)s' % (os.getpid()),
            datefmt='%H:%M:%S'))
    logger.addHandler(handler)
    downloaddb = anydbm.open("downloads.db", "c")
    toc = TivoToc()
    cmd = "list"            # default command when none is given
    updateToc = False

    try:
        options, remainder = getopt.getopt(sys.argv[1:], 'dvuT',
                ['ignoreepisodetitle', 'debug', 'verbose', 'update'])

        for opt, arg in options:
            if opt in ('-d', '--debug'):
                logger.setLevel(logging.DEBUG)
            if opt in ('-v', '--verbose'):
                # Also log to stderr when run interactively.
                handler = logging.StreamHandler()
                logger.addHandler(handler)
            if opt in ('-u', '--update'):
                updateToc = True
            if opt in ('-T', '--ignoreepisodetitle'):
                ignoreepisodetitle = True

        if len(remainder) >= 1:
            cmd = remainder[0]

        # "mirror" always needs a fresh listing; other commands reuse the
        # cached toc.xml unless -u was given.
        if updateToc or cmd == "mirror":
            toc.download()
            toc.save()
        else:
            toc.load()

        if cmd == "mirror":
            mirror(toc, downloaddb)
        elif cmd == "mirrorone":
            # Download at most one show, then stop.
            mirror(toc, downloaddb, True)
        elif cmd == "list":
            printtoc(toc, downloaddb)
        elif cmd == "download":
            # Explicitly download the named episode/show.
            download_episode(toc, downloaddb, remainder[1])
        else:
            logger.error("invalid command %s" % (cmd))
            print >>sys.stderr, "invalid command %s" % (cmd)
            sys.exit(64)

        # NOTE(review): close() is skipped when an exception lands in the
        # handler below; download_one syncs after each download, so this
        # is best-effort only.
        downloaddb.close()
    except Exception:
        logger.exception("")
    logger.info("*** Completed")
|
|
|
|
|
|
|
|
if __name__ == "__main__":
|
|
|
|
main()
|