Skip to content

Commit

Permalink
Merge pull request #120 from mediathekview/develop
Browse files Browse the repository at this point in the history
Release 0.6.2
  • Loading branch information
tuxpoldo committed Mar 10, 2019
2 parents 06b3877 + 8937c97 commit 8cfccdc
Show file tree
Hide file tree
Showing 6 changed files with 81 additions and 25 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
*.pyo
*.pyc
*.db
__pycache__
/Filmliste-*
/.vscode
/venv
10 changes: 8 additions & 2 deletions addon.xml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<addon id="plugin.video.mediathekview"
name="MediathekView"
version="0.6.0"
version="0.6.2"
provider-name="MediathekView.de, Leo Moll">
<requires>
<import addon="xbmc.python" version="2.25.0"/>
Expand All @@ -26,7 +26,13 @@
<description lang="de_DE">Ermöglicht den Zugriff auf fast alle deutschen Mediatheken der öffentlich Rechtlichen basierend auf der Datenbank von MediathekView.de</description>
<description lang="en_GB">Gives access to most video-platforms from German public service broadcasters using the database of MediathekView.de</description>
<description lang="it_IT">Fornisce l'accesso a gran parte delle piattaforme video operate dalle emittenti pubbliche tedesche usando la banca dati di MediathekView.de</description>
<news>v0.6.0 (2019-03-01):
<news>v.0.6.2 (2019-03-10):
- Bugfix: Entpacken von GZ-Archiven auf FireTV 4K und ähnliche Geräte funktioniert nun wieder
- Bugfix: mvupdate funktioniert nun wieder
- Compliance: Code Kompatibilität für zukünftige Kodi Versionen mit Python 3
v.0.6.1 (2019-03-08):
- Bugfix: Fixed module exception due to case error
v0.6.0 (2019-03-01):
- Feature: Cache der Anfragen für lokale Datenbanken
- Feature: Natives ultraschnelles Update für SQLite
- Bugfix: Live Streams konnten heruntergeladen werden
Expand Down
17 changes: 12 additions & 5 deletions resources/lib/kodi/kodiaddon.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,20 +2,27 @@
"""
The Kodi addons module
Copyright 2017-2018, Leo Moll and Dominik Schlösser
Copyright 2017-2019, Leo Moll and Dominik Schlösser
Licensed under MIT License
"""
import os
import sys
import urllib
import urlparse

# pylint: disable=import-error
import xbmc
import xbmcgui
import xbmcaddon
import xbmcplugin

try:
# Python 3.x
from urllib.parse import urlencode
from urllib.parse import parse_qs
except ImportError:
# Python 2.x
from urllib import urlencode
from urlparse import parse_qs

from resources.lib.kodi.kodilogger import KodiLogger


Expand Down Expand Up @@ -96,7 +103,7 @@ class KodiPlugin(KodiAddon):

def __init__(self):
KodiAddon.__init__(self)
self.args = urlparse.parse_qs(sys.argv[2][1:])
self.args = parse_qs(sys.argv[2][1:])
self.base_url = sys.argv[0]
self.addon_handle = int(sys.argv[1])

Expand Down Expand Up @@ -141,7 +148,7 @@ def build_url(self, params):
Args:
params(object): an object containing parameters
"""
return self.base_url + '?' + urllib.urlencode(params)
return self.base_url + '?' + urlencode(params)

def run_plugin(self, params):
"""
Expand Down
5 changes: 3 additions & 2 deletions resources/lib/mvupdate.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,9 @@ class Settings(object):
def __init__(self, args):
self.datapath = args.path if args.dbtype == 'sqlite' else './'
self.type = {'sqlite': 0, 'mysql': 1}.get(args.dbtype, 0)
if self.type == 1:
if self.type == 0:
self.updnative = args.native
elif self.type == 1:
self.host = args.host
self.port = int(args.port)
self.user = args.user
Expand All @@ -38,7 +40,6 @@ def __init__(self, args):
self.recentmode = 0
self.groupshows = False
self.updmode = 3
self.updnative = args.native
self.updinterval = args.intervall

@staticmethod
Expand Down
33 changes: 25 additions & 8 deletions resources/lib/mvutils.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,14 @@
import stat
import string

import urllib
import urllib2
# pylint: disable=import-error
try:
# Python 3.x
from urllib.parse import urlencode
from urllib.request import urlopen
except ImportError:
from urllib import urlencode
from urllib2 import urlopen

from contextlib import closing
from resources.lib.exceptions import ExitRequested
Expand Down Expand Up @@ -108,12 +114,23 @@ def file_rename(srcname, dstname):
return False


def find_gzip():
    """
    Locate the gzip decompressor binary.

    Probes a fixed set of well-known installation paths (including
    the Android-style ``/system/bin`` location) and returns the
    first one that exists on this system.

    Returns:
        The full path of the gzip executable, or ``None`` if no
        candidate path exists.
    """
    candidates = (
        '/bin/gzip',
        '/usr/bin/gzip',
        '/usr/local/bin/gzip',
        '/system/bin/gzip',
    )
    return next((path for path in candidates if file_exists(path)), None)


def find_xz():
"""
Return the full pathname to the xz decompressor
executable
"""
for xzbin in ['/bin/xz', '/usr/bin/xz', '/usr/local/bin/xz']:
for xzbin in ['/bin/xz', '/usr/bin/xz', '/usr/local/bin/xz', '/system/bin/xz']:
if file_exists(xzbin):
return xzbin
return None
Expand All @@ -125,7 +142,7 @@ def make_search_string(val):
containing only a well defined set of characters
for a simplified search
"""
cset = string.letters + string.digits + ' _-#'
cset = string.ascii_letters + string.digits + ' _-#'
search = ''.join([c for c in val if c in cset])
return search.upper().strip()

Expand Down Expand Up @@ -156,7 +173,7 @@ def cleanup_filename(val):
Args:
val(str): input string
"""
cset = string.letters + string.digits + \
cset = string.ascii_letters + string.digits + \
u' _-#äöüÄÖÜßáàâéèêíìîóòôúùûÁÀÉÈÍÌÓÒÚÙçÇœ'
search = ''.join([c for c in val if c in cset])
return search.strip()
Expand Down Expand Up @@ -185,7 +202,7 @@ def url_retrieve(url, filename, reporthook, chunk_size=8192, aborthook=None):
each block read thereafter. If specified the operation will be
aborted if the hook function returns `True`
"""
with closing(urllib2.urlopen(url)) as src, closing(open(filename, 'wb')) as dst:
with closing(urlopen(url)) as src, closing(open(filename, 'wb')) as dst:
_chunked_url_copier(src, dst, reporthook, chunk_size, aborthook)


Expand Down Expand Up @@ -213,7 +230,7 @@ def url_retrieve_vfs(url, filename, reporthook, chunk_size=8192, aborthook=None)
each block read thereafter. If specified the operation will be
aborted if the hook function returns `True`
"""
with closing(urllib2.urlopen(url)) as src, closing(xbmcvfs.File(filename, 'wb')) as dst:
with closing(urlopen(url)) as src, closing(xbmcvfs.File(filename, 'wb')) as dst:
_chunked_url_copier(src, dst, reporthook, chunk_size, aborthook)


Expand All @@ -224,7 +241,7 @@ def build_url(query):
Args:
query(object): a query object
"""
return sys.argv[0] + '?' + urllib.urlencode(query)
return sys.argv[0] + '?' + urlencode(query)


def _chunked_url_copier(src, dst, reporthook, chunk_size, aborthook):
Expand Down
40 changes: 32 additions & 8 deletions resources/lib/updater.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,10 +8,17 @@
# -- Imports ------------------------------------------------
import os
import time
import urllib2
import datetime
import subprocess

# pylint: disable=import-error
try:
# Python 3.x
from urllib.error import URLError
except ImportError:
# Python 2.x
from urllib2 import URLError

from contextlib import closing

import ijson
Expand Down Expand Up @@ -338,7 +345,7 @@ def get_newest_list(self, full):
reporthook=self.notifier.hook_download_progress,
aborthook=self.monitor.abort_requested
)
except urllib2.URLError as err:
except URLError as err:
self.logger.error('Failure downloading {} - {}', url, err)
self.notifier.close_download_progress()
self.notifier.show_download_error(url, err)
Expand Down Expand Up @@ -369,7 +376,7 @@ def get_newest_list(self, full):
retval = self._decompress_gz(compfile, destfile)
self.logger.info('Return {}', retval)
else:
# should nebver reach
# should never reach
pass

self.notifier.close_download_progress()
Expand Down Expand Up @@ -587,20 +594,36 @@ def _decompress_bz2(self, sourcefile, destfile):
return 0

def _decompress_gz(self, sourcefile, destfile):
"""
blocksize = 8192
# pylint: disable=broad-except

try:
with open(destfile, 'wb') as dstfile, gzip.open(sourcefile) as srcfile:
for data in iter(lambda: srcfile.read(blocksize), b''):
dstfile.write(data)
# pylint: disable=broad-except
except Exception as err:
self.logger.error('gz decompression of "{}" to "{}" failed: {}'.format(
sourcefile, destfile, err))
self.logger.error(
'gz decompression of "{}" to "{}" failed: {}', sourcefile, destfile, err)
if mvutils.find_gzip() is not None:
gzip_binary = mvutils.find_gzip()
self.logger.info(
'Trying to decompress gzip file "{}" using {}...', sourcefile, gzip_binary)
try:
mvutils.file_remove(destfile)
retval = subprocess.call([gzip_binary, '-d', sourcefile])
self.logger.info('Calling {} -d {} returned {}',
gzip_binary, sourcefile, retval)
return retval
except Exception as err:
self.logger.error(
'gz commandline decompression of "{}" to "{}" failed: {}',
sourcefile, destfile, err)
return -1
return 0
"""

# pylint: disable=pointless-string-statement
"""
def _decompress_gz(self, sourcefile, destfile):
blocksize = 8192
# pylint: disable=broad-except,line-too-long
Expand Down Expand Up @@ -631,3 +654,4 @@ def _decompress_gz(self, sourcefile, destfile):
sourcefile, destfile, err))
return -1
return 0
"""

0 comments on commit 8cfccdc

Please sign in to comment.