From efe66d779a6e25c55781ffcf5269ebf0e4266193 Mon Sep 17 00:00:00 2001 From: Michal Paulovic Date: Thu, 18 Oct 2018 14:34:21 +0200 Subject: [PATCH 01/23] add TMDB provider --- lib/AutoCompletion.py | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/lib/AutoCompletion.py b/lib/AutoCompletion.py index df6152f..2dfadc9 100644 --- a/lib/AutoCompletion.py +++ b/lib/AutoCompletion.py @@ -43,6 +43,8 @@ def get_autocomplete_items(search_str, limit=10, provider=None): provider = BingProvider(limit=limit) elif SETTING("autocomplete_provider") == "netflix": provider = NetflixProvider(limit=limit) + elif SETTING("autocomplete_provider") == "tmdb": + provider = TmdbProvider(limit=limit) else: provider = LocalDictProvider(limit=limit) provider.limit = limit @@ -138,6 +140,33 @@ def fetch_data(self, search_str): return [] return [i["title"] for i in result["groups"][0]["items"]] +class TmdbProvider(BaseProvider): + + BASE_URL = "https://www.themoviedb.org/search/multi?" + + def __init__(self, *args, **kwargs): + super(TmdbProvider, self).__init__(*args, **kwargs) + + def fetch_data(self, search_str): + url = "language=%s&query=%s" % (SETTING("autocomplete_lang"),urllib.quote_plus(search_str)) + result = get_JSON_response(url=self.BASE_URL + url, + headers=HEADERS, + folder="TMDB") + if not result or "results" not in result: + return [] + out = [] + for i in result["results"]: + title = None + if "media_type" in i: + if i["media_type"] == "movie": + title = i["title"] + elif i["media_type"] in ["tv", "person"]: + title = i["name"] + else: + title = i + out.append(title) + return out + class LocalDictProvider(BaseProvider): From 6b9167bd0dc5cb340e08fa0b1f5b1c84e1d4f06d Mon Sep 17 00:00:00 2001 From: Michal Paulovic Date: Thu, 18 Oct 2018 14:44:34 +0200 Subject: [PATCH 02/23] fix Netflix cache folder --- lib/AutoCompletion.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/AutoCompletion.py b/lib/AutoCompletion.py index 
2dfadc9..eb11022 100644 --- a/lib/AutoCompletion.py +++ b/lib/AutoCompletion.py @@ -135,7 +135,7 @@ def fetch_data(self, search_str): url = "term=%s" % (urllib.quote_plus(search_str)) result = get_JSON_response(url=self.BASE_URL + url, headers=HEADERS, - folder="Bing") + folder="Netflix") if not result or not result["groups"]: return [] return [i["title"] for i in result["groups"][0]["items"]] From 5cb2887cc44ca69dc8f73aa0ad2ad4427c8d3197 Mon Sep 17 00:00:00 2001 From: finkleandeinhorn Date: Fri, 17 Jun 2022 06:35:59 -0700 Subject: [PATCH 03/23] Remove Netflix provider --- lib/AutoCompletion.py | 18 ------------------ 1 file changed, 18 deletions(-) diff --git a/lib/AutoCompletion.py b/lib/AutoCompletion.py index eb11022..6d6ce52 100644 --- a/lib/AutoCompletion.py +++ b/lib/AutoCompletion.py @@ -41,8 +41,6 @@ def get_autocomplete_items(search_str, limit=10, provider=None): provider = GoogleProvider(limit=limit) elif SETTING("autocomplete_provider") == "bing": provider = BingProvider(limit=limit) - elif SETTING("autocomplete_provider") == "netflix": - provider = NetflixProvider(limit=limit) elif SETTING("autocomplete_provider") == "tmdb": provider = TmdbProvider(limit=limit) else: @@ -124,22 +122,6 @@ def fetch_data(self, search_str): return result[1] -class NetflixProvider(BaseProvider): - - BASE_URL = "http://api-global.netflix.com/desktop/search/autocomplete?" - - def __init__(self, *args, **kwargs): - super(NetflixProvider, self).__init__(*args, **kwargs) - - def fetch_data(self, search_str): - url = "term=%s" % (urllib.quote_plus(search_str)) - result = get_JSON_response(url=self.BASE_URL + url, - headers=HEADERS, - folder="Netflix") - if not result or not result["groups"]: - return [] - return [i["title"] for i in result["groups"][0]["items"]] - class TmdbProvider(BaseProvider): BASE_URL = "https://www.themoviedb.org/search/multi?" 
From d8087ac7429c70923ac2e74ab66061c8cd83487c Mon Sep 17 00:00:00 2001 From: finkleandeinhorn <47431670+finkleandeinhorn@users.noreply.github.com> Date: Fri, 17 Jun 2022 12:12:50 -0700 Subject: [PATCH 04/23] Py3 updates --- lib/AutoCompletion.py | 77 +++++++++++++++++++++---------------------- 1 file changed, 37 insertions(+), 40 deletions(-) diff --git a/lib/AutoCompletion.py b/lib/AutoCompletion.py index 6d6ce52..525854a 100644 --- a/lib/AutoCompletion.py +++ b/lib/AutoCompletion.py @@ -3,20 +3,16 @@ # Copyright (C) 2015 - Philipp Temminghoff # This program is Free Software see LICENSE file for details -import sys -import urllib -import codecs +from urllib.parse import quote_plus import os import time import hashlib import requests -import simplejson +import json +import xbmc import xbmcaddon import xbmcvfs -import xbmc - -PY2 = sys.version_info[0] == 2 HEADERS = {'User-agent': 'Mozilla/5.0'} @@ -24,9 +20,7 @@ SETTING = ADDON.getSetting ADDON_PATH = os.path.join(os.path.dirname(__file__), "..") ADDON_ID = ADDON.getAddonInfo('id') -ADDON_DATA_PATH = xbmc.translatePath("special://profile/addon_data/%s" % ADDON_ID) -if PY2: - ADDON_DATA_PATH = ADDON_DATA_PATH.decode("utf-8") +ADDON_DATA_PATH = xbmcvfs.translatePath("special://profile/addon_data/%s" % ADDON_ID) def get_autocomplete_items(search_str, limit=10, provider=None): @@ -50,8 +44,6 @@ def get_autocomplete_items(search_str, limit=10, provider=None): def prep_search_str(text): - if not isinstance(text, unicode): - text = text.decode('utf-8') for char in text: if 1488 <= ord(char) <= 1514: return text[::-1] @@ -59,7 +51,6 @@ def prep_search_str(text): class BaseProvider(object): - def __init__(self, *args, **kwargs): self.limit = kwargs.get("limit", 10) @@ -72,7 +63,7 @@ def get_predictions(self, search_str): li = {"label": item, "search_string": prep_search_str(item)} items.append(li) - if i > self.limit: + if i > int(self.limit): break return items @@ -92,7 +83,10 @@ def __init__(self, *args, **kwargs): 
self.youtube = kwargs.get("youtube", False) def fetch_data(self, search_str): - url = "search?hl=%s&q=%s&json=t&client=serp" % (SETTING("autocomplete_lang"), urllib.quote_plus(search_str)) + url = "search?hl=%s&q=%s&json=t&client=serp" % ( + SETTING("autocomplete_lang"), + quote_plus(search_str), + ) if self.youtube: url += "&ds=yt" result = get_JSON_response(url=self.BASE_URL + url, @@ -112,10 +106,10 @@ def __init__(self, *args, **kwargs): super(BingProvider, self).__init__(*args, **kwargs) def fetch_data(self, search_str): - url = "query=%s" % (urllib.quote_plus(search_str)) - result = get_JSON_response(url=self.BASE_URL + url, - headers=HEADERS, - folder="Bing") + url = "query=%s" % (quote_plus(search_str)) + result = get_JSON_response( + url=self.BASE_URL + url, headers=HEADERS, folder="Bing" + ) if not result: return [] else: @@ -130,10 +124,13 @@ def __init__(self, *args, **kwargs): super(TmdbProvider, self).__init__(*args, **kwargs) def fetch_data(self, search_str): - url = "language=%s&query=%s" % (SETTING("autocomplete_lang"),urllib.quote_plus(search_str)) - result = get_JSON_response(url=self.BASE_URL + url, - headers=HEADERS, - folder="TMDB") + url = "language=%s&query=%s" % ( + SETTING("autocomplete_lang"), + quote_plus(search_str), + ) + result = get_JSON_response( + url=self.BASE_URL + url, headers=HEADERS, folder="TMDB" + ) if not result or "results" not in result: return [] out = [] @@ -151,7 +148,6 @@ def fetch_data(self, search_str): class LocalDictProvider(BaseProvider): - def __init__(self, *args, **kwargs): super(LocalDictProvider, self).__init__(*args, **kwargs) @@ -165,14 +161,14 @@ def get_predictions(self, search_str): search_str = search_str[k + 1:] local = SETTING("autocomplete_lang_local") path = os.path.join(ADDON_PATH, "resources", "data", "common_%s.txt" % (local if local else "en")) - with codecs.open(path, encoding="utf8") as f: - for line in f.readlines(): + with xbmcvfs.File(path) as f: + for line in f.read().split('\n'): if not 
line.startswith(search_str) or len(line) <= 2: continue li = {"label": line, "search_string": line} listitems.append(li) - if len(listitems) > self.limit: + if len(listitems) > int(self.limit): break return listitems @@ -182,7 +178,7 @@ def get_JSON_response(url="", cache_days=7.0, folder=False, headers=False): get JSON response for *url, makes use of file cache. """ now = time.time() - hashed_url = hashlib.md5(url).hexdigest() + hashed_url = hashlib.md5(url.encode('utf-8')).hexdigest() if folder: cache_path = xbmc.translatePath(os.path.join(ADDON_DATA_PATH, folder)) else: @@ -195,7 +191,7 @@ def get_JSON_response(url="", cache_days=7.0, folder=False, headers=False): else: response = get_http(url, headers) try: - results = simplejson.loads(response) + results = json.loads(response) log("download %s. time: %f" % (url, time.time() - now)) save_to_file(results, hashed_url, cache_path) except Exception: @@ -218,7 +214,8 @@ def get_http(url=None, headers=False): succeed = 0 if not headers: headers = {'User-agent': 'XBMC/16.0 ( phil65@kodi.tv )'} - while (succeed < 2) and (not xbmc.abortRequested): + monitor = xbmc.Monitor() + while (succeed < 2) and (not monitor.abortRequested()): try: r = requests.get(url, headers=headers) if r.status_code != 200: @@ -226,7 +223,7 @@ def get_http(url=None, headers=False): return r.text except Exception: log("get_http: could not get data from %s" % url) - xbmc.sleep(1000) + monitor.waitForAbort(1) succeed += 1 return None @@ -237,24 +234,22 @@ def read_from_file(path="", raw=False): """ if not xbmcvfs.exists(path): return False + try: - with open(path) as f: + with xbmcvfs.File(path) as f: log("opened textfile %s." 
% (path)) if raw: return f.read() else: - return simplejson.load(f) + return json.load(f) except Exception: log("failed to load textfile: " + path) return False def log(txt): - if isinstance(txt, str): - txt = txt.decode("utf-8", 'ignore') message = u'%s: %s' % (ADDON_ID, txt) - xbmc.log(msg=message.encode("utf-8", 'ignore'), - level=xbmc.LOGDEBUG) + xbmc.log(msg=message, level=xbmc.LOGDEBUG) def save_to_file(content, filename, path=""): @@ -263,10 +258,12 @@ def save_to_file(content, filename, path=""): """ if not xbmcvfs.exists(path): xbmcvfs.mkdirs(path) + text_file_path = os.path.join(path, filename + ".txt") now = time.time() - text_file = xbmcvfs.File(text_file_path, "w") - simplejson.dump(content, text_file) - text_file.close() + + with xbmcvfs.File(text_file_path, "w") as text_file: + json.dump(content, text_file) + log("saved textfile %s. Time: %f" % (text_file_path, time.time() - now)) return True From a55af72901de1e999c9fdb556ce03bd4a30f1f71 Mon Sep 17 00:00:00 2001 From: finkleandeinhorn Date: Sat, 18 Jun 2022 10:29:57 -0700 Subject: [PATCH 05/23] Add kodi-addon-submitter to CI --- .github/workflows/addon-submitter.yml | 70 +++++++++++++++++++++++++++ 1 file changed, 70 insertions(+) create mode 100644 .github/workflows/addon-submitter.yml diff --git a/.github/workflows/addon-submitter.yml b/.github/workflows/addon-submitter.yml new file mode 100644 index 0000000..9a5bbed --- /dev/null +++ b/.github/workflows/addon-submitter.yml @@ -0,0 +1,70 @@ +name: Kodi Addon-Submitter + +on: + create: + tags: + - v* + +jobs: + kodi-addon-submitter: + runs-on: ubuntu-latest + name: Kodi Addon Submitter + steps: + + - name: Checkout + uses: actions/checkout@v1 + + - name: Extract kodi official repository target + id: extract_branch_pr + shell: bash + env: + ADDON_ID: ${{ github.event.repository.name }} + run: | + echo "##[set-output name=branch;]$(git --no-pager branch -a --contains "$GITHUB_SHA" | grep 'remotes/origin/' | cut -d '/' -f3)" + if [[ $ADDON_ID == 
plugin* ]]; then + echo "##[set-output name=repo;]repo-plugins" + else + echo "##[set-output name=repo;]repo-scripts" + fi + + - name: Kodi addon checker validation + id: kodi-addon-checker + uses: xbmc/action-kodi-addon-checker@v1.2 + with: + kodi-version: ${{ steps.extract_branch_pr.outputs.branch }} + addon-id: ${{ github.event.repository.name }} + is-pr: false + + - name: Generate distribution zip and submit to official kodi repository + id: kodi-addon-submitter + uses: xbmc/action-kodi-addon-submitter@v1.2 + with: # Replace all the below values + kodi-repository: ${{ steps.extract_branch_pr.outputs.repo }} + kodi-version: ${{ steps.extract_branch_pr.outputs.branch }} + addon-id: ${{ github.event.repository.name }} + env: # Make sure you create the below secrets (GH_TOKEN and EMAIL) + GH_USERNAME: ${{ github.repository_owner }} + GH_TOKEN: ${{secrets.GH_TOKEN}} + EMAIL: ${{secrets.EMAIL}} + + - name: Create Github Release + id: create_release + uses: actions/create-release@v1.0.0 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + tag_name: ${{ github.ref }} + release_name: Release ${{ github.ref }} + draft: false + prerelease: false + + - name: Upload Addon zip to github release + id: upload-release-asset + uses: actions/upload-release-asset@v1.0.1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_release.outputs.upload_url }} + asset_path: ${{ steps.kodi-addon-submitter.outputs.addon-zip }} + asset_name: ${{ steps.kodi-addon-submitter.outputs.addon-zip }} + asset_content_type: application/zip \ No newline at end of file From 692737cca1d8c3843b6bcffe48f6d81b984ffb3a Mon Sep 17 00:00:00 2001 From: finkleandeinhorn <47431670+finkleandeinhorn@users.noreply.github.com> Date: Fri, 17 Jun 2022 12:37:30 -0700 Subject: [PATCH 06/23] Update addon.xml --- addon.xml | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/addon.xml b/addon.xml index c16dca4..4085ad2 100644 --- a/addon.xml +++ b/addon.xml 
@@ -1,8 +1,7 @@ - + - - + @@ -12,5 +11,8 @@ all GNU GENERAL PUBLIC LICENSE Version 2.1, February 1999 https://github.com/phil65/script.module.autocompletion + + icon.png + From 27aa9043d9898e8e43e96f79346f2cee88df5d66 Mon Sep 17 00:00:00 2001 From: finkleandeinhorn Date: Fri, 17 Jun 2022 06:35:59 -0700 Subject: [PATCH 07/23] Remove Netflix provider --- lib/AutoCompletion.py | 18 ------------------ 1 file changed, 18 deletions(-) diff --git a/lib/AutoCompletion.py b/lib/AutoCompletion.py index eb11022..6d6ce52 100644 --- a/lib/AutoCompletion.py +++ b/lib/AutoCompletion.py @@ -41,8 +41,6 @@ def get_autocomplete_items(search_str, limit=10, provider=None): provider = GoogleProvider(limit=limit) elif SETTING("autocomplete_provider") == "bing": provider = BingProvider(limit=limit) - elif SETTING("autocomplete_provider") == "netflix": - provider = NetflixProvider(limit=limit) elif SETTING("autocomplete_provider") == "tmdb": provider = TmdbProvider(limit=limit) else: @@ -124,22 +122,6 @@ def fetch_data(self, search_str): return result[1] -class NetflixProvider(BaseProvider): - - BASE_URL = "http://api-global.netflix.com/desktop/search/autocomplete?" - - def __init__(self, *args, **kwargs): - super(NetflixProvider, self).__init__(*args, **kwargs) - - def fetch_data(self, search_str): - url = "term=%s" % (urllib.quote_plus(search_str)) - result = get_JSON_response(url=self.BASE_URL + url, - headers=HEADERS, - folder="Netflix") - if not result or not result["groups"]: - return [] - return [i["title"] for i in result["groups"][0]["items"]] - class TmdbProvider(BaseProvider): BASE_URL = "https://www.themoviedb.org/search/multi?" 
From c2743aac71eaf877f0127771b713ffb751c5eda4 Mon Sep 17 00:00:00 2001 From: finkleandeinhorn <47431670+finkleandeinhorn@users.noreply.github.com> Date: Fri, 17 Jun 2022 12:12:50 -0700 Subject: [PATCH 08/23] Py3 updates --- lib/AutoCompletion.py | 77 +++++++++++++++++++++---------------------- 1 file changed, 37 insertions(+), 40 deletions(-) diff --git a/lib/AutoCompletion.py b/lib/AutoCompletion.py index 6d6ce52..525854a 100644 --- a/lib/AutoCompletion.py +++ b/lib/AutoCompletion.py @@ -3,20 +3,16 @@ # Copyright (C) 2015 - Philipp Temminghoff # This program is Free Software see LICENSE file for details -import sys -import urllib -import codecs +from urllib.parse import quote_plus import os import time import hashlib import requests -import simplejson +import json +import xbmc import xbmcaddon import xbmcvfs -import xbmc - -PY2 = sys.version_info[0] == 2 HEADERS = {'User-agent': 'Mozilla/5.0'} @@ -24,9 +20,7 @@ SETTING = ADDON.getSetting ADDON_PATH = os.path.join(os.path.dirname(__file__), "..") ADDON_ID = ADDON.getAddonInfo('id') -ADDON_DATA_PATH = xbmc.translatePath("special://profile/addon_data/%s" % ADDON_ID) -if PY2: - ADDON_DATA_PATH = ADDON_DATA_PATH.decode("utf-8") +ADDON_DATA_PATH = xbmcvfs.translatePath("special://profile/addon_data/%s" % ADDON_ID) def get_autocomplete_items(search_str, limit=10, provider=None): @@ -50,8 +44,6 @@ def get_autocomplete_items(search_str, limit=10, provider=None): def prep_search_str(text): - if not isinstance(text, unicode): - text = text.decode('utf-8') for char in text: if 1488 <= ord(char) <= 1514: return text[::-1] @@ -59,7 +51,6 @@ def prep_search_str(text): class BaseProvider(object): - def __init__(self, *args, **kwargs): self.limit = kwargs.get("limit", 10) @@ -72,7 +63,7 @@ def get_predictions(self, search_str): li = {"label": item, "search_string": prep_search_str(item)} items.append(li) - if i > self.limit: + if i > int(self.limit): break return items @@ -92,7 +83,10 @@ def __init__(self, *args, **kwargs): 
self.youtube = kwargs.get("youtube", False) def fetch_data(self, search_str): - url = "search?hl=%s&q=%s&json=t&client=serp" % (SETTING("autocomplete_lang"), urllib.quote_plus(search_str)) + url = "search?hl=%s&q=%s&json=t&client=serp" % ( + SETTING("autocomplete_lang"), + quote_plus(search_str), + ) if self.youtube: url += "&ds=yt" result = get_JSON_response(url=self.BASE_URL + url, @@ -112,10 +106,10 @@ def __init__(self, *args, **kwargs): super(BingProvider, self).__init__(*args, **kwargs) def fetch_data(self, search_str): - url = "query=%s" % (urllib.quote_plus(search_str)) - result = get_JSON_response(url=self.BASE_URL + url, - headers=HEADERS, - folder="Bing") + url = "query=%s" % (quote_plus(search_str)) + result = get_JSON_response( + url=self.BASE_URL + url, headers=HEADERS, folder="Bing" + ) if not result: return [] else: @@ -130,10 +124,13 @@ def __init__(self, *args, **kwargs): super(TmdbProvider, self).__init__(*args, **kwargs) def fetch_data(self, search_str): - url = "language=%s&query=%s" % (SETTING("autocomplete_lang"),urllib.quote_plus(search_str)) - result = get_JSON_response(url=self.BASE_URL + url, - headers=HEADERS, - folder="TMDB") + url = "language=%s&query=%s" % ( + SETTING("autocomplete_lang"), + quote_plus(search_str), + ) + result = get_JSON_response( + url=self.BASE_URL + url, headers=HEADERS, folder="TMDB" + ) if not result or "results" not in result: return [] out = [] @@ -151,7 +148,6 @@ def fetch_data(self, search_str): class LocalDictProvider(BaseProvider): - def __init__(self, *args, **kwargs): super(LocalDictProvider, self).__init__(*args, **kwargs) @@ -165,14 +161,14 @@ def get_predictions(self, search_str): search_str = search_str[k + 1:] local = SETTING("autocomplete_lang_local") path = os.path.join(ADDON_PATH, "resources", "data", "common_%s.txt" % (local if local else "en")) - with codecs.open(path, encoding="utf8") as f: - for line in f.readlines(): + with xbmcvfs.File(path) as f: + for line in f.read().split('\n'): if not 
line.startswith(search_str) or len(line) <= 2: continue li = {"label": line, "search_string": line} listitems.append(li) - if len(listitems) > self.limit: + if len(listitems) > int(self.limit): break return listitems @@ -182,7 +178,7 @@ def get_JSON_response(url="", cache_days=7.0, folder=False, headers=False): get JSON response for *url, makes use of file cache. """ now = time.time() - hashed_url = hashlib.md5(url).hexdigest() + hashed_url = hashlib.md5(url.encode('utf-8')).hexdigest() if folder: cache_path = xbmc.translatePath(os.path.join(ADDON_DATA_PATH, folder)) else: @@ -195,7 +191,7 @@ def get_JSON_response(url="", cache_days=7.0, folder=False, headers=False): else: response = get_http(url, headers) try: - results = simplejson.loads(response) + results = json.loads(response) log("download %s. time: %f" % (url, time.time() - now)) save_to_file(results, hashed_url, cache_path) except Exception: @@ -218,7 +214,8 @@ def get_http(url=None, headers=False): succeed = 0 if not headers: headers = {'User-agent': 'XBMC/16.0 ( phil65@kodi.tv )'} - while (succeed < 2) and (not xbmc.abortRequested): + monitor = xbmc.Monitor() + while (succeed < 2) and (not monitor.abortRequested()): try: r = requests.get(url, headers=headers) if r.status_code != 200: @@ -226,7 +223,7 @@ def get_http(url=None, headers=False): return r.text except Exception: log("get_http: could not get data from %s" % url) - xbmc.sleep(1000) + monitor.waitForAbort(1) succeed += 1 return None @@ -237,24 +234,22 @@ def read_from_file(path="", raw=False): """ if not xbmcvfs.exists(path): return False + try: - with open(path) as f: + with xbmcvfs.File(path) as f: log("opened textfile %s." 
% (path)) if raw: return f.read() else: - return simplejson.load(f) + return json.load(f) except Exception: log("failed to load textfile: " + path) return False def log(txt): - if isinstance(txt, str): - txt = txt.decode("utf-8", 'ignore') message = u'%s: %s' % (ADDON_ID, txt) - xbmc.log(msg=message.encode("utf-8", 'ignore'), - level=xbmc.LOGDEBUG) + xbmc.log(msg=message, level=xbmc.LOGDEBUG) def save_to_file(content, filename, path=""): @@ -263,10 +258,12 @@ def save_to_file(content, filename, path=""): """ if not xbmcvfs.exists(path): xbmcvfs.mkdirs(path) + text_file_path = os.path.join(path, filename + ".txt") now = time.time() - text_file = xbmcvfs.File(text_file_path, "w") - simplejson.dump(content, text_file) - text_file.close() + + with xbmcvfs.File(text_file_path, "w") as text_file: + json.dump(content, text_file) + log("saved textfile %s. Time: %f" % (text_file_path, time.time() - now)) return True From 3e8e6d861c2f98358fd20402f355cc7ff43ca36d Mon Sep 17 00:00:00 2001 From: finkleandeinhorn Date: Sat, 18 Jun 2022 10:43:28 -0700 Subject: [PATCH 09/23] Add kodi-addon-submitter to CI --- .github/workflows/addon-submitter.yml | 70 +++++++++++++++++++++++++++ 1 file changed, 70 insertions(+) create mode 100644 .github/workflows/addon-submitter.yml diff --git a/.github/workflows/addon-submitter.yml b/.github/workflows/addon-submitter.yml new file mode 100644 index 0000000..1103ce0 --- /dev/null +++ b/.github/workflows/addon-submitter.yml @@ -0,0 +1,70 @@ +name: Kodi Addon-Submitter + +on: + create: + tags: + - v* + +jobs: + kodi-addon-submitter: + runs-on: ubuntu-latest + name: Kodi Addon Submitter + steps: + + - name: Checkout + uses: actions/checkout@v1 + + - name: Extract kodi official repository target + id: extract_branch_pr + shell: bash + env: + ADDON_ID: ${{ github.event.repository.name }} + run: | + echo "##[set-output name=branch;]matrix" + if [[ $ADDON_ID == plugin* ]]; then + echo "##[set-output name=repo;]repo-plugins" + else + echo "##[set-output 
name=repo;]repo-scripts" + fi + + - name: Kodi addon checker validation + id: kodi-addon-checker + uses: xbmc/action-kodi-addon-checker@v1.2 + with: + kodi-version: ${{ steps.extract_branch_pr.outputs.branch }} + addon-id: ${{ github.event.repository.name }} + is-pr: false + + - name: Generate distribution zip and submit to official kodi repository + id: kodi-addon-submitter + uses: xbmc/action-kodi-addon-submitter@v1.2 + with: # Replace all the below values + kodi-repository: ${{ steps.extract_branch_pr.outputs.repo }} + kodi-version: ${{ steps.extract_branch_pr.outputs.branch }} + addon-id: ${{ github.event.repository.name }} + env: # Make sure you create the below secrets (GH_TOKEN and EMAIL) + GH_USERNAME: ${{ github.repository_owner }} + GH_TOKEN: ${{secrets.GH_TOKEN}} + EMAIL: ${{secrets.EMAIL}} + + - name: Create Github Release + id: create_release + uses: actions/create-release@v1.0.0 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + tag_name: ${{ github.ref }} + release_name: Release ${{ github.ref }} + draft: false + prerelease: false + + - name: Upload Addon zip to github release + id: upload-release-asset + uses: actions/upload-release-asset@v1.0.1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ steps.create_release.outputs.upload_url }} + asset_path: ${{ steps.kodi-addon-submitter.outputs.addon-zip }} + asset_name: ${{ steps.kodi-addon-submitter.outputs.addon-zip }} + asset_content_type: application/zip \ No newline at end of file From bb1ef32f3952078c9ae464c99b61410d47454658 Mon Sep 17 00:00:00 2001 From: finkleandeinhorn <47431670+finkleandeinhorn@users.noreply.github.com> Date: Fri, 17 Jun 2022 12:37:30 -0700 Subject: [PATCH 10/23] Update addon.xml --- addon.xml | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/addon.xml b/addon.xml index c16dca4..4085ad2 100644 --- a/addon.xml +++ b/addon.xml @@ -1,8 +1,7 @@ - + - - + @@ -12,5 +11,8 @@ all GNU GENERAL PUBLIC LICENSE Version 2.1, 
February 1999 https://github.com/phil65/script.module.autocompletion + + icon.png + From 79e02ba5661fa971eb0114ba55b7448dd9361f1b Mon Sep 17 00:00:00 2001 From: berkhornet <67786207+berkhornet@users.noreply.github.com> Date: Thu, 17 Nov 2022 14:11:37 +0000 Subject: [PATCH 11/23] Fix xbmc.translatePath --- lib/AutoCompletion.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/AutoCompletion.py b/lib/AutoCompletion.py index 525854a..ad8fcaa 100644 --- a/lib/AutoCompletion.py +++ b/lib/AutoCompletion.py @@ -180,9 +180,9 @@ def get_JSON_response(url="", cache_days=7.0, folder=False, headers=False): now = time.time() hashed_url = hashlib.md5(url.encode('utf-8')).hexdigest() if folder: - cache_path = xbmc.translatePath(os.path.join(ADDON_DATA_PATH, folder)) + cache_path = xbmcvfs.translatePath(os.path.join(ADDON_DATA_PATH, folder)) else: - cache_path = xbmc.translatePath(os.path.join(ADDON_DATA_PATH)) + cache_path = xbmcvfs.translatePath(os.path.join(ADDON_DATA_PATH)) path = os.path.join(cache_path, hashed_url + ".txt") cache_seconds = int(cache_days * 86400.0) if xbmcvfs.exists(path) and ((now - os.path.getmtime(path)) < cache_seconds): From 31cf22371d3bb78b12ef5ff8675cadaee477f5c2 Mon Sep 17 00:00:00 2001 From: finkleandeinhorn Date: Mon, 21 Nov 2022 08:29:11 -0800 Subject: [PATCH 12/23] Update addon.xml --- addon.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/addon.xml b/addon.xml index 4085ad2..720b7bc 100644 --- a/addon.xml +++ b/addon.xml @@ -1,5 +1,5 @@ - + From 324daacea91e78ccb9b2b99b81ab6a121df1b98f Mon Sep 17 00:00:00 2001 From: finkleandeinhorn <47431670+finkleandeinhorn@users.noreply.github.com> Date: Mon, 21 Nov 2022 08:43:28 -0800 Subject: [PATCH 13/23] Update addon-submitter.yml --- .github/workflows/addon-submitter.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/addon-submitter.yml b/.github/workflows/addon-submitter.yml index 9a5bbed..4892080 100644 --- 
a/.github/workflows/addon-submitter.yml +++ b/.github/workflows/addon-submitter.yml @@ -43,7 +43,7 @@ jobs: kodi-version: ${{ steps.extract_branch_pr.outputs.branch }} addon-id: ${{ github.event.repository.name }} env: # Make sure you create the below secrets (GH_TOKEN and EMAIL) - GH_USERNAME: ${{ github.repository_owner }} + GH_USERNAME: ${{ github.actor }} GH_TOKEN: ${{secrets.GH_TOKEN}} EMAIL: ${{secrets.EMAIL}} @@ -67,4 +67,4 @@ jobs: upload_url: ${{ steps.create_release.outputs.upload_url }} asset_path: ${{ steps.kodi-addon-submitter.outputs.addon-zip }} asset_name: ${{ steps.kodi-addon-submitter.outputs.addon-zip }} - asset_content_type: application/zip \ No newline at end of file + asset_content_type: application/zip From dcf283f73513428ac4f60e888148cd34f1925ee1 Mon Sep 17 00:00:00 2001 From: finkleandeinhorn Date: Mon, 21 Nov 2022 10:00:57 -0800 Subject: [PATCH 14/23] Address PR concerns --- .gitattributes | 8 ++++++++ addon.xml | 4 ++-- 2 files changed, 10 insertions(+), 2 deletions(-) create mode 100644 .gitattributes diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..5d3615c --- /dev/null +++ b/.gitattributes @@ -0,0 +1,8 @@ +.gitignore export-ignore +.gitattributes export-ignore +.github export-ignore +__pycache__ export-ignore +*.psd export-ignore +*.pyo export-ignore +*.pyc export-ignore +*.mo export-ignore \ No newline at end of file diff --git a/addon.xml b/addon.xml index 720b7bc..b58f310 100644 --- a/addon.xml +++ b/addon.xml @@ -9,8 +9,8 @@ Module providing some AutoCompletion functions Module providing some AutoCompletion functions all - GNU GENERAL PUBLIC LICENSE Version 2.1, February 1999 - https://github.com/phil65/script.module.autocompletion + GPL-2.0-or-later + https://github.com/finkleandeinhorn/script.module.autocompletion icon.png From 63fad2ce09a1f8630c27097ee04c487ebf32330c Mon Sep 17 00:00:00 2001 From: finkleandeinhorn Date: Mon, 21 Nov 2022 10:32:50 -0800 Subject: [PATCH 15/23] Convert to f-strings 
--- lib/AutoCompletion.py | 31 +++++++++++++++---------------- 1 file changed, 15 insertions(+), 16 deletions(-) diff --git a/lib/AutoCompletion.py b/lib/AutoCompletion.py index ad8fcaa..8740791 100644 --- a/lib/AutoCompletion.py +++ b/lib/AutoCompletion.py @@ -178,24 +178,23 @@ def get_JSON_response(url="", cache_days=7.0, folder=False, headers=False): get JSON response for *url, makes use of file cache. """ now = time.time() - hashed_url = hashlib.md5(url.encode('utf-8')).hexdigest() - if folder: - cache_path = xbmcvfs.translatePath(os.path.join(ADDON_DATA_PATH, folder)) - else: - cache_path = xbmcvfs.translatePath(os.path.join(ADDON_DATA_PATH)) - path = os.path.join(cache_path, hashed_url + ".txt") - cache_seconds = int(cache_days * 86400.0) + hashed_url = hashlib.md5(url.encode("utf-8")).hexdigest() + cache_path = xbmcvfs.translatePath(os.path.join(ADDON_DATA_PATH, folder) if folder else ADDON_DATA_PATH) + path = os.path.join(cache_path, f"{hashed_url}.txt") + cache_seconds = int(cache_days * 86400) + results = [] + if xbmcvfs.exists(path) and ((now - os.path.getmtime(path)) < cache_seconds): results = read_from_file(path) - log("loaded file for %s. time: %f" % (url, time.time() - now)) + log(f"loaded file for {url}. time: {float(time.time() - now)}") else: response = get_http(url, headers) try: results = json.loads(response) - log("download %s. time: %f" % (url, time.time() - now)) + log(f"download {url}. time: {float(time.time() - now)}") save_to_file(results, hashed_url, cache_path) except Exception: - log("Exception: Could not get new JSON data from %s. Tryin to fallback to cache" % url) + log(f"Exception: Could not get new JSON data from {url}. 
Trying to fallback to cache") log(response) if xbmcvfs.exists(path): results = read_from_file(path) @@ -222,7 +221,7 @@ def get_http(url=None, headers=False): raise Exception return r.text except Exception: - log("get_http: could not get data from %s" % url) + log(f"get_http: could not get data from {url}") monitor.waitForAbort(1) succeed += 1 return None @@ -237,18 +236,18 @@ def read_from_file(path="", raw=False): try: with xbmcvfs.File(path) as f: - log("opened textfile %s." % (path)) + log(f"opened textfile {path}.") if raw: return f.read() else: return json.load(f) except Exception: - log("failed to load textfile: " + path) + log(f"failed to load textfile: {path}") return False def log(txt): - message = u'%s: %s' % (ADDON_ID, txt) + message = f"{ADDON_ID}: {txt}" xbmc.log(msg=message, level=xbmc.LOGDEBUG) @@ -259,11 +258,11 @@ def save_to_file(content, filename, path=""): if not xbmcvfs.exists(path): xbmcvfs.mkdirs(path) - text_file_path = os.path.join(path, filename + ".txt") + text_file_path = os.path.join(path, f"{filename}.txt") now = time.time() with xbmcvfs.File(text_file_path, "w") as text_file: json.dump(content, text_file) - log("saved textfile %s. Time: %f" % (text_file_path, time.time() - now)) + log(f"saved textfile {text_file_path}. 
Time: {float(time.time() - now)}") return True From 53d86d7c2cc54a57422c52b5165c6355cb9d8e20 Mon Sep 17 00:00:00 2001 From: finkleandeinhorn Date: Mon, 21 Nov 2022 10:33:29 -0800 Subject: [PATCH 16/23] Better definition of ADDON fields --- lib/AutoCompletion.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/lib/AutoCompletion.py b/lib/AutoCompletion.py index 8740791..aa2203b 100644 --- a/lib/AutoCompletion.py +++ b/lib/AutoCompletion.py @@ -14,13 +14,12 @@ import xbmcaddon import xbmcvfs -HEADERS = {'User-agent': 'Mozilla/5.0'} - -ADDON = xbmcaddon.Addon() +SCRIPT_ID = "script.module.autocompletion" +ADDON = xbmcaddon.Addon(SCRIPT_ID) SETTING = ADDON.getSetting -ADDON_PATH = os.path.join(os.path.dirname(__file__), "..") -ADDON_ID = ADDON.getAddonInfo('id') -ADDON_DATA_PATH = xbmcvfs.translatePath("special://profile/addon_data/%s" % ADDON_ID) +ADDON_PATH = xbmcvfs.translatePath(ADDON.getAddonInfo("path")) +ADDON_ID = ADDON.getAddonInfo("id") +ADDON_DATA_PATH = xbmcvfs.translatePath(ADDON.getAddonInfo("profile")) def get_autocomplete_items(search_str, limit=10, provider=None): From b521d62caaa8b3ced205ac3bf80c3f77e92311c8 Mon Sep 17 00:00:00 2001 From: finkleandeinhorn Date: Mon, 21 Nov 2022 10:34:36 -0800 Subject: [PATCH 17/23] Abstractify providers --- lib/AutoCompletion.py | 144 +++++++++++++++++++++--------------------- 1 file changed, 72 insertions(+), 72 deletions(-) diff --git a/lib/AutoCompletion.py b/lib/AutoCompletion.py index aa2203b..cd9d6f8 100644 --- a/lib/AutoCompletion.py +++ b/lib/AutoCompletion.py @@ -3,6 +3,7 @@ # Copyright (C) 2015 - Philipp Temminghoff # This program is Free Software see LICENSE file for details +from abc import ABC, abstractmethod from urllib.parse import quote_plus import os import time @@ -49,9 +50,17 @@ def prep_search_str(text): return text -class BaseProvider(object): +class BaseProvider(ABC): + + HEADERS = {'User-agent': 'Mozilla/5.0'} + def __init__(self, *args, **kwargs): self.limit = 
kwargs.get("limit", 10) + self.language = SETTING("autocomplete_lang") + + @abstractmethod + def build_url(self, query): + pass def get_predictions(self, search_str): if not search_str: @@ -59,8 +68,7 @@ def get_predictions(self, search_str): items = [] result = self.fetch_data(search_str) for i, item in enumerate(result): - li = {"label": item, - "search_string": prep_search_str(item)} + li = {"label": item, "search_string": prep_search_str(item)} items.append(li) if i > int(self.limit): break @@ -68,77 +76,72 @@ def get_predictions(self, search_str): def get_prediction_listitems(self, search_str): for item in self.get_predictions(search_str): - li = {"label": item, - "search_string": search_str} + li = {"label": item, "search_string": search_str} yield li + def fetch_data(self, search_str): + url = self.build_url(quote_plus(search_str)) + result = get_JSON_response(url=self.BASE_URL.format(endpoint=url), headers=self.HEADERS, folder=self.FOLDER) + return self.process_result(result) + + def process_result(self, result): + if not result or len(result) <= 1: + return [] + else: + return result[1] if isinstance(result[1], list) else result + class GoogleProvider(BaseProvider): - BASE_URL = "http://clients1.google.com/complete/" + BASE_URL = "http://clients1.google.com/complete/{endpoint}" + FOLDER = "Google" def __init__(self, *args, **kwargs): - super(GoogleProvider, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.youtube = kwargs.get("youtube", False) - def fetch_data(self, search_str): - url = "search?hl=%s&q=%s&json=t&client=serp" % ( - SETTING("autocomplete_lang"), - quote_plus(search_str), - ) + def build_url(self, query): + url = f"search?hl={self.language}&q={query}&json=t&client=serp" if self.youtube: url += "&ds=yt" - result = get_JSON_response(url=self.BASE_URL + url, - headers=HEADERS, - folder="Google") - if not result or len(result) <= 1: - return [] - else: - return result[1] + return url class BingProvider(BaseProvider): - 
BASE_URL = "http://api.bing.com/osjson.aspx?" + BASE_URL = "http://api.bing.com/osjson.aspx?{endpoint}" + FOLDER = "Bing" def __init__(self, *args, **kwargs): super(BingProvider, self).__init__(*args, **kwargs) - def fetch_data(self, search_str): - url = "query=%s" % (quote_plus(search_str)) - result = get_JSON_response( - url=self.BASE_URL + url, headers=HEADERS, folder="Bing" - ) - if not result: - return [] - else: - return result[1] + def build_url(self, query): + url = f"query={query}" + return url class TmdbProvider(BaseProvider): - BASE_URL = "https://www.themoviedb.org/search/multi?" + BASE_URL = "https://www.themoviedb.org/search/multi?{endpoint}" + FOLDER = "TMDB" def __init__(self, *args, **kwargs): super(TmdbProvider, self).__init__(*args, **kwargs) - def fetch_data(self, search_str): - url = "language=%s&query=%s" % ( - SETTING("autocomplete_lang"), - quote_plus(search_str), - ) - result = get_JSON_response( - url=self.BASE_URL + url, headers=HEADERS, folder="TMDB" - ) - if not result or "results" not in result: + def build_url(self, query): + url = f"language={self.language}&query={query}" + return url + + def process_result(self, result): + if not result or not (results := result.get("results")): return [] out = [] - for i in result["results"]: + for i in results: title = None - if "media_type" in i: - if i["media_type"] == "movie": + if media_type := i.get("media_type"): + if media_type == "movie": title = i["title"] - elif i["media_type"] in ["tv", "person"]: + elif media_type in ["tv", "person"]: title = i["name"] else: title = i @@ -149,27 +152,31 @@ def fetch_data(self, search_str): class LocalDictProvider(BaseProvider): def __init__(self, *args, **kwargs): super(LocalDictProvider, self).__init__(*args, **kwargs) + if local := SETTING("autocomplete_lang_local"): + self.language = local + else: + self.language = "en" - def get_predictions(self, search_str): - """ - get dict list with autocomplete labels from locally saved lists - """ - listitems 
= [] + def build_url(self, query): + return super().build_url(query) + + def fetch_data(self, search_str): k = search_str.rfind(" ") if k >= 0: - search_str = search_str[k + 1:] - local = SETTING("autocomplete_lang_local") - path = os.path.join(ADDON_PATH, "resources", "data", "common_%s.txt" % (local if local else "en")) + search_str = search_str[k + 1 :] + + path = os.path.join(ADDON_PATH, "resources", "data", f"common_{self.language}.txt") + suggestions = [] + with xbmcvfs.File(path) as f: for line in f.read().split('\n'): if not line.startswith(search_str) or len(line) <= 2: continue - li = {"label": line, - "search_string": line} - listitems.append(li) - if len(listitems) > int(self.limit): + suggestions.append(line) + if len(suggestions) > int(self.limit): break - return listitems + + return suggestions def get_JSON_response(url="", cache_days=7.0, folder=False, headers=False): @@ -195,30 +202,23 @@ def get_JSON_response(url="", cache_days=7.0, folder=False, headers=False): except Exception: log(f"Exception: Could not get new JSON data from {url}. 
Trying to fallback to cache") log(response) - if xbmcvfs.exists(path): - results = read_from_file(path) - else: - results = [] - if results: - return results - else: - return [] + results = read_from_file(path) + return results -def get_http(url=None, headers=False): + +def get_http(url, headers): """ fetches data from *url, returns it as a string """ succeed = 0 - if not headers: - headers = {'User-agent': 'XBMC/16.0 ( phil65@kodi.tv )'} monitor = xbmc.Monitor() while (succeed < 2) and (not monitor.abortRequested()): try: - r = requests.get(url, headers=headers) - if r.status_code != 200: + response = requests.get(url, headers=headers) + if not response.ok: raise Exception - return r.text + return response.text except Exception: log(f"get_http: could not get data from {url}") monitor.waitForAbort(1) @@ -231,7 +231,7 @@ def read_from_file(path="", raw=False): return data from file with *path """ if not xbmcvfs.exists(path): - return False + return [] try: with xbmcvfs.File(path) as f: @@ -242,7 +242,7 @@ def read_from_file(path="", raw=False): return json.load(f) except Exception: log(f"failed to load textfile: {path}") - return False + return [] def log(txt): From 238b3fd78676aadd9af0e9b8555cc835a20b2e00 Mon Sep 17 00:00:00 2001 From: finkleandeinhorn Date: Mon, 21 Nov 2022 11:19:41 -0800 Subject: [PATCH 18/23] Fix bug in syntax --- lib/AutoCompletion.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/AutoCompletion.py b/lib/AutoCompletion.py index cd9d6f8..c2af10a 100644 --- a/lib/AutoCompletion.py +++ b/lib/AutoCompletion.py @@ -133,9 +133,10 @@ def build_url(self, query): return url def process_result(self, result): - if not result or not (results := result.get("results")): + if not result or not result.get("results"): return [] out = [] + results = result.get("results") for i in results: title = None if media_type := i.get("media_type"): From 592e48ce18745712002492539c794ae043807522 Mon Sep 17 00:00:00 2001 From: finkleandeinhorn Date: 
Mon, 21 Nov 2022 11:25:50 -0800 Subject: [PATCH 19/23] Fix walruses --- lib/AutoCompletion.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/lib/AutoCompletion.py b/lib/AutoCompletion.py index c2af10a..92ba947 100644 --- a/lib/AutoCompletion.py +++ b/lib/AutoCompletion.py @@ -139,11 +139,11 @@ def process_result(self, result): results = result.get("results") for i in results: title = None - if media_type := i.get("media_type"): - if media_type == "movie": - title = i["title"] - elif media_type in ["tv", "person"]: - title = i["name"] + media_type = i.get("media_type") + if media_type == "movie": + title = i["title"] + elif media_type in ["tv", "person"]: + title = i["name"] else: title = i out.append(title) @@ -153,7 +153,8 @@ def process_result(self, result): class LocalDictProvider(BaseProvider): def __init__(self, *args, **kwargs): super(LocalDictProvider, self).__init__(*args, **kwargs) - if local := SETTING("autocomplete_lang_local"): + local = SETTING("autocomplete_lang_local") + if local: self.language = local else: self.language = "en" From 242cdfeb84672880abd395c77321fae53947d92f Mon Sep 17 00:00:00 2001 From: finkleandeinhorn Date: Mon, 21 Nov 2022 11:36:06 -0800 Subject: [PATCH 20/23] Fix addon settings --- lib/AutoCompletion.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/lib/AutoCompletion.py b/lib/AutoCompletion.py index 92ba947..769d4f1 100644 --- a/lib/AutoCompletion.py +++ b/lib/AutoCompletion.py @@ -16,11 +16,13 @@ import xbmcvfs SCRIPT_ID = "script.module.autocompletion" -ADDON = xbmcaddon.Addon(SCRIPT_ID) -SETTING = ADDON.getSetting -ADDON_PATH = xbmcvfs.translatePath(ADDON.getAddonInfo("path")) -ADDON_ID = ADDON.getAddonInfo("id") -ADDON_DATA_PATH = xbmcvfs.translatePath(ADDON.getAddonInfo("profile")) +SCRIPT_ADDON = xbmcaddon.Addon(SCRIPT_ID) +PLUGIN_ID = "plugin.program.autocompletion" +PLUGIN_ADDON = xbmcaddon.Addon(PLUGIN_ID) +SETTING = PLUGIN_ADDON.getSetting +ADDON_PATH = 
xbmcvfs.translatePath(SCRIPT_ADDON.getAddonInfo("path")) +ADDON_ID = SCRIPT_ADDON.getAddonInfo("id") +ADDON_DATA_PATH = xbmcvfs.translatePath(SCRIPT_ADDON.getAddonInfo("profile")) def get_autocomplete_items(search_str, limit=10, provider=None): From 5221798a7e0eb5e61df36c7a29b75fe82aeb1fba Mon Sep 17 00:00:00 2001 From: finkleandeinhorn Date: Mon, 21 Nov 2022 10:35:15 -0800 Subject: [PATCH 21/23] Update addon.xml --- addon.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/addon.xml b/addon.xml index b58f310..67fb12e 100644 --- a/addon.xml +++ b/addon.xml @@ -1,5 +1,5 @@ - + From 81e89646c181677c3a3b420351f6b2f8a4839c50 Mon Sep 17 00:00:00 2001 From: finkleandeinhorn Date: Mon, 21 Nov 2022 11:50:27 -0800 Subject: [PATCH 22/23] Nicer settings --- lib/AutoCompletion.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/lib/AutoCompletion.py b/lib/AutoCompletion.py index 769d4f1..fdf1bcc 100644 --- a/lib/AutoCompletion.py +++ b/lib/AutoCompletion.py @@ -31,13 +31,16 @@ def get_autocomplete_items(search_str, limit=10, provider=None): """ if xbmc.getCondVisibility("System.HasHiddenInput"): return [] - if SETTING("autocomplete_provider") == "youtube": + + setting = SETTING("autocomplete_provider").lower() + + if setting == "youtube": provider = GoogleProvider(youtube=True, limit=limit) - elif SETTING("autocomplete_provider") == "google": + elif setting == "google": provider = GoogleProvider(limit=limit) - elif SETTING("autocomplete_provider") == "bing": + elif setting == "bing": provider = BingProvider(limit=limit) - elif SETTING("autocomplete_provider") == "tmdb": + elif setting == "tmdb": provider = TmdbProvider(limit=limit) else: provider = LocalDictProvider(limit=limit) From e813a542b380874c709e8b7be9307da5f61f11d3 Mon Sep 17 00:00:00 2001 From: finkleandeinhorn Date: Mon, 21 Nov 2022 11:50:53 -0800 Subject: [PATCH 23/23] Update addon.xml --- addon.xml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git 
a/addon.xml b/addon.xml index 67fb12e..19f07c2 100644 --- a/addon.xml +++ b/addon.xml @@ -1,13 +1,13 @@ - + - Module providing some AutoCompletion functions - Module providing some AutoCompletion functions + Module providing some AutoCompletion functions + Module providing some AutoCompletion functions all GPL-2.0-or-later https://github.com/finkleandeinhorn/script.module.autocompletion