# -*- coding: utf-8 -*-
from setuphelpers import *
import platform
import json
import glob
uninstallkey = []
# Package-level variables: the %s placeholders are filled in with the software version
bin_name_sub = 'Zotero-%s_setup.exe'
silent_args = '-ms'
app_uninstallkey_sub = 'Zotero %s (x86 en-US)'
def install():
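    """Install the bundled Zotero setup binary silently.

    install_exe_if_needed skips the installation when the registered
    uninstall key already reports this version or newer.
    """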
# Initializing variables
package_version = control.get_software_version()
bin_name = bin_name_sub % package_version
app_uninstallkey = app_uninstallkey_sub % package_version
# Installing the package
print("Installing: %s" % bin_name)
install_exe_if_needed(bin_name,
silentflags=silent_args,
key=app_uninstallkey,
min_version=package_version,
)
def update_package():
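    """Scrape the latest win32 version from zotero.org, download the matching
    installer if it is not already present, and update the package version.
    """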
# Initializing variables
proxies = get_proxies()
if not proxies:
proxies = get_proxies_from_wapt_console()
app_name = control.name
url = 'https://www.zotero.org/download/'
# Getting latest version from official sources
print('URL used is: %s' % url)
    version = None
    for bs_search in bs_find_all(url, 'script', 'type', 'text/javascript', proxies=proxies):
        temp_str = str(bs_search)
        if 'win32' in temp_str:
            # The script embeds a mapping like 'versions: {"win32": "x.y.z", ...}';
            # keep what follows the first colon up to the first closing brace and parse it as JSON
            dict_version = json.loads(temp_str.split(':', 1)[-1].split('\n', 1)[0].split('}')[0] + '}')
            version = dict_version['win32']
            latest_bin = bin_name_sub % version
            url_dl = 'https://www.zotero.org/download/client/dl?channel=release&platform=win32&version=%s' % version
            break
    if version is None:
        raise Exception('Unable to find the win32 version on %s' % url)
    print("Latest %s version is: %s" % (app_name, version))
    print("Download url is: %s" % url_dl)
# Downloading latest binaries
if not isfile(latest_bin):
print("Downloading: %s" % latest_bin)
wget(url_dl, latest_bin, proxies=proxies)
    # Changing the package version while keeping the packaging suffix (e.g. '-1')
control.version = '%s-%s' % (version, control.version.split('-', 1)[-1])
control.save_control_to_wapt()
# Deleting outdated binaries
remove_outdated_binaries(version)
def get_proxies():
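    """Return the system proxy settings via urllib (Python 2 and 3 compatible)."""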
if platform.python_version_tuple()[0] == '3':
from urllib.request import getproxies
else:
from urllib import getproxies
return getproxies()
def get_proxies_from_wapt_console():
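    """Read the http_proxy setting from the local waptconsole.ini, if any."""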
proxies = {}
if platform.system() == 'Windows':
waptconsole_ini_path = makepath(user_local_appdata(), 'waptconsole', 'waptconsole.ini')
else:
waptconsole_ini_path = makepath(user_home_directory(), '.config', 'waptconsole', 'waptconsole.ini')
if isfile(waptconsole_ini_path):
proxy_wapt = inifile_readstring(waptconsole_ini_path, 'global', 'http_proxy')
if proxy_wapt:
proxies = {'http': proxy_wapt, 'https': proxy_wapt}
return proxies
def get_version_from_binary(filename, parameter='ProductVersion'):
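    """Read a version property (ProductVersion by default) from an MSI database
    or from an executable's file properties.
    """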
if filename.endswith('.msi'):
return get_msi_properties(filename)[parameter]
else:
return get_file_properties(filename)[parameter]
def remove_outdated_binaries(version, list_extensions=['exe','msi','deb','rpm','dmg','pkg','zip'], list_filename_contain=None):
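    """Remove downloaded binaries whose filename does not contain the given version
    (and, optionally, a set of required substrings).

    Illustrative example: remove_outdated_binaries('6.0.30', list_extensions='exe')
    """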
    if not isinstance(list_extensions, list):
        list_extensions = [list_extensions]
    if list_filename_contain and not isinstance(list_filename_contain, list):
        list_filename_contain = [list_filename_contain]
    # Make sure every extension starts with a dot
    list_extensions = [ext if ext.startswith('.') else '.' + ext for ext in list_extensions]
    for file_ext in list_extensions:
        for bin_in_dir in glob.glob('*%s' % file_ext):
            if version not in bin_in_dir:
                remove_file(bin_in_dir)
                continue
            if list_filename_contain:
                for filename_contain in list_filename_contain:
                    if filename_contain not in bin_in_dir:
                        remove_file(bin_in_dir)
                        break
def bs_find(url, element, attribute=None, value=None, user_agent=None, proxies=None, features='html.parser', **kwargs):
""""You may need to use a user agent for some websites.
Example: user_agent='Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:78.0) Gecko/20100101 Firefox/78.0')
"""
import requests
if user_agent:
page = requests.get(url, proxies=proxies, headers={'User-Agent':'%s' % user_agent}, **kwargs).text
else:
page = requests.get(url, proxies=proxies, **kwargs).text
soup = BeautifulSoup.BeautifulSoup(page, features=features)
if value:
return soup.find(element, {attribute: value})
else:
return soup.find(element)
def bs_find_all(url, element, attribute=None, value=None, headers=None, proxies=None, features='html.parser', **kwargs):
""""You may need to use a header for some websites. For example: headers={'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:78.0) Gecko/20100101 Firefox/78.0'}
"""
import requests
page = requests.get(url, proxies=proxies, headers=headers, **kwargs).text
    try:
        # Prefer BeautifulSoup 4; fall back to the legacy BeautifulSoup 3 API
        import bs4 as BeautifulSoup
        soup = BeautifulSoup.BeautifulSoup(page, features=features)
    except ImportError:
        import BeautifulSoup
        soup = BeautifulSoup.BeautifulSoup(page)
    if value:
        return soup.findAll(element, {attribute: value})
    else:
        return soup.findAll(element)
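
# Illustrative usage (mirrors the call in update_package):
# bs_find_all('https://www.zotero.org/download/', 'script', 'type', 'text/javascript')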