from setuphelpers import *
import platform
import json
import glob
import requests
import bs4 as BeautifulSoup
uninstallkey = []
bin_name_sub = 'ganttproject-%s.exe'
silent_args = '/S'
app_uninstallkey = 'GanttProject'
def install():
    # control.version is '<software version>-<package revision>': keep only the software part
    package_version = control.version.split('-', 1)[0]
    bin_name = bin_name_sub % package_version
    app_dir = makepath(programfiles32, 'GanttProject-%s' % '.'.join(package_version.split('.')[:2]))
    app_version_file_path = makepath(app_dir, 'plugins', 'base', 'VERSION')

    def get_soft_version(version=None):
        # Read the installed version from GanttProject's plugins/base/VERSION file
        if isfile(app_version_file_path):
            return open(app_version_file_path, 'r').read().rsplit()[0]
        else:
            return '0.0.0'

    print("Installing: %s" % bin_name)
    install_exe_if_needed(bin_name,
        silentflags=silent_args,
        key=app_uninstallkey,
        min_version=package_version,
        get_version=get_soft_version,
    )

    # Update the uninstall registry entry with the version read from the VERSION file
    for soft in installed_softwares(uninstallkey=app_uninstallkey):
        if soft['version'] != package_version:
            quiet_uninstall_string = soft['uninstall_string'] + ' /S'
            register_uninstall(app_uninstallkey,
                uninstallstring=soft['uninstall_string'],
                quiet_uninstall_string=quiet_uninstall_string,
                display_version=get_soft_version(),
                publisher=control.editor,
            )

    """ for WAPT 2.0+
    for soft in installed_softwares(uninstallkey=app_uninstallkey):
        if soft['version'] != package_version:
            quiet_uninstall_string = soft['uninstall_string'] + ' /S'
            register_uninstall(app_uninstallkey,
                uninstallstring=soft['uninstall_string'],
                win64app=True,
                quiet_uninstall_string=quiet_uninstall_string,
                icon=makepath(app_dir, 'ganttproject.ico'),
                display_version=get_soft_version(),
                installed_size=int(str(control.installed_size)[:-3]),
                publisher=control.editor,
            ) """
def update_package():
    proxies = get_proxies()
    if not proxies:
        proxies = get_proxies_from_wapt_console()
    app_name = control.name
    git_repo = 'bardsoftware/ganttproject'
    url_api = 'https://api.github.com/repos/%s/releases/latest' % git_repo
    bin_end = bin_name_sub.split('%s')[-1]
    print("API used is: %s" % url_api)

    # Query the GitHub API for the latest release and pick the Windows installer asset
    json_load = json.loads(wgets(url_api, proxies=proxies))
    for download in json_load['assets']:
        if download['name'].endswith(bin_end):
            url_dl = download['browser_download_url']
            break
    version = json_load['tag_name'].split('-')[-1]
    latest_bin = bin_name_sub % version
    print("Latest %s version is: %s" % (app_name, version))
    print("Download url is: %s" % url_dl)

    if not isfile(latest_bin):
        print("Downloading: %s" % latest_bin)
        wget(url_dl, latest_bin, proxies=proxies)

    # Update the package version in WAPT\control, keeping the package revision suffix
    control.version = '%s-%s' % (version, control.version.split('-', 1)[-1])
    control.save_control_to_wapt()
    print("Changing package version to: %s in WAPT\\control" % control.version)
    remove_outdated_binaries(version)
def get_proxies():
    if platform.python_version_tuple()[0] == '3':
        from urllib.request import getproxies
    else:
        from urllib import getproxies
    return getproxies()
def get_proxies_from_wapt_console():
    proxies = {}
    if platform.system() == 'Windows':
        waptconsole_ini_path = makepath(user_local_appdata(), 'waptconsole', 'waptconsole.ini')
    else:
        waptconsole_ini_path = makepath(user_home_directory(), '.config', 'waptconsole', 'waptconsole.ini')
    if isfile(waptconsole_ini_path):
        proxy_wapt = inifile_readstring(waptconsole_ini_path, 'global', 'http_proxy')
        if proxy_wapt:
            proxies = {'http': proxy_wapt, 'https': proxy_wapt}
    return proxies
def get_version_from_binary(filename):
    if filename.endswith('.msi'):
        return get_msi_properties(filename)['ProductVersion']
    else:
        return get_file_properties(filename)['ProductVersion']
def remove_outdated_binaries(version, list_extensions=['exe', 'msi', 'deb', 'rpm', 'dmg', 'pkg'], list_filename_contain=None):
    if type(list_extensions) != list:
        list_extensions = [list_extensions]
    if list_filename_contain:
        if type(list_filename_contain) != list:
            list_filename_contain = [list_filename_contain]
    # Normalize extensions to a leading dot (keep the ones that already have it)
    list_extensions = ['.' + ext if not ext.startswith('.') else ext for ext in list_extensions]
    for file_ext in list_extensions:
        for bin_in_dir in glob.glob('*%s' % file_ext):
            if not version in bin_in_dir:
                remove_file(bin_in_dir)
            if list_filename_contain:
                for filename_contain in list_filename_contain:
                    if not filename_contain in bin_in_dir:
                        remove_file(bin_in_dir)
def bs_find_all(url, element, attribute=None, value=None, user_agent=None, proxies=None, features='html.parser', **kwargs):
    """You may need to use a user agent for some websites.
    Example: user_agent='Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:78.0) Gecko/20100101 Firefox/78.0'
    """
    if user_agent:
        page = requests.get(url, proxies=proxies, headers={'User-Agent': '%s' % user_agent}, **kwargs).text
    else:
        page = requests.get(url, proxies=proxies, **kwargs).text
    soup = BeautifulSoup.BeautifulSoup(page, features=features)
    if value:
        return soup.find_all(element, {attribute: value})
    else:
        return soup.find_all(element)
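
# A minimal usage sketch for bs_find_all, kept as a comment so it never runs at import
# time. The URL and the 'download-link' class are hypothetical placeholders, not part
# of this package; bs_find_all returns the bs4 Tag objects matching the element and
# attribute/value filter.
#
#     for link in bs_find_all('https://example.com/downloads', 'a', 'class', 'download-link'):
#         print(link.get('href'))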