# -*- coding: utf-8 -*-
from setuphelpers import *
try:
    import bs4 as BeautifulSoup
except ImportError:
    import BeautifulSoup
import requests
import platform
import os    # os.rename() is used in update_package()
import glob  # glob.glob() is used in remove_outdated_binaries()

uninstallkey = []
# Installation procedure: https://www.sumatrapdfreader.org/docs/Installer-cmd-line-arguments.html
# Defining variables
bin_name_string = 'SumatraPDF-%s-install.exe'
silent_args = '-s -d "%s"' % makepath(programfiles,'SumatraPDF')
app_uninstallkey = 'SumatraPDF'
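
# For illustration, with programfiles resolving to C:\Program Files on a
# 64-bit system, the silent command line built above typically expands to:
#   SumatraPDF-3.2-install.exe -s -d "C:\Program Files\SumatraPDF"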

def install():
    # Initializing variables
    package_version = control.version.split('-')[0]
    bin_name = bin_name_string % package_version
    app_name = control.name

    # Uninstalling older versions
    for uninstall in installed_softwares(uninstallkey=app_uninstallkey):
        if Version(uninstall['version']) < Version(package_version):
            print('Older %s version found (%s)' % (app_name, uninstall['version']))
            print('Removing: %s' % uninstall['name'])
            run(uninstall_cmd(uninstall['key']))

    # Installing the package (skipped when the uninstall key already reports
    # min_version or newer)
    install_exe_if_needed(bin_name,
                          silentflags=silent_args,
                          key=app_uninstallkey,
                          min_version=package_version)
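
# Note: installed_softwares, uninstall_cmd, install_exe_if_needed and Version
# all come from the setuphelpers star import; the WAPT agent runs install()
# when the package is deployed on a host.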

def update_package():
    print('Download/Update package content from upstream binary sources')
    result = False
    proxies = get_proxies()
    if not proxies:
        proxies = get_proxies_from_wapt_console()

    # Initializing variables
    app_name = control.name
    url = control.sources
    bin_end = bin_name_string.split('%s')[-1]

    # Getting the latest version from the official website
    page = requests.get(url, proxies=proxies, headers={'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.77 Safari/537.36'}).text
    bs = BeautifulSoup.BeautifulSoup(page, features='html.parser')
    bs_read = bs.findAll('a', {'onclick': 'return SetupRedirect()'})
    for link in bs_read:
        if link['href'].endswith(bin_end):
            # e.g. https://www.sumatrapdfreader.org/dl2/SumatraPDF-3.2-64-install.exe
            url_dl = link['href']
    latest_bin = url_dl.split('/')[-1]
    version = latest_bin.split('-')[1]
    print('Latest %s version is: %s' % (app_name, version))
    print('Download url is: %s' % url_dl)

    # Downloading the latest binary
    if not isfile(latest_bin):
        print('Downloading: %s' % latest_bin)
        wget(url_dl, latest_bin, proxies=proxies)

    # Checking the version embedded in the downloaded file
    version_from_file = get_version_from_binary(latest_bin)
    if version_from_file != '' and Version(version) != Version(version_from_file):
        print('Changing version to the version number of the binary (from: %s to: %s)' % (version, version_from_file))
        os.rename(latest_bin, bin_name_string % version_from_file)
        version = version_from_file

    # Changing the version of the package
    if Version(version) > Version(control.get_software_version()):
        print('Software version updated (from: %s to: %s)' % (control.get_software_version(), Version(version)))
        result = True
    control.version = '%s-%s' % (Version(version), control.version.split('-', 1)[-1])
    control.save_control_to_wapt()

    # Deleting outdated binaries
    remove_outdated_binaries(version)

    # Returning True validates the update-package-sources run
    return result
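
# update_package() is typically triggered from the WAPT console or with
# `wapt-get update-package-sources <package-dir>`; returning True signals
# that the package sources changed and a rebuild is warranted.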

def get_proxies():
    r"""Return the system proxy settings using the urllib python library

    >>> get_proxies()
    {'http': 'http://srvproxy.ad.domain.lan:8080',
     'https': 'http://srvproxy.ad.domain.lan:8080'}
    """
    if platform.python_version_tuple()[0] == '3':
        from urllib.request import getproxies
    else:
        from urllib import getproxies
    p = getproxies()
    if p and 'http' in p:
        p['https'] = p['http']
    return p

def get_proxies_from_wapt_console():
    r"""Return proxy settings from the current user's WAPT console configuration

    >>> get_proxies_from_wapt_console()
    {'http': 'http://srvproxy.ad.domain.lan:8080',
     'https': 'http://srvproxy.ad.domain.lan:8080'}
    """
    proxies = {}
    if platform.system() == 'Windows':
        waptconsole_ini_path = makepath(user_local_appdata(), 'waptconsole', 'waptconsole.ini')
    else:
        waptconsole_ini_path = makepath(user_home_directory(), '.config', 'waptconsole', 'waptconsole.ini')
    if isfile(waptconsole_ini_path):
        proxy_wapt = inifile_readstring(waptconsole_ini_path, 'global', 'http_proxy')
        if proxy_wapt:
            proxies = {'http': proxy_wapt, 'https': proxy_wapt}
    return proxies

def bs_find(url, element, attribute=None, value=None, user_agent=None, proxies=None, features='html.parser', **kwargs):
    r"""Parse an html web page with BeautifulSoup and return the first matching element

    Args:
        url (str): url of the web page to parse
        element (str): searched element
        attribute (str): selected attribute of the element
        value (str): value of the selected attribute
        user_agent (str): specify a user-agent if needed
        proxies (dict): specify your proxy if needed
        features (str): BeautifulSoup parser to use
        **kwargs: extra parameters passed to requests.get()

    >>> bs_find('https://www.w3.org/', 'a', 'title', 'Open Web Platform testing')['href']
    'https://web-platform-tests.org/'
    >>> bs_find('https://www.w3.org/', 'span', 'class', 'alt-logo').string
    'W3C'

    .. versionadded:: 2.0
    """
    import requests
    if user_agent:
        page = requests.get(url, proxies=proxies, headers={'User-Agent': '%s' % user_agent}, **kwargs).text
    else:
        page = requests.get(url, proxies=proxies, **kwargs).text
    soup = BeautifulSoup.BeautifulSoup(page, features=features)
    if value:
        return soup.find(element, {attribute: value})
    else:
        return soup.find(element)

def bs_find_all(url, element, attribute=None, value=None, user_agent=None, proxies=None, features='html.parser', **kwargs):
    r"""Parse an html web page with BeautifulSoup and return the list of matching elements

    Args:
        url (str): url of the web page to parse
        element (str): searched element
        attribute (str): selected attribute of the element
        value (str): value of the selected attribute
        user_agent (str): specify a user-agent if needed
        proxies (dict): specify your proxy if needed
        features (str): BeautifulSoup parser to use
        **kwargs: extra parameters passed to requests.get()

    >>> bs_find_all('https://www.w3.org/', 'a', 'title', 'Open Web Platform testing')[0]['href']
    'https://web-platform-tests.org/'
    >>> bs_find_all('https://www.w3.org/', 'span', 'class', 'alt-logo')[0].string
    'W3C'

    .. versionadded:: 2.0
    """
    import requests
    if user_agent:
        page = requests.get(url, proxies=proxies, headers={'User-Agent': '%s' % user_agent}, **kwargs).text
    else:
        page = requests.get(url, proxies=proxies, **kwargs).text
    soup = BeautifulSoup.BeautifulSoup(page, features=features)
    if value:
        return soup.find_all(element, {attribute: value})
    else:
        return soup.find_all(element)
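
# Illustrative sketch: the download link lookup in update_package() could also
# be written with this helper, assuming the download page keeps its
# SetupRedirect() anchors:
#   links = bs_find_all(control.sources, 'a', 'onclick', 'return SetupRedirect()',
#                       proxies=get_proxies())
#   url_dl = [l['href'] for l in links if l['href'].endswith('-install.exe')][-1]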

def get_version_from_binary(filename, property_name='ProductVersion'):
    r"""Get the installer version from the file properties; for now, only exe and msi files are supported

    Args:
        filename (str): path to the file
        property_name (str): selected property

    Returns:
        str: version number
    """
    if filename.endswith('.msi'):
        return get_msi_properties(filename)[property_name]
    else:
        return get_file_properties(filename)[property_name]
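
# Illustrative use (the result depends on the binary's ProductVersion property):
#   get_version_from_binary('SumatraPDF-3.2-install.exe')  # e.g. '3.2'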

def remove_outdated_binaries(version, filename_contains=None, list_extensions=['exe', 'msi', 'deb', 'rpm', 'dmg', 'pkg']):
    r"""Remove binaries whose filename does not contain the given version

    Args:
        version (str): version number of the files to keep
        filename_contains (str or list of str): part of the filename that must be present (useful for distinguishing architecture and os)
        list_extensions (str or list of str): file extensions of the files to check

    Returns:
        None

    .. versionadded:: 2.0
    """
    if type(list_extensions) != list:
        list_extensions = [list_extensions]
    if filename_contains:
        if type(filename_contains) != list:
            filename_contains = [filename_contains]
    # Normalize extensions to start with a dot, keeping the ones that already do
    list_extensions = [ext if ext.startswith('.') else '.' + ext for ext in list_extensions]
    for file_ext in list_extensions:
        for bin_in_dir in glob.glob('*%s' % file_ext):
            if version not in bin_in_dir:
                remove_file(bin_in_dir)
            elif filename_contains:
                for filename_contain in filename_contains:
                    if filename_contain not in bin_in_dir:
                        remove_file(bin_in_dir)
                        break
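
# Illustrative use: keep only installers whose filename contains '3.2'
#   remove_outdated_binaries('3.2', list_extensions='exe')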