# -*- coding: utf-8 -*-
from setuphelpers import *
import platform
import glob
import bs4 as BeautifulSoup
r"""
Usable WAPT package functions: install(), uninstall(), session_setup(), audit(), update_package()
"""
# Declaring global variables - Warnings: 1) WAPT context is only available in package functions; 2) Global variables are not persistent between calls
bin_contains = 'git-'
app_uninstall_path = '/usr/local/git/uninstall.sh'

def install():
    # Declaring local variables
    bin_name = glob.glob('*%s*.dmg' % bin_contains)[0]

    # Installing the software
    print("Installing: %s" % bin_name)
    install_dmg(bin_name)
    filecopyto('uninstall.sh', app_uninstall_path)
    run('chmod +x %s' % app_uninstall_path)

def uninstall():
    # Uninstalling the software
    run(app_uninstall_path)

def update_package():
    # Declaring local variables
    result = False
    proxies = get_proxies()
    if not proxies:
        proxies = get_proxies_from_wapt_console()
    app_name = control.name
    url = 'https://sourceforge.net/projects/git-osx-installer/files/'

    # Getting latest version from official sources
    print("URL used is: %s" % url)
    for bs_search in bs_find_all(url, 'tr', 'class', 'file', proxies=proxies, timeout=10):
        if bin_contains in bs_search['title']:
            latest_bin = bs_search['title']
            break
    version = latest_bin.split('-')[1]
    download_url = 'https://kumisystems.dl.sourceforge.net/project/git-osx-installer/%s' % latest_bin
    print("Latest %s version is: %s" % (app_name, version))
    print("Download URL is: %s" % download_url)

    # Downloading latest binaries
    if not isfile(latest_bin):
        print("Downloading: %s" % latest_bin)
        wget(download_url, latest_bin, proxies=proxies)

    # Changing version of the package
    if Version(version) > Version(control.get_software_version()):
        print("Software version updated (from: %s to: %s)" % (control.get_software_version(), Version(version)))
        result = True
    else:
        print("Software version up-to-date (%s)" % Version(version))
    control.version = '%s-%s' % (Version(version), control.version.split('-', 1)[-1])
    #control.set_software_version(version)
    control.save_control_to_wapt()

    # Deleting outdated binaries
    remove_outdated_binaries(version)

    # Validating or not update-package-sources
    return result
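
# Note on the scraping above: each row of the SourceForge file listing is expected to look
# roughly like <tr class="file" title="git-2.33.0-intel-universal-mavericks.dmg">...</tr>
# (illustrative markup and version, not captured from the live page), so bs_search['title']
# yields the dmg filename and splitting on '-' extracts the version, e.g.:
#   'git-2.33.0-intel-universal-mavericks.dmg'.split('-')[1]  ->  '2.33.0'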

def get_proxies():
    r"""Return system proxy with the urllib python library

    >>> get_proxies()
    {'http': 'http://srvproxy.ad.domain.lan:8080',
     'https': 'http://srvproxy.ad.domain.lan:8080'}

    """
    if platform.python_version_tuple()[0] == '3':
        from urllib.request import getproxies
    else:
        from urllib import getproxies
    return getproxies()
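
# Note: urllib's getproxies() typically builds this mapping from the http_proxy/https_proxy
# environment variables and, on Windows and macOS, from the system proxy settings, so it may
# return an empty dict on hosts with no proxy configured (hence the console fallback above).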

def get_proxies_from_wapt_console():
    r"""Return proxy information from the current user WAPT console

    >>> get_proxies_from_wapt_console()
    {'http': 'http://srvproxy.ad.domain.lan:8080',
     'https': 'http://srvproxy.ad.domain.lan:8080'}

    """
    proxies = {}
    if platform.system() == 'Windows':
        waptconsole_ini_path = makepath(user_local_appdata(), 'waptconsole', 'waptconsole.ini')
    else:
        waptconsole_ini_path = makepath(user_home_directory(), '.config', 'waptconsole', 'waptconsole.ini')
    if isfile(waptconsole_ini_path):
        proxy_wapt = inifile_readstring(waptconsole_ini_path, 'global', 'http_proxy')
        if proxy_wapt:
            proxies = {'http': proxy_wapt, 'https': proxy_wapt}
    return proxies
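
# For reference, get_proxies_from_wapt_console() reads an ini layout similar to the following
# (illustrative proxy value taken from the docstring above):
#   [global]
#   http_proxy=http://srvproxy.ad.domain.lan:8080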

def bs_find(url, element, attribute=None, value=None, user_agent=None, proxies=None, features='html.parser', **kwargs):
    r"""Parse an html web page with BeautifulSoup and return the first matching element

    Args:
        url (str): url of the web page to parse
        element (str): searched element
        attribute (str): selected attribute of the element
        value (str): value of the selected attribute
        user_agent (str): specify a user-agent if needed
        proxies (dict): specify your proxy if needed
        features (str): BeautifulSoup parser to use
        **kwargs (str): additional parameters passed to requests

    >>> bs_find('https://www.w3.org/', 'a', 'title', 'Open Web Platform testing')['href']
    'https://web-platform-tests.org/'

    >>> bs_find('https://www.w3.org/', 'span', 'class', 'alt-logo').string
    'W3C'

    .. versionadded:: 2.0

    """
    import requests
    if user_agent:
        page = requests.get(url, proxies=proxies, headers={'User-Agent': '%s' % user_agent}, **kwargs).text
    else:
        page = requests.get(url, proxies=proxies, **kwargs).text
    soup = BeautifulSoup.BeautifulSoup(page, features=features)
    if value:
        return soup.find(element, {attribute: value})
    else:
        return soup.find(element)
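
# A call such as bs_find(url, 'a', 'title', 'Open Web Platform testing') is roughly a
# convenience wrapper for the following sketch:
#   page = requests.get(url, proxies=proxies).text
#   soup = BeautifulSoup.BeautifulSoup(page, 'html.parser')
#   soup.find('a', {'title': 'Open Web Platform testing'})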

def bs_find_all(url, element, attribute=None, value=None, user_agent=None, proxies=None, features='html.parser', **kwargs):
    r"""Parse an html web page with BeautifulSoup and return a list of all matching elements

    Args:
        url (str): url of the web page to parse
        element (str): searched element
        attribute (str): selected attribute of the element
        value (str): value of the selected attribute
        user_agent (str): specify a user-agent if needed
        proxies (dict): specify your proxy if needed
        features (str): BeautifulSoup parser to use
        **kwargs (str): additional parameters passed to requests

    >>> bs_find_all('https://www.w3.org/', 'a', 'title', 'Open Web Platform testing')[0]['href']
    'https://web-platform-tests.org/'

    >>> bs_find_all('https://www.w3.org/', 'span', 'class', 'alt-logo')[0].string
    'W3C'

    .. versionadded:: 2.0

    """
    import requests
    if user_agent:
        page = requests.get(url, proxies=proxies, headers={'User-Agent': '%s' % user_agent}, **kwargs).text
    else:
        page = requests.get(url, proxies=proxies, **kwargs).text
    soup = BeautifulSoup.BeautifulSoup(page, features=features)
    if value:
        return soup.find_all(element, {attribute: value})
    else:
        return soup.find_all(element)

def get_version_from_binary(filename, property_name='ProductVersion'):
    r"""Get the installer version from the file properties; for now, only .exe and .msi files are supported

    Args:
        filename (str): path to the file
        property_name (str): selected property

    Returns:
        str: version number

    """
    if filename.endswith('.msi'):
        return get_msi_properties(filename)[property_name]
    else:
        return get_file_properties(filename)[property_name]
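
# Hypothetical usage (Windows installers only, since it relies on MSI/EXE file properties;
# filenames and values are illustrative):
#   get_version_from_binary('git-installer.msi')           ->  '2.33.0'
#   get_version_from_binary('setup.exe', 'FileVersion')    ->  '2.33.0.0'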

def remove_outdated_binaries(version, filename_contains=None, list_extensions=['exe', 'msi', 'deb', 'rpm', 'dmg', 'pkg']):
    r"""Remove files whose filename does not contain the given version number

    Args:
        version (str): version number of the files to keep
        filename_contains (str or list of str): part of the filename that must be present (useful to distinguish architecture and os)
        list_extensions (str or list of str): file extensions of the files to check

    Returns:
        None

    .. versionadded:: 2.0

    """
    if type(list_extensions) != list:
        list_extensions = [list_extensions]
    if filename_contains:
        if type(filename_contains) != list:
            filename_contains = [filename_contains]
    # Normalize extensions: keep entries already prefixed with a dot instead of dropping them
    list_extensions = [ext if ext.startswith('.') else '.' + ext for ext in list_extensions]
    for file_ext in list_extensions:
        for bin_in_dir in glob.glob('*%s' % file_ext):
            if not version in bin_in_dir:
                remove_file(bin_in_dir)
            elif filename_contains:
                # Only checked for files kept by the version test above
                for filename_contain in filename_contains:
                    if not filename_contain in bin_in_dir:
                        remove_file(bin_in_dir)
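
# Illustrative behaviour (hypothetical directory content): with the files
# 'git-2.32.0-intel-universal-mavericks.dmg' and 'git-2.33.0-intel-universal-mavericks.dmg'
# present in the package directory, remove_outdated_binaries('2.33.0') deletes the 2.32.0 dmg
# and keeps the 2.33.0 one.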