tis-pidgin
2.14.1-21
Instant Messaging for Jabber, AIM, Sametime and XMPP, including the GTK libraries and the Rocket.chat plugin
6125 downloads


Description
- package : tis-pidgin
- version : 2.14.1-21
- architecture : all
- categories :
- maintainer : Simon Fonteneau
- description : Instant Messaging for Jabber, AIM, Sametime and XMPP, including the GTK libraries and the Rocket.chat plugin
- locale :
- target_os : windows
- min_wapt_version :
- sources :
- installed_size :
- impacted_process : pidgin.exe
- description_fr : Messagerie instantanée pour Jabber, AIM, Sametime, XMPP, incluant les librairies GTK et le plugin Rocket.chat
- description_pl :
- description_de :
- description_es :
- description_pt :
- description_it :
- description_nl :
- description_ru :
- editor :
- licence :
- signature_date : 2021-12-22T09:32:25.968764
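
For context, a hedged example of how a package like this is typically deployed: on a machine whose WAPT agent is enrolled against a repository hosting tis-pidgin, installation is driven from the command line (or the WAPT console) rather than by running the Pidgin installer by hand. A minimal sketch, assuming the standard wapt-get client is on the PATH:

    wapt-get update
    wapt-get install tis-pidgin

The install() function in the Setup.py below is what the agent executes during that install step.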
Setup.py
# -*- coding: utf-8 -*-
from setuphelpers import *

import os
import glob
import json
import platform

import bs4 as BeautifulSoup


def install():
    # Declaring local variables
    package_version = control.get_software_version()
    bin_name = glob.glob('pidgin-*-offline.exe')[0]

    # Installing the package
    print("Installing: %s" % bin_name)
    install_exe_if_needed(
        bin_name,
        silentflags='/S',
        key='Pidgin',
        min_version=package_version,
    )
def update_package():
    # Declaring local variables
    result = False
    proxies = get_proxies()
    if not proxies:
        proxies = get_proxies_from_wapt_console()
    app_name = control.name
    url_api = 'https://sourceforge.net/projects/pidgin/best_release.json'

    # Getting latest version information from official sources
    print("API used is: %s" % url_api)
    json_load = json.loads(wgets(url_api, proxies=proxies))
    for download in json_load['platform_releases']:
        if '-offline.exe' in json_load['platform_releases'][download]['filename']:
            url_dl = json_load['platform_releases'][download]['url']
            version = json_load['platform_releases'][download]['filename'].split('/')[-2]
            latest_bin = json_load['platform_releases'][download]['filename'].split('/')[-1]
            break
    print("Latest %s version is: %s" % (app_name, version))
    print("Download URL is: %s" % url_dl)

    # Downloading latest binaries
    if not isfile(latest_bin):
        print("Downloading: %s" % latest_bin)
        wget(url_dl, latest_bin, proxies=proxies)

        # Checking version from file
        version_from_file = get_version_from_binary(latest_bin)
        if version != version_from_file and version_from_file != '':
            print("Changing version to the version number of the binary (from: %s to: %s)" % (version, version_from_file))
            os.rename(latest_bin, latest_bin.replace(version, version_from_file))
            version = version_from_file
    else:
        print('Already up to date, skipped')

    # Changing version of the package
    if Version(version) != Version(control.get_software_version()):
        print("Software updated to version: %s" % Version(version))
        result = True
    control.version = '%s-%s' % (version, control.version.split('-', 1)[-1])
    #control.set_software_version(version)
    control.save_control_to_wapt()

    # Deleting outdated binaries
    remove_outdated_binaries(version)

    # Validating update-package-sources
    return result
def get_proxies():
    r"""Return system proxy with the urllib python library
    >>> get_proxies()
    {'http': 'http://srvproxy.ad.domain.lan:8080',
     'https': 'http://srvproxy.ad.domain.lan:8080'}
    """
    if platform.python_version_tuple()[0] == '3':
        from urllib.request import getproxies
    else:
        from urllib import getproxies
    return getproxies()
def get_proxies_from_wapt_console():
    r"""Return proxy information from the current user WAPT console
    >>> get_proxies_from_wapt_console()
    {'http': 'http://srvproxy.ad.domain.lan:8080',
     'https': 'http://srvproxy.ad.domain.lan:8080'}
    """
    proxies = {}
    if platform.system() == 'Windows':
        waptconsole_ini_path = makepath(user_local_appdata(), 'waptconsole', 'waptconsole.ini')
    else:
        waptconsole_ini_path = makepath(user_home_directory(), '.config', 'waptconsole', 'waptconsole.ini')
    if isfile(waptconsole_ini_path):
        proxy_wapt = inifile_readstring(waptconsole_ini_path, 'global', 'http_proxy')
        if proxy_wapt:
            proxies = {'http': proxy_wapt, 'https': proxy_wapt}
    return proxies
def bs_find(url, element, attribute=None, value=None, user_agent=None, proxies=None, features='html.parser', **kwargs):
    r"""Parse an html web page with BeautifulSoup and return the first matching element
    Args:
        url (str): url of the web page to parse
        element (str): searched element
        attribute (str): selected attribute of the element
        value (str): value of the selected attribute
        user_agent (str): specify a user-agent if needed
        proxies (dict): specify your proxy if needed
        features (str): bs feature to use
        **kwargs (str): additional parameters passed to requests
    >>> bs_find('https://www.w3.org/', 'a', 'title', 'Open Web Platform testing')['href']
    'https://web-platform-tests.org/'
    >>> bs_find('https://www.w3.org/', 'span', 'class', 'alt-logo').string
    'W3C'
    .. versionadded:: 2.0
    """
    import requests
    if user_agent:
        page = requests.get(url, proxies=proxies, verify=False, headers={'User-Agent': '%s' % user_agent}, **kwargs).text
    else:
        page = requests.get(url, proxies=proxies, verify=False, **kwargs).text
    soup = BeautifulSoup.BeautifulSoup(page, features=features)
    if value:
        return soup.find(element, {attribute: value})
    else:
        return soup.find(element)
def bs_find_all(url, element, attribute=None, value=None, user_agent=None, proxies=None, features='html.parser', **kwargs):
    r"""Parse an html web page with BeautifulSoup and return the list of matching elements
    Args:
        url (str): url of the web page to parse
        element (str): searched element
        attribute (str): selected attribute of the element
        value (str): value of the selected attribute
        user_agent (str): specify a user-agent if needed
        proxies (dict): specify your proxy if needed
        features (str): bs feature to use
        **kwargs (str): additional parameters passed to requests
    >>> bs_find_all('https://www.w3.org/', 'a', 'title', 'Open Web Platform testing')[0]['href']
    'https://web-platform-tests.org/'
    >>> bs_find_all('https://www.w3.org/', 'span', 'class', 'alt-logo')[0].string
    'W3C'
    .. versionadded:: 2.0
    """
    import requests
    if user_agent:
        page = requests.get(url, proxies=proxies, headers={'User-Agent': '%s' % user_agent}, **kwargs).text
    else:
        page = requests.get(url, proxies=proxies, **kwargs).text
    soup = BeautifulSoup.BeautifulSoup(page, features=features)
    if value:
        return soup.find_all(element, {attribute: value})
    else:
        return soup.find_all(element)
def get_version_from_binary(filename, property_name='ProductVersion'):
    r"""Get the installer version from the file properties; for now, only exe and msi files are supported
    Args:
        filename (str): path to the file
        property_name (str): selected property
    Returns:
        str: version number
    """
    if filename.endswith('.msi'):
        return get_msi_properties(filename)[property_name]
    else:
        return get_file_properties(filename)[property_name]
def remove_outdated_binaries(version, filename_contains=None, list_extensions=['exe', 'msi', 'deb', 'rpm', 'dmg', 'pkg']):
    r"""Remove binaries whose filename does not contain the given version
    Args:
        version (str): version number of the files to keep
        filename_contains (str or list of str): part of the filename that must be present (useful for distinguishing architecture and os)
        list_extensions (str or list of str): file extensions of the files to check
    Returns:
        None
    .. versionadded:: 2.0
    """
    if type(list_extensions) != list:
        list_extensions = [list_extensions]
    if filename_contains:
        if type(filename_contains) != list:
            filename_contains = [filename_contains]
    # Normalize extensions so that each entry starts with a dot
    list_extensions = [ext if ext.startswith('.') else '.' + ext for ext in list_extensions]
    for file_ext in list_extensions:
        for bin_in_dir in glob.glob('*%s' % file_ext):
            if not version in bin_in_dir:
                remove_file(bin_in_dir)
            if filename_contains:
                for filename_contain in filename_contains:
                    if not filename_contain in bin_in_dir:
                        remove_file(bin_in_dir)
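
Note on maintenance: update_package() above is not run on client machines. It is the maintainer-side routine that queries the SourceForge best_release.json API, downloads the latest pidgin-*-offline.exe and bumps the control version before the package is rebuilt and signed. A minimal sketch of how it would typically be triggered from a package development workstation, assuming standard WAPT developer tooling and an illustrative placeholder path:

    wapt-get update-package-sources <path-to-tis-pidgin-sources>

The path placeholder is illustrative; point it at the checked-out package source directory.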