tis-seafile

8.0.5-6
Client for cloud file storage Seafile
13714 downloads
Download
View on luti logo
tis-seafile icon

  Description 

  • package : tis-seafile
  • version : 8.0.5-6
  • architecture : all
  • categories : Office,Media
  • maintainer : WAPT Team,Jimmy PELÉ,Hubert TOUVET,Amelie LE JEUNE
  • description : Client for cloud file storage Seafile
  • locale : all
  • target_os : debian_based
  • min_wapt_version : 1.5
  • sources : https://www.seafile.com/en/download/
  • installed_size : 216985600
  • impacted_process : seafile-applet,seaf-daemon,shellext-fix
  • description_fr : Client Linux pour les systèmes de partage de fichiers Seafile
  • description_pl :
  • description_de :
  • description_es :
  • description_pt :
  • description_it :
  • description_nl :
  • description_ru :
  • editor : Seafile Ltd
  • licence : GPL
  • signature_date : 2021-12-20T11:03:43.344260
  • Homepage : https://www.seafile.com/

  Setup.py 

# -*- coding: utf-8 -*-
from setuphelpers import *
import platform
import bs4 as BeautifulSoup


# Declaring specific app values (TO CHANGE)
# apt package name installed/removed below; also used in log messages.
package_name = 'seafile-gui'

def install():
    """Install the Seafile client from the vendor's apt repository.

    Registers the Seafile signing key and apt source for the running
    distribution, then installs the package via apt.
    """
    print('Installing %s' % package_name)
    # The vendor repository is served over https.
    install_apt('apt-transport-https')
    # Fetch the repository signing key into the system keyring directory.
    run('wget https://linux-clients.seafile.com/seafile.asc -O /usr/share/keyrings/seafile-keyring.asc')
    # Declare the distribution-specific Seafile apt source
    # ($(lsb_release -c -s) resolves the codename at run time).
    run('bash -c "echo \'deb [arch=amd64 signed-by=/usr/share/keyrings/seafile-keyring.asc] https://linux-clients.seafile.com/seafile-deb/$(lsb_release -c -s)/ stable main\' > /etc/apt/sources.list.d/seafile.list"')
    install_apt(package_name)
    

def uninstall():
    """Remove the Seafile client package together with its now-unused
    dependencies (autoremove)."""
    print('Uninstalling %s' % package_name)
    uninstall_apt(package_name, autoremove=True)
    
def update_package():
    """Fetch the latest Linux client version from the vendor changelog and
    refresh this package's control file accordingly.

    The changelog lists bullets per platform; the version number of the first
    Linux-only bullet lives in the nearest preceding <h3> heading.

    Returns:
        bool: True when the upstream version differs from the software version
        currently recorded in the control file (package sources should then be
        rebuilt), False otherwise.

    Raises:
        Exception: when no version could be parsed from the changelog page
        (previously this crashed with a NameError on `latest_version`).
    """
    result = False
    proxies = get_proxies()
    if not proxies:
        proxies = get_proxies_from_wapt_console()

    url = 'https://manual.seafile.com/changelog/client-changelog/'
    # Getting latest version from official website
    latest_version = None
    for u in bs_find_all(url, 'li', proxies=proxies):
        # Skip Windows/Mac bullets; keep the first plain-text Linux entry.
        if 'Win' not in u.text and 'Mac' not in u.text and u.string is not None:
            previous_element = u.previous_element
            # Walk backwards through the document until the section heading.
            while previous_element is not None:
                if previous_element.name == 'h3':
                    latest_version = previous_element.text.split(' ')[0]
                    break
                previous_element = previous_element.previous_element
            break

    if latest_version is None:
        # Page layout changed or nothing matched: fail loudly instead of
        # writing a bogus version into the control file.
        raise Exception('Could not find the latest Seafile client version on %s' % url)

    print('Latest Version %s' % latest_version)

    # changing version of the package
    if Version(latest_version) != control.get_software_version():
        result = True
    control.version = '%s-%s' % (latest_version, control.version.split('-', 1)[-1])
    control.save_control_to_wapt()

    # deleting outdated binaries
    remove_outdated_binaries(latest_version)

    # validating or not update-package-sources
    return result
   

def get_proxies():
    r"""Return the system proxy settings discovered by urllib.

    >>> get_proxies()
    {'http': 'http://srvproxy.ad.domain.lan:8080',
    'https': 'http://srvproxy.ad.domain.lan:8080'}

    """
    # urllib was reorganized between Python 2 and 3; import from the
    # location matching the running interpreter.
    running_py3 = platform.python_version_tuple()[0] == '3'
    if running_py3:
        from urllib.request import getproxies
    else:
        from urllib import getproxies
    return getproxies()


def get_proxies_from_wapt_console():
    r"""Return proxy information configured in the current user's WAPT console

    >>> get_proxies_from_wapt_console()
    {'http': 'http://srvproxy.ad.domain.lan:8080',
    'https': 'http://srvproxy.ad.domain.lan:8080'}

    """
    # waptconsole.ini lives in a per-user location that differs by OS.
    if platform.system() == 'Windows':
        ini_path = makepath(user_local_appdata(), 'waptconsole', 'waptconsole.ini')
    else:
        ini_path = makepath(user_home_directory(), '.config', 'waptconsole', 'waptconsole.ini')
    if not isfile(ini_path):
        return {}
    proxy_wapt = inifile_readstring(ini_path, 'global', 'http_proxy')
    if not proxy_wapt:
        return {}
    return {'http': proxy_wapt, 'https': proxy_wapt}


def bs_find(url, element, attribute=None, value=None, user_agent=None, proxies=None, features='html.parser', **kwargs):
    r"""Parse an html web page with BeautifulSoup and return the first match

    Args:
        url (str): url of the web page to parse
        element (str): searched element
        attribute (str): selected attribute of the element
        value (str): value of the selected attribute
        user_agent (str): specify a user-agent if needed
        proxies (dict): specify your proxy if needed
        **kwargs (str): joker for requests parameters
        features (str): bs feature to use


    >>> bs_find('https://www.w3.org/', 'a', 'title', 'Open Web Platform testing')['href']
    'https://web-platform-tests.org/'

    >>> bs_find('https://www.w3.org/', 'span', 'class', 'alt-logo').string
    'W3C'

    .. versionadded:: 2.0

    """
    import requests
    # Only send a User-Agent header when one was explicitly requested
    # (requests treats headers=None as "no extra headers").
    headers = {'User-Agent': '%s' % user_agent} if user_agent else None
    page = requests.get(url, proxies=proxies, headers=headers, **kwargs).text
    soup = BeautifulSoup.BeautifulSoup(page, features=features)
    # find(name, attrs={}) with an empty dict is equivalent to find(name).
    attrs = {attribute: value} if value else {}
    return soup.find(element, attrs)


def bs_find_all(url, element, attribute=None, value=None, user_agent=None, proxies=None, features='html.parser', **kwargs):
    r"""Parse an html web page with BeautifulSoup and return all matches

    Args:
        url (str): url of the web page to parse
        element (str): searched element
        attribute (str): selected attribute of the element
        value (str): value of the selected attribute
        user_agent (str): specify a user-agent if needed
        proxies (dict): specify your proxy if needed
        **kwargs (str): joker for requests parameters
        features (str): bs feature to use


    >>> bs_find_all('https://www.w3.org/', 'a', 'title', 'Open Web Platform testing')[0]['href']
    'https://web-platform-tests.org/'

    >>> bs_find_all('https://www.w3.org/', 'span', 'class', 'alt-logo')[0].string
    'W3C'

    .. versionadded:: 2.0

    """
    import requests
    # Only send a User-Agent header when one was explicitly requested
    # (requests treats headers=None as "no extra headers").
    headers = {'User-Agent': '%s' % user_agent} if user_agent else None
    page = requests.get(url, proxies=proxies, headers=headers, **kwargs).text
    soup = BeautifulSoup.BeautifulSoup(page, features=features)
    # find_all(name, attrs={}) with an empty dict is equivalent to find_all(name).
    attrs = {attribute: value} if value else {}
    return soup.find_all(element, attrs)


def get_version_from_binary(filename, property_name='ProductVersion'):
    r"""Get the installer version from the file's metadata; for now only exe
    and msi files are supported

    Args:
        filename (str): path to the file
        property_name (str): selected property

    Returns:
        str: version number

     """
    # .msi metadata is read via MSI properties; anything else falls back to
    # generic file properties.
    read_properties = get_msi_properties if filename.endswith('.msi') else get_file_properties
    return read_properties(filename)[property_name]


def remove_outdated_binaries(version, filename_contains=None, list_extensions=['exe','msi','deb','rpm','dmg','pkg']):
    r"""Remove binaries from the current directory whose filename does not
    contain the expected version (and, optionally, required substrings)

    Args:
        version (str): version number that kept files must contain
        filename_contains (str or list of str): Part of the filename that must be contained (useful for distinguishing architecture and os)
        list_extensions (str or list of str): file extensions of verified files

    Returns:
        None

    .. versionadded:: 2.0

    """
    # Accept a single string as well as a list for both filter arguments.
    if not isinstance(list_extensions, list):
        list_extensions = [list_extensions]
    if filename_contains and not isinstance(filename_contains, list):
        filename_contains = [filename_contains]
    # Normalize extensions to a leading dot. NOTE: the previous comprehension
    # filtered out (dropped) any extension that already started with '.',
    # so those files were never checked at all.
    list_extensions = [ext if ext.startswith('.') else '.' + ext for ext in list_extensions]
    for file_ext in list_extensions:
        for bin_in_dir in glob.glob('*%s' % file_ext):
            if version not in bin_in_dir:
                remove_file(bin_in_dir)
                # Already deleted: do not attempt a second removal below.
                continue
            # Remove the file when any required substring is missing
            # (single remove_file call even if several substrings are absent).
            if filename_contains and not all(part in bin_in_dir for part in filename_contains):
                remove_file(bin_in_dir)


										

  Changelog 



Changelog software url : https://download.seafile.com/published/seafile-manual/changelog/client-changelog.md



No changelog.txt.
									
  manifest.sha256