# -*- coding: utf-8 -*-
from setuphelpers import *
import json
import platform
import bs4 as BeautifulSoup
import requests
import time
uninstallkey = []
# Defining variables
ext_name = 'MS-CEINTL.vscode-language-pack-fr.vsix'
ext_unique_identifier = 'MS-CEINTL.vscode-language-pack-fr'
app_from_path = 'code'
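
# WAPT package for the Visual Studio Code French language pack extension:
# install() copies the bundled .vsix next to the VS Code installation,
# session_setup() installs it in each user session via "code --install-extension",
# and update_package() refreshes the .vsix from the Visual Studio Marketplace.
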
def install():
    # Initializing variables
    if get_os_name() == 'Windows':
        app_dir = makepath(programfiles, 'Microsoft VS Code')
    elif get_os_name() == 'Linux':
        app_dir = '/usr/share/code/'
    elif get_os_name() == 'Darwin':
        app_dir = '/Applications/Visual Studio Code.app/'
    ext_path = makepath(app_dir, ext_name)
    # Copying extension to app_dir
    print("Copying %s to %s" % (ext_name, app_dir))
    filecopyto(ext_name, app_dir)

def session_setup():
    # Initializing variables
    if get_os_name() == 'Windows':
        app_dir = makepath(programfiles, 'Microsoft VS Code')
    elif get_os_name() == 'Linux':
        app_dir = '/usr/share/code/'
    elif get_os_name() == 'Darwin':
        app_dir = '/Applications/Visual Studio Code.app/'
    ext_path = makepath(app_dir, ext_name)
    # Installing extension in user env
    print("Installing %s extension in user env" % ext_name)
    run_notfatal('%s --install-extension "%s"' % (app_from_path, ext_path))

def update_package():
    # Initializing variables
    proxies = get_proxies()
    if not proxies:
        proxies = get_proxies_from_wapt_console()
    app_name = control.name
    url = 'https://marketplace.visualstudio.com/items/%s' % ext_unique_identifier
    # Getting latest version from official sources
    # The Marketplace item page embeds its metadata as JSON inside a
    # <script type="application/json"> tag; parse the first one that looks like JSON.
    print('URL used is %s' % url)
    for bs_search in bs_find_all(url, 'script', 'type', 'application/json', proxies=proxies, user_agent='Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.150 Safari/537.36 Edg/88.0.705.68'):
        if bs_search.string and bs_search.string.startswith('{'):
            json_data = json.loads(bs_search.string)
            break
    version = json_data['Resources']['Version']
    url_dl = 'https://marketplace.visualstudio.com/_apis/public/gallery/publishers/%s/vsextensions/%s/%s/vspackage' % (json_data['Resources']['PublisherName'], json_data['Resources']['ExtensionName'], version)
    latest_bin = ext_name
    print("Latest %s version is %s" % (app_name, version))
    print("Download url is %s" % url_dl)
    # Downloading latest binaries
    print('Downloading %s' % latest_bin)
    time.sleep(1)
    wget(url_dl, latest_bin, proxies=proxies, user_agent='Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.150 Safari/537.36 Edg/88.0.705.68')
    # Changing version of the package
    # WAPT versions follow "<upstream_version>-<packaging_revision>"; keep the revision part.
    control.version = '%s-%s' % (version, control.version.split('-', 1)[-1])
    control.save_control_to_wapt()
    print("Changing package version to %s in WAPT\\control" % control.version)

def get_os_name():
    return platform.system()

def get_proxies():
    """Return the proxies configured in the user environment, if any."""
    if platform.python_version_tuple()[0] == '3':
        from urllib.request import getproxies
    else:
        from urllib import getproxies
    return getproxies()

def get_proxies_from_wapt_console():
    """Return the http/https proxy configured in waptconsole.ini, if any."""
    proxies = {}
    if platform.system() == 'Windows':
        waptconsole_ini_path = makepath(user_local_appdata(), 'waptconsole', 'waptconsole.ini')
    else:
        waptconsole_ini_path = makepath(user_home_directory(), '.config', 'waptconsole', 'waptconsole.ini')
    if isfile(waptconsole_ini_path):
        proxy_wapt = inifile_readstring(waptconsole_ini_path, 'global', 'http_proxy')
        if proxy_wapt:
            proxies = {'http': proxy_wapt, 'https': proxy_wapt}
    return proxies

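# Generic helpers for scraping download pages with requests + BeautifulSoup,
# used above to read the Marketplace item page.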
def bs_find(url, element, attribute=None, value=None, user_agent=None, proxies=None, features='html.parser', **kwargs):
    """Return the first matching element of the page at url.

    You may need to use a user agent for some websites.
    Example: user_agent='Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:78.0) Gecko/20100101 Firefox/78.0'
    """
    if user_agent:
        page = requests.get(url, proxies=proxies, headers={'User-Agent': '%s' % user_agent}, **kwargs).text
    else:
        page = requests.get(url, proxies=proxies, **kwargs).text
    soup = BeautifulSoup.BeautifulSoup(page, features=features)
    if value:
        return soup.find(element, {attribute: value})
    else:
        return soup.find(element)

def bs_find_all(url, element, attribute=None, value=None, user_agent=None, proxies=None, features='html.parser', **kwargs):
    """Return all matching elements of the page at url.

    You may need to use a user agent for some websites.
    Example: user_agent='Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:78.0) Gecko/20100101 Firefox/78.0'
    """
    if user_agent:
        page = requests.get(url, proxies=proxies, headers={'User-Agent': '%s' % user_agent}, **kwargs).text
    else:
        page = requests.get(url, proxies=proxies, **kwargs).text
    soup = BeautifulSoup.BeautifulSoup(page, features=features)
    if value:
        return soup.find_all(element, {attribute: value})
    else:
        return soup.find_all(element)
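

# Optional convenience entry point, a minimal sketch for refreshing the package
# sources by hand. It assumes the script is run in a WAPT package development
# environment where setuphelpers globals and the `control` object are available;
# the WAPT agent itself never needs this guard.
if __name__ == '__main__':
    update_package()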