tis-waptpython-cffi
1.15.1-15
Foreign Function Interface for Python calling C code.
170 downloads
See build result See VirusTotal scan
Description
- package : tis-waptpython-cffi
- name : waptpython cffi
- version : 1.15.1-15
- categories : Development
- maintainer : WAPT Team,Tranquil IT,Jimmy PELÉ
- installed_size : 14060715
- editor :
- licence :
- signature_date : 2023-09-17T20:10:30.150194
- size : 4.67 Mo
- locale : all
- target_os : all
- impacted_process :
- architecture : x64,x86
- Homepage : http://cffi.readthedocs.org
- Depends :
control
package : tis-waptpython-cffi
version : 1.15.1-15
architecture : x64,x86
section : base
priority : optional
name : waptpython cffi
categories : Development
maintainer : WAPT Team,Tranquil IT,Jimmy PELÉ
description : Foreign Function Interface for Python calling C code.
depends : tis-waptpython-pip
conflicts :
maturity : PROD
locale : all
target_os : all
min_wapt_version : 2.4
sources : https://pypi.org/project/cffi/#files
installed_size : 14060715
impacted_process :
description_fr :
description_pl :
description_de :
description_es :
description_pt :
description_it :
description_nl :
description_ru :
audit_schedule :
editor :
keywords : waptpython,python,pip,pypi,package,cffi
licence :
homepage : http://cffi.readthedocs.org
package_uuid : 21abc018-912a-4a90-999c-5599767295a9
valid_from :
valid_until :
forced_install_on :
changelog : https://cffi.readthedocs.io/en/latest/whatsnew.html
min_os_version :
max_os_version :
icon_sha256sum : 3307da49dca7842503f3369c908cd625f2b16276a8690e4dd37c08a057580c44
signer : Tranquil IT
signer_fingerprint: 8c5127a75392be9cc9afd0dbae1222a673072c308c14d88ab246e23832e8c6bb
signature : W/amNDmq4oQl73MX+kqEU8244u5wjL2ZRpoJM3ZfCCflVLYhZPdJghO0o/Pk8FhNfp65bogNicTPbdBvkE1PerLntZmh3/jnZsQwwl23JExluWBaVNs11Ft7mORUXcKwIdm2qiVXg6FqfzJobE9PkTjSqfbYbDXF6FHjSzprEE1x0DZj4/6YQ+F1HdXoG0ujMKKgodK+EHD+y/EkxYqnDxZSERijw+VwmlGmVhIdCbSnm974xLxsphRJYayfytBnCEO8bcwyrIP74Tkeil4U9ANgITgA+8QHIvj1h6efMndnZnQSmImzijQCnmeqMwMifso7fPPaOt+aGg0DXLzhIA==
signature_date : 2023-09-17T20:10:30.150194
signed_attributes : package,version,architecture,section,priority,name,categories,maintainer,description,depends,conflicts,maturity,locale,target_os,min_wapt_version,sources,installed_size,impacted_process,description_fr,description_pl,description_de,description_es,description_pt,description_it,description_nl,description_ru,audit_schedule,editor,keywords,licence,homepage,package_uuid,valid_from,valid_until,forced_install_on,changelog,min_os_version,max_os_version,icon_sha256sum,signer,signer_fingerprint,signature_date,signed_attributes
Setup.py
# -*- coding: utf-8 -*-
from setuphelpers import *
import os
import sys
# External library directory next to the interpreter prefix (strips /bin on
# Linux, \Scripts on Windows). Added to sys.path so pip --target installs
# become importable.
# NOTE(review): the directory is literally named "selenium" although this
# package installs "cffi" — presumably copy/paste from the selenium package;
# confirm whether this shared location is intentional before renaming, since
# audit()/uninstall() rely on the same path on already-deployed machines.
ext_lib_dir = os.sep.join([sys.executable.rsplit(os.path.sep, 1)[0].replace("/bin", "").replace("\\Scripts", ""), "selenium"])
sys.path.insert(0, ext_lib_dir)
package_name = "cffi"  # PyPI project wrapped by this WAPT package
install_in_ext_lib = True  # True: pip --target into ext_lib_dir; False: system-wide install
def install():
    """Install the bundled wheel(s) offline with pip from the local package folder."""
    wheel_dir = makepath(basedir, package_name)
    mkdirs(ext_lib_dir)
    python_exe = get_waptpython_path()
    # Prefer requirements.txt when shipped, otherwise install the single project.
    if isfile("requirements.txt"):
        cmd_parts = [f'"{python_exe}" -m pip install -r requirements.txt --upgrade --no-deps']
    else:
        cmd_parts = [f'"{python_exe}" -m pip install {package_name} --upgrade --no-deps']
    # Offline: resolve only against the wheels shipped inside this package.
    cmd_parts.append(f'--no-index --find-links "{wheel_dir}"')
    if install_in_ext_lib:
        cmd_parts.append(f'--target "{ext_lib_dir}"')
    if force:
        cmd_parts.append("--force-reinstall")
    print(run(" ".join(cmd_parts)))
def audit():
    """Check that the pip package is still present and current; self-heal by
    reinstalling when it is missing or outdated.

    Returns:
        str: "OK" when nothing was done, "WARNING" when a reinstall was triggered.
    """
    audit_status = "OK"
    if not install_in_ext_lib:
        # System-wide install: ask pip itself about the package.
        pip_show_cmd = str(run(f'"{get_waptpython_path()}" -m pip show {package_name}', accept_returncodes=[0, 1, 3010])).strip()
        print(pip_show_cmd)
        if "No module named pip" in pip_show_cmd:
            # pip itself is broken: reinstall the <prefix>-waptpython-pip dependency first.
            pip_package = dependency_package = control.package.split("-")[0] + "-waptpython-pip"
            print(f"Reinstalling: {pip_package}")
            WAPT.install(pip_package, force=True)
            audit_status = "WARNING"
        if "not found" in pip_show_cmd:
            print(f"Reinstalling: {control.package}")
            WAPT.install(control.package, force=True)
            audit_status = "WARNING"
    else:
        # Target-dir install: check the .dist-info directory pip leaves behind.
        # NOTE(review): `glob` is not imported in this script — presumably
        # re-exported by `from setuphelpers import *`; confirm.
        if glob.glob(f"{ext_lib_dir}/{package_name}-{control.get_software_version()}.dist-info/*"):
            print(f"{control.package} is installed and up-to-date.")
        elif glob.glob(f"{ext_lib_dir}/{package_name}/*"):
            print(f"{control.package} is installed in another version.")
        else:
            print(f"{control.package} is not installed.")
            print(f"Reinstalling: {control.package}")
            WAPT.install(control.package, force=True)
            audit_status = "WARNING"
    return audit_status
def uninstall():
    """Uninstall via pip (system-wide installs) and wipe the ext-lib directories."""
    if not install_in_ext_lib:
        cmd = f'"{get_waptpython_path()}" -m pip uninstall {package_name} -y'
        print(run(cmd))
    # Target-dir installs leave package dirs + dist-info under ext_lib_dir.
    for leftover in glob.glob(f"{ext_lib_dir}/{package_name}*/"):
        remove_tree(leftover)
def get_waptpython_path():
    """Return the path of the WAPT-bundled Python launcher for the current OS."""
    launcher = "waptpython.exe" if get_os_name() == "Windows" else "waptpython.sh"
    return makepath(WAPT.wapt_base_dir, launcher)
update_package.py
# -*- coding: utf-8 -*-
from setuphelpers import *
from setupdevhelpers import *
import os
import sys
import bs4 as BeautifulSoup
from urllib.parse import urlparse, urljoin
# ext_lib_dir = os.sep.join([sys.executable.rsplit(os.path.sep, 1)[0].replace("/bin", "").replace("\\Scripts", ""), "selenium"])
# sys.path.insert(0, ext_lib_dir)
package_name = "cffi"  # PyPI project this WAPT package wraps
# Interpreter tag digits (e.g. "38") used to match "cpXY" wheel filename tags.
python_version = "".join(str(x) for x in sys.version_info[:2])
def update_package():
    """Download the latest upstream wheels, prune outdated files, refresh the
    control metadata from PyPI, and report whether the version changed.

    Returns:
        bool: True when a newer upstream version was found (sources updated).
    """
    # Declaring local variables
    package_updated = False
    proxies = get_proxies()
    if not proxies:
        proxies = get_proxies_from_wapt_console()
    api_url = f"https://pypi.org/pypi/{package_name}/json"
    # On the LUTI build infrastructure, pull from the internal pip mirror.
    if params.get("running_as_luti"):
        index_url = "https://pip.ad.tranquil.it/"
    else:
        index_url = None
    # fixed_version = "9.5.0"
    fixed_version = None  # set a version string here to pin a specific release
    # Downloading whl files in package_name folder
    if isfile("requirements.txt"):
        pip_download_cmd = f'"{get_waptpython_path()}" -m pip download -r requirements.txt --only-binary=:all: --dest "{package_name}"'
    else:
        pip_download_cmd = f'"{get_waptpython_path()}" -m pip download {package_name} --only-binary=:all: --dest "{package_name}"'
    if index_url:
        # Trust the mirror host explicitly and disable any proxy for it.
        pip_download_cmd += f' --index-url="{index_url}" --trusted-host="{index_url.split("//")[1].split("/")[0]}"'
        pip_download_cmd += f' --proxy=""'
    elif proxies:
        pip_download_cmd += f' --proxy="{proxies["http"]}"'
    print(run(pip_download_cmd))
    # Getting latest version information from official sources (PyPI JSON API)
    print("API used is: %s" % api_url)
    json_load = wgets(api_url, proxies=proxies, as_json=True)
    if fixed_version:
        json_search = json_load["releases"][fixed_version]
        version = fixed_version
    else:
        json_search = json_load["urls"]
        version = json_load["info"]["version"]
    if not index_url:
        # Download wheels straight from the PyPI metadata URLs.
        for to_download in json_search:
            filename = to_download["filename"]
            # Keep pure-python (-py3-) wheels and wheels built for this interpreter (cpXY).
            if ".whl" in filename and ("-py3-" in filename or f"cp{python_version}" in filename):
                download_url = to_download["url"]
                latest_bin = package_name + os.sep + filename
                if not isfile(latest_bin):
                    print("Downloading: %s" % latest_bin)
                    wget(download_url, latest_bin, proxies=proxies)
                else:
                    print("Binary is present: %s" % latest_bin)
    else:
        # Internal mirror: scrape its "Index of" page for matching wheel links.
        binaries_dict = bs_index_of_to_dict(index_url + package_name, proxies=proxies)
        for to_download in binaries_dict.keys():
            filename = to_download
            if ".whl" in filename and ("-py3-" in filename or f"cp{python_version}" in filename) and version in filename:
                download_url = binaries_dict[to_download]
                latest_bin = package_name + os.sep + filename
                if not isfile(latest_bin):
                    print("Downloading: %s" % latest_bin)
                    wget(download_url, latest_bin, proxies=proxies)
                else:
                    print("Binary is present: %s" % latest_bin)
    # Deleting outdated binaries left at the package root
    for f in glob.glob("*.whl"):
        print("Removing: %s" % f)
        remove_file(f)
    # Deleting other binaries (older versions of this project inside the wheel dir)
    for f in glob.glob(f'{package_name}/{json_load["info"]["name"]}*.whl'):  # filename.split("-")[0]
        if not version in f:
            print("Removing: %s" % f)
            remove_file(f)
    # Deleting other dirs (anything besides WAPT metadata and the wheel folder)
    for dir_name in glob.glob("*/"):
        dir_name = dir_name.split(os.sep)[0]
        if not dir_name in ["WAPT", "__pycache__"] and not package_name in dir_name:
            remove_tree(dir_name)
    # generate_requirements_txt
    if not isfile("requirements.txt"):
        generate_requirements_txt(package_name, glob.glob(f"{package_name}/*.whl"))
    else:
        # Drop downloaded wheels whose project is not listed in requirements.txt.
        with open("requirements.txt") as f:
            requirements_list = f.read().split("\n")
        requirements_list = [a.split("==")[0] for a in requirements_list]
        for package in glob.glob(f"{package_name}/*.whl"):
            package_info = package.split(os.sep)[-1].split("-")
            name = package_info[0].replace("_", "-").lower()
            if not name in requirements_list:
                remove_file(package)
    # control autocompletion from the PyPI project metadata
    api_url = f"https://pypi.org/pypi/{package_name}/json"
    json_load = wgets(api_url, proxies=proxies, as_json=True)
    control.package = control.package.split("-", 1)[0] + "-waptpython-" + json_load["info"]["name"].lower()
    control.name = "waptpython " + json_load["info"]["name"]
    # Rough on-disk estimate: wheels roughly triple in size once unpacked.
    control.installed_size = get_size(package_name) * 3
    if json_load["info"].get("keywords") is not None and json_load["info"]["keywords"]:
        print(f'You may wanna add: "{json_load["info"]["keywords"].lower()}" in: "control.keywords"')
    control.description = json_load["info"]["summary"]
    if json_load["info"]["project_urls"].get("Changelog") is not None and json_load["info"]["project_urls"]["Changelog"]:
        control.changelog = json_load["info"]["project_urls"]["Changelog"]
    control.sources = json_load["info"]["package_url"] + "#files"
    if json_load["info"]["project_urls"].get("Homepage") is not None and json_load["info"]["project_urls"]["Homepage"]:
        control.homepage = json_load["info"]["project_urls"]["Homepage"]
    # Changing version of the package
    if Version(version, 4) > Version(control.get_software_version(), 4):
        print("Software version updated (from: %s to: %s)" % (control.get_software_version(), Version(version)))
        package_updated = True
    else:
        print("Software version up-to-date (%s)" % Version(version))
    control.set_software_version(version)
    control.save_control_to_wapt()
    # Validating update-package-sources
    return package_updated
    # # Changing version of the package and validating update-package-sources
    # return complete_control_version(control, version)
def bs_index_of_to_dict(url, **kwargs):
    r"""Parse an HTML "Index of" directory listing into a name -> URL dict.

    Args:
        url (str): URL of the index page to parse.

    Returns:
        dict: entry label (link text, or href when the text is truncated)
        mapped to its absolute URL.
    """
    links = {}
    for anchor in bs_find_all(url, "a", "href"):
        href = anchor["href"]
        if "://" in href:
            target = href
        elif url.endswith("/"):
            target = url + href
        else:
            target = url + "/" + href
        # Index pages truncate long names to "...>"; fall back to the raw href.
        label = anchor.text
        if label.endswith("..>"):
            label = anchor["href"]
        links[label] = target
    return links
def is_url(x):
    """Return True when *x* parses as an absolute URL (has scheme and netloc).

    The original used a bare ``except:``, which also swallowed SystemExit and
    KeyboardInterrupt; narrowed to the exceptions ``urlparse`` actually raises
    (ValueError for malformed URLs, TypeError/AttributeError for non-string input).
    """
    try:
        parsed = urlparse(x)
    except (ValueError, TypeError, AttributeError):
        return False
    return all([parsed.scheme, parsed.netloc])
def bs_find_all(url, element, attribute=None, value=None, user_agent=None, proxies=None, features="html.parser", **kwargs):
    """
    Parse an HTML/XML page (fetched from a URL, or given as a string) with
    BeautifulSoup and return all matching elements.

    Args:
        url (str): URL of the page to fetch, or raw markup to parse directly.
        element (str): Tag name to search for.
        attribute (str): Attribute the tag must carry.
        value (str): Required value of that attribute.
        user_agent (str): Optional User-Agent header for the HTTP request.
        proxies (dict): Optional proxies for the HTTP request.
        features (str): Parser backend passed to BeautifulSoup.
        **kwargs: Extra keyword arguments forwarded to requests.get.

    Returns:
        list: Matching bs4.element.Tag objects.

    Examples:
        >>> bs_find_all('https://www.w3.org/', 'a', 'title', 'Open Web Platform testing')[0]['href']
        'https://web-platform-tests.org/'
        >>> bs_find_all('https://www.w3.org/', 'span', 'class', 'alt-logo')[0].string
        'W3C'

    .. versionadded:: 2.0
    .. versionchanged:: 2.5
        Can parse raw string content or reparse a "bs_result"; can select on a specific attribute.
    """
    url = str(url)
    if is_url(url):
        # Fetch remote content; otherwise treat the argument as markup itself.
        if user_agent:
            page = requests.get(url, proxies=proxies, headers={"User-Agent": user_agent}, **kwargs).text
        else:
            page = requests.get(url, proxies=proxies, **kwargs).text
    else:
        page = url
    soup = BeautifulSoup.BeautifulSoup(page, features=features)
    # Most specific filter first: attribute == value, then attribute present, then tag only.
    if value:
        return soup.find_all(element, {attribute: value})
    if attribute:
        return soup.find_all(element, attrs={attribute: True})
    return soup.find_all(element)
def _numeric_version_key(version):
    """Sort key comparing numeric chunks as integers so "1.10" > "1.9".

    re.split with a capturing group alternates text/digit chunks, so compared
    lists stay type-aligned positionally.
    """
    import re
    return [int(chunk) if chunk.isdigit() else chunk for chunk in re.split(r"(\d+)", version)]


def generate_requirements_txt(package_name, packages_list):
    """Write a requirements.txt listing each distinct project found in *packages_list*.

    Args:
        package_name (str): Main project name (kept for interface compatibility; unused).
        packages_list (list): Paths of downloaded .whl files
            (``<dir><sep><name>-<version>-<tags>.whl``).

    Side effect: writes "requirements.txt" (LF newlines) in the current
    directory when at least one project was found.
    """
    latest_versions = {}
    for package in packages_list:
        # Wheel filename convention: name-version-pythontag-abitag-platformtag.whl
        package_info = package.split(os.sep)[-1].split("-")
        name = package_info[0].replace("_", "-").lower()
        version = package_info[1]
        # Fix: the original used plain string comparison, which ranks
        # "1.9" above "1.10"; compare numerically instead.
        if name not in latest_versions or _numeric_version_key(version) > _numeric_version_key(latest_versions[name]):
            latest_versions[name] = version
    # Only project names are written (no == pins): versions are resolved from
    # the local wheel directory at install time.
    requirements_content = [name for name in latest_versions]
    if requirements_content:
        with open("requirements.txt", "w", newline="\n") as requirements_file:
            requirements_file.write("\n".join(requirements_content))
def get_waptpython_path():
    """Return the path of the WAPT-bundled Python launcher for the current OS."""
    launcher = "waptpython.exe" if get_os_name() == "Windows" else "waptpython.sh"
    return makepath(WAPT.wapt_base_dir, launcher)
def get_size(start_path="."):
    """Return the total size in bytes of all regular files under *start_path*.

    Symbolic links are skipped so their targets are not double-counted.
    """
    total = 0
    for root, _dirs, files in os.walk(start_path):
        for name in files:
            full_path = os.path.join(root, name)
            if os.path.islink(full_path):
                continue
            total += os.path.getsize(full_path)
    return total
55fe4a4caa2c65c63209d708ba432e85017ce466e22b879f241bc6656d79a2c9 : setup.py
: __pycache__
8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 : cffi/cffi-1.15.1-cp38-cp38-win32.whl
921aee58fa7738f26df7a6cba2bdfbd3ceb31ef5092036831da56e005a014aa9 : cffi/cffi-1.15.1-cp38-cp38-macosx_11_0_arm64.whl
41e77b536cb7bf0263e19a74e7992a21f2dd3991a411d744a06b8165cd677681 : cffi/cffi-1.15.1-cp38-cp38-manylinux_2_31_armv7l.libffi7.whl
239536a46c22154c3cd37e2f645fc46a29a0037ce11268349d8d8f5533ab2059 : cffi/cffi-1.15.1-cp38-cp38-manylinux_2_28_aarch64.libffi6.whl
8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 : cffi/pycparser-2.21-py2.py3-none-any.whl
4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 : cffi/cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
b9a2d114a8a59d0368efb45e566d08266d930172e42096eca4c4361033cde2e1 : cffi/cffi-1.15.1-cp38-cp38-manylinux_2_31_aarch64.libffi7.whl
08c11bd9973166c1d4467a32f4862e2b5bb4943ff6102eae4006b940e7800b03 : cffi/cffi-1.15.1-cp38-cp38-linux_armv7l.whl
3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca : cffi/cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl
87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 : cffi/cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl
a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 : cffi/cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl
00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 : cffi/cffi-1.15.1-cp38-cp38-win_amd64.whl
1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a : cffi/cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl
5dddaa717b67a8375616d0b7f04fcc8c0855ca1256a6925eee541a2adf4a5449 : cffi/cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl
5f84f7a02ebabe9aae21e891cb0787bc8956615176caf6977c9fa258c6cafce5 : update_package.py
f1bdaad864fbcd590e1800efb723cf7a295bb2c4b0de7ff795f73fc94e48b42e : requirements.txt
3307da49dca7842503f3369c908cd625f2b16276a8690e4dd37c08a057580c44 : WAPT/icon.png
a5a97261381e1d0ad46ee15916abec9c2631d0201f5cc50ceb0197a165a0bbbf : WAPT/certificate.crt
b8ced37133a13d7038f6b880da5012251502d3e7263b8d5ca2ea440a6cad2189 : luti.json
62f69ce5a58e5a10a81ebe897340034f773a2f80bec32e7a47b0411a91fb92a2 : WAPT/control