tis-waptpython-charset-normalizer
3.2.0-11
The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet.
150 downloads
See build result See VirusTotal scan

Description
- package : tis-waptpython-charset-normalizer
- name : waptpython charset-normalizer
- version : 3.2.0-11
- categories : Development
- maintainer : WAPT Team,Tranquil IT,Jimmy PELÉ
- installed_size : 7996425
- editor :
- licence :
- signature_date : 2023-08-12T15:06:03.562090
- size : 2.63 Mo
- locale : all
- target_os : all
- impacted_process :
- architecture : all
- Homepage : https://github.com/Ousret/charset_normalizer
- Depends :
control
package : tis-waptpython-charset-normalizer
version : 3.2.0-11
architecture : all
section : base
priority : optional
name : waptpython charset-normalizer
categories : Development
maintainer : WAPT Team,Tranquil IT,Jimmy PELÉ
description : The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet.
depends : tis-waptpython-pip
conflicts :
maturity : PROD
locale : all
target_os : all
min_wapt_version : 2.3
sources : https://pypi.org/project/charset-normalizer/#files
installed_size : 7996425
impacted_process :
description_fr :
description_pl :
description_de :
description_es :
description_pt :
description_it :
description_nl :
description_ru :
audit_schedule : 3h
editor :
keywords : waptpython,python,pip,pypi,package,charset,normalizer,encoding,detector,normalization,unicode,chardet,detect
licence :
homepage : https://github.com/Ousret/charset_normalizer
package_uuid : 2320849a-382e-47a4-9b37-d98d6d006cf7
valid_from :
valid_until :
forced_install_on :
changelog :
min_os_version :
max_os_version :
icon_sha256sum : 3307da49dca7842503f3369c908cd625f2b16276a8690e4dd37c08a057580c44
signer : Tranquil IT
signer_fingerprint: 8c5127a75392be9cc9afd0dbae1222a673072c308c14d88ab246e23832e8c6bb
signature : ZnbheRIZSLxu6LRcYKGW8cPCGsGVbp5uk4fDKm4ngxDWJT1zv0i31fARAR6bdDGppn2vMKlhAYiwJP8WHJXvbkNMnrLf6e6O6cCeCfvqBY1pamR5PQLigObKb+6q6DXPzXg1121b6Bde/7dUdk6XVqE/L0MDZOCRL6XQH/FcAy/SFb+WxwbBabVnfjd3bI2GybFVyeyNdutVHvVITVTGM73yk7XOzaIHKJurYsgiEPp3joNW5Y5Yygp14S7igRfigfiR7sodx4NuiZWcN+NIYJQpEAcmJbtiALJv2bPclXBXfEnQDd8636zAcfFTKuAQv2WK0Fkl3I4869mbSnncjw==
signature_date : 2023-08-12T15:06:03.562090
signed_attributes : package,version,architecture,section,priority,name,categories,maintainer,description,depends,conflicts,maturity,locale,target_os,min_wapt_version,sources,installed_size,impacted_process,description_fr,description_pl,description_de,description_es,description_pt,description_it,description_nl,description_ru,audit_schedule,editor,keywords,licence,homepage,package_uuid,valid_from,valid_until,forced_install_on,changelog,min_os_version,max_os_version,icon_sha256sum,signer,signer_fingerprint,signature_date,signed_attributes
Setup.py
# -*- coding: utf-8 -*-
from setuphelpers import *
import os
import sys
# Resolve the WAPT "libdev" tree that lives next to the running interpreter
# and make both it and its site-packages importable (site-packages first).
_wapt_root = sys.executable.rsplit(os.path.sep, 1)[0].replace("/bin", "")
libdev_dir = os.sep.join([_wapt_root, "libdev"])
libdev_sp_dir = os.sep.join([_wapt_root, "libdev", "site-packages"])
for _extra_path in (libdev_dir, libdev_sp_dir):
    sys.path.insert(0, _extra_path)
package_name = "charset-normalizer"
# For now: False is recommended with modules generating bins, and if any
# dependencies are present in "lib", make sure to remove them from requirements.txt
install_in_libdev = False
def install():
    """Install the bundled wheel(s) with the WAPT Python's pip, fully offline.

    Wheels are taken from the <basedir>/<package_name> folder shipped inside the
    WAPT package (--no-index --find-links), optionally targeting libdev.
    """
    wheels_dir = makepath(basedir, package_name)
    mkdirs(libdev_sp_dir)
    parts = [f'"{get_waptpython_path()}" -m pip install']
    if isfile("requirements.txt"):
        parts.append("-r requirements.txt --upgrade --no-deps")
    else:
        # =={control.get_software_version()}
        parts.append(f"{package_name} --upgrade --no-deps")
    parts.append(f'--no-index --find-links "{wheels_dir}"')
    if install_in_libdev:
        parts.append(f'--target "{libdev_sp_dir}"')
    if force:
        parts.append("--force-reinstall")
    print(run(" ".join(parts)))
def audit():
    """Check that the pip package is still installed; reinstall the WAPT package if not.

    Returns "OK" when `pip show` finds the package, "WARNING" after triggering
    a forced reinstall.
    """
    show_output = str(run(f'"{get_waptpython_path()}" -m pip show {package_name}', accept_returncodes=[0, 1, 3010]))
    print(show_output)
    if "not found" not in show_output:
        return "OK"
    # pip no longer knows the package: force a reinstall of this WAPT package.
    print(f"Reinstalling: {control.package}")
    WAPT.install(control.package, force=True)
    return "WARNING"
def uninstall():
    """Uninstall the pip package and sweep leftover dirs from libdev site-packages."""
    print(run(f'"{get_waptpython_path()}" -m pip uninstall {package_name} -y'))
    # NOTE(review): `glob` is not imported in this file — presumably exported by
    # the `setuphelpers` star import; confirm before refactoring imports.
    for leftover_dir in glob.glob(f"{libdev_sp_dir}/{package_name}*/"):
        remove_tree(leftover_dir)
def get_waptpython_path():
    """Return the path to the WAPT-bundled Python launcher for the current OS."""
    launcher = "waptpython.exe" if get_os_name() == "Windows" else "waptpython.sh"
    return makepath(WAPT.wapt_base_dir, launcher)
update_package.py
# -*- coding: utf-8 -*-
from setuphelpers import *
import os
import sys
# Resolve the WAPT "libdev" tree that lives next to the running interpreter
# and make both it and its site-packages importable (site-packages first).
_wapt_root = sys.executable.rsplit(os.path.sep, 1)[0].replace("/bin", "")
libdev_dir = os.sep.join([_wapt_root, "libdev"])
libdev_sp_dir = os.sep.join([_wapt_root, "libdev", "site-packages"])
for _extra_path in (libdev_dir, libdev_sp_dir):
    sys.path.insert(0, _extra_path)
package_name = "charset-normalizer"
# e.g. "38" for CPython 3.8 — used to match cpXY wheel filename tags.
python_version = f"{sys.version_info[0]}{sys.version_info[1]}"
def update_package():
    """Refresh the bundled wheels from PyPI and update the WAPT control metadata.

    Downloads the newest wheels into the <package_name> folder, prunes stale
    binaries/dirs, (re)generates requirements.txt if absent, autocompletes the
    control file from the PyPI JSON API, and bumps the software version.

    Returns:
        bool: True when the recorded software version was updated.
    """
    # Declaring local variables
    package_updated = False
    proxies = get_proxies()
    if not proxies:
        # Fall back to the proxies configured in the WAPT console.
        proxies = get_proxies_from_wapt_console()
    api_url = f"https://pypi.org/pypi/{package_name}/json"
    index_url = None
    # index_url = "https://pip.ad.tranquil.it/"
    # fixed_version = "9.5.0"
    fixed_version = None  # when set, pin downloads to that exact release
    # Downloading whl files in package_name folder
    if isfile("requirements.txt"):
        pip_download_cmd = f'"{get_waptpython_path()}" -m pip download -r requirements.txt --only-binary=:all: --dest "{package_name}"'
    else:
        pip_download_cmd = f'"{get_waptpython_path()}" -m pip download {package_name} --only-binary=:all: --dest "{package_name}"'
    if index_url:
        pip_download_cmd += f' --index-url="{index_url}"'
    elif proxies:
        pip_download_cmd += f' --proxy="{proxies["http"]}"'
    print(run(pip_download_cmd))
    # Getting latest version information from official sources
    print("API used is: %s" % api_url)
    json_load = wgets(api_url, proxies=proxies, as_json=True)
    if fixed_version:
        json_search = json_load["releases"][fixed_version]
        version = fixed_version
    else:
        json_search = json_load["urls"]
        version = json_load["info"]["version"]
    # Fetch any matching wheel (pure-python -py3- or this interpreter's cpXY tag)
    # that pip download did not already place in the folder.
    for to_download in json_search:
        filename = to_download["filename"]
        if ".whl" in filename and ("-py3-" in filename or f"cp{python_version}" in filename):
            download_url = to_download["url"]
            latest_bin = package_name + os.sep + filename
            if not isfile(latest_bin):
                print("Downloading: %s" % latest_bin)
                wget(download_url, latest_bin, proxies=proxies)
            else:
                print("Binary is present: %s" % latest_bin)
    # Deleting outdated binaries
    for f in glob.glob("*.whl"):
        print("Removing: %s" % f)
        remove_file(f)
    # Deleting other binaries
    for f in glob.glob(f'{package_name}/{json_load["info"]["name"]}*.whl'):  # filename.split("-")[0]
        if not version in f:
            print("Removing: %s" % f)
            remove_file(f)
    # Deleting other dirs
    for dir_name in glob.glob("*/"):
        dir_name = dir_name.split(os.sep)[0]
        if not dir_name in ["WAPT", "__pycache__"] and not package_name in dir_name:
            remove_tree(dir_name)
    # generate_requirements_txt
    if not isfile("requirements.txt"):
        generate_requirements_txt(package_name, glob.glob(f"{package_name}/*.whl"))
    else:
        # Keep only wheels whose (normalized) name is listed in requirements.txt.
        with open("requirements.txt") as f:
            requirements_list = f.read().split("\n")
        requirements_list = [a.split("==")[0] for a in requirements_list]
        for package in glob.glob(f"{package_name}/*.whl"):
            package_info = package.split(os.sep)[-1].split("-")
            name = package_info[0].replace("_", "-").lower()
            if not name in requirements_list:
                remove_file(package)
    # control autocompletion
    api_url = f"https://pypi.org/pypi/{package_name}/json"
    json_load = wgets(api_url, proxies=proxies, as_json=True)
    control.package = control.package.split("-", 1)[0] + "-waptpython-" + json_load["info"]["name"].lower()
    control.name = "waptpython " + json_load["info"]["name"]
    # Rough on-disk footprint estimate: 3x the size of the downloaded wheels.
    control.installed_size = get_size(package_name) * 3
    if json_load["info"].get("keywords") is not None and json_load["info"]["keywords"]:
        print(f'You may wanna add: "{json_load["info"]["keywords"].lower()}" in: "control.keywords"')
    control.description = json_load["info"]["summary"]
    # NOTE(review): project_urls can be null in PyPI JSON — these .get() calls
    # assume it is a dict here; confirm for packages without project URLs.
    if json_load["info"]["project_urls"].get("Changelog") is not None and json_load["info"]["project_urls"]["Changelog"]:
        control.changelog = json_load["info"]["project_urls"]["Changelog"]
    control.sources = json_load["info"]["package_url"] + "#files"
    if json_load["info"]["project_urls"].get("Homepage") is not None and json_load["info"]["project_urls"]["Homepage"]:
        control.homepage = json_load["info"]["project_urls"]["Homepage"]
    # Changing version of the package
    if Version(version, 4) > Version(control.get_software_version(), 4):
        print("Software version updated (from: %s to: %s)" % (control.get_software_version(), Version(version)))
        package_updated = True
    else:
        print("Software version up-to-date (%s)" % Version(version))
    control.set_software_version(version)
    control.save_control_to_wapt()
    # Validating update-package-sources
    return package_updated
    # # Changing version of the package and validating update-package-sources
    # return complete_control_version(control, version)
def generate_requirements_txt(package_name, packages_list):
    """Write a requirements.txt listing one entry per distribution in packages_list.

    Args:
        package_name: name of the folder holding the wheels (unused in the
            output; kept for interface compatibility).
        packages_list: wheel file paths shaped "<dir>/<name>-<version>-...whl".

    Only normalized distribution names are written (versions intentionally
    omitted so pip always resolves to the bundled wheel), newline-separated.
    """

    def _version_key(version_string):
        # Numeric comparison key: "10.0" must rank above "9.1". Plain string
        # comparison (the previous behavior) ordered versions lexicographically
        # and could keep the wrong "latest" release.
        key = []
        for part in version_string.split("."):
            digits = ""
            for ch in part:
                if not ch.isdigit():
                    break
                digits += ch
            key.append(int(digits) if digits else 0)
        return tuple(key)

    latest_versions = {}
    for package in packages_list:
        package_info = package.split(os.sep)[-1].split("-")
        name = package_info[0].replace("_", "-").lower()  # PEP 503-style normalization
        version = package_info[1]
        # Keep only the highest version seen for each distribution name.
        if name not in latest_versions or _version_key(version) > _version_key(latest_versions[name]):
            latest_versions[name] = version
    requirements_content = [name for name in latest_versions]
    with open("requirements.txt", "w", newline="\n") as requirements_file:
        requirements_file.write("\n".join(requirements_content))
def get_waptpython_path():
    """Return the path to the WAPT-bundled Python launcher for the current OS."""
    launcher = "waptpython.exe" if get_os_name() == "Windows" else "waptpython.sh"
    return makepath(WAPT.wapt_base_dir, launcher)
def get_size(start_path="."):
    """Return the total size in bytes of all files under start_path.

    Symbolic links are skipped so their targets are not double-counted.
    """
    return sum(
        os.path.getsize(os.path.join(dirpath, name))
        for dirpath, _dirnames, filenames in os.walk(start_path)
        for name in filenames
        if not os.path.islink(os.path.join(dirpath, name))
    )
cd667129310d2b1656c6afa2ebd52b2dc664062e8795f3f7c20ded2e591ec768 : setup.py
: __pycache__
8fb03f6616974164fdd1bcadda32832d4e0ed412b0e6d4b6c0e3201af4900933 : update_package.py
4fc50459d541859ca215ce6097f8428536192fa24130d68298fb58aa69fcff5d : requirements.txt
3307da49dca7842503f3369c908cd625f2b16276a8690e4dd37c08a057580c44 : WAPT/icon.png
a5a97261381e1d0ad46ee15916abec9c2631d0201f5cc50ceb0197a165a0bbbf : WAPT/certificate.crt
a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2 : charset-normalizer/charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl
e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94 : charset-normalizer/charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl
8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e : charset-normalizer/charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl
1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706 : charset-normalizer/charset_normalizer-3.2.0-cp38-cp38-win32.whl
95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669 : charset-normalizer/charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl
e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037 : charset-normalizer/charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl
e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c : charset-normalizer/charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl
89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a : charset-normalizer/charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa : charset-normalizer/charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl
8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6 : charset-normalizer/charset_normalizer-3.2.0-py3-none-any.whl
a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f : charset-normalizer/charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl
2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46 : charset-normalizer/charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl
6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149 : charset-normalizer/charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl
45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10 : charset-normalizer/charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl
ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d : charset-normalizer/charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl
1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329 : charset-normalizer/charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl
aef3ca165354ac147ab584d5f3ffc173dca4744caf9eb9784447df584ac9e365 : luti.json
54868809116e70c461435b19f49c2c977882e4f78477683810fe1716b7cf730d : WAPT/control