HEX
Server: LiteSpeed
System: Linux atali.colombiahosting.com.co 5.14.0-570.12.1.el9_6.x86_64 #1 SMP PREEMPT_DYNAMIC Tue May 13 06:11:55 EDT 2025 x86_64
User: coopserp (1713)
PHP: 8.2.29
Disabled: dl,exec,passthru,proc_open,proc_close,shell_exec,memory_limit,system,popen,curl_multi_exec,show_source,symlink,link,leak,listen,diskfreespace,tmpfile,ignore_user_abord,highlight_file,source,show_source,fpaththru,virtual,posix_ctermid,posix_getcwd,posix_getegid,posix_geteuid,posix_getgid,posix_getgrgid,posix_getgrnam,posix_getgroups,posix_getlogin,posix_getpgid,posix_getpgrp,posix_getpid,posix,posix_getppid,posix_getpwnam,posix_getpwuid,posix_getrlimit,posix_getsid,posix_getuid,posix_isatty,posix_kill,posix_mkfifo,posix_setegid,posix_seteuid,posix_setgid,posix_setpgid,posix_setsid,posix_setid,posix_times,posix_ttyname,posix_uname,proc_get_status,proc_nice,proc_terminate
Upload Files
File: //opt/cloudlinux/venv/lib64/python3.11/site-packages/setuptools/tests/config/downloads/__init__.py
from __future__ import annotations

import re
import time
from pathlib import Path
from urllib.error import HTTPError
from urllib.request import urlopen

__all__ = ["DOWNLOAD_DIR", "retrieve_file", "output_file", "urls_from_file"]


NAME_REMOVE = ("http://", "https://", "github.com/", "/raw/")
DOWNLOAD_DIR = Path(__file__).parent


# ----------------------------------------------------------------------
# Please update ./preload.py accordingly when modifying this file
# ----------------------------------------------------------------------


def output_file(url: str, download_dir: Path = DOWNLOAD_DIR) -> Path:
    """Map ``url`` to a deterministic local filename inside ``download_dir``.

    Scheme/host fragments listed in ``NAME_REMOVE`` are dropped, then every
    remaining run of characters outside ``[-_.\\w\\d]`` collapses to ``_``.
    """
    name = url.strip()
    for fragment in NAME_REMOVE:
        # strip('/:') keeps partial removals from leaving dangling separators
        name = name.replace(fragment, '').strip().strip('/:').strip()
    sanitized = re.sub(r"[^\-_\.\w\d]+", "_", name)
    return download_dir / sanitized


def retrieve_file(url: str, download_dir: Path = DOWNLOAD_DIR, wait: float = 5) -> Path:
    """Download ``url`` into ``download_dir`` unless a cached copy already exists.

    On ``HTTPError`` the download is retried exactly once after ``wait``
    seconds; a second failure propagates to the caller. Returns the local
    path in either case.
    """
    target = output_file(url, download_dir)
    if target.exists():
        print(f"Skipping {url} (already exists: {target})")
        return target

    download_dir.mkdir(exist_ok=True, parents=True)
    print(f"Downloading {url} to {target}")
    try:
        download(url, target)
    except HTTPError:
        time.sleep(wait)  # wait a few seconds and try again.
        download(url, target)
    return target


def urls_from_file(list_file: Path) -> list[str]:
    """Return the URLs listed in ``list_file``, one per line.

    ``list_file`` should be a text file where each line corresponds to a URL to
    download. Lines starting with ``#`` are treated as comments and skipped.
    Blank or whitespace-only lines are also skipped (previously they were
    returned as empty "URLs" and would reach the downloader).
    """
    print(f"file: {list_file}")
    content = list_file.read_text(encoding="utf-8")
    # Keep each URL exactly as written; only filter comments and empties.
    return [
        url
        for url in content.splitlines()
        if url.strip() and not url.startswith("#")
    ]


def download(url: str, dest: Path):
    """Fetch ``url`` in one shot and write the raw bytes to ``dest``."""
    # NOTE: reads the whole payload into memory — fine for the small test
    # fixtures this module handles.
    with urlopen(url) as response:
        payload = response.read()

    dest_path = Path(dest)
    with open(dest_path, "wb") as out:
        out.write(payload)

    assert dest_path.exists()